Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • devel
  • hruska-feature-#6799-filter-keys
  • hruska-feature-5066-duplicateIdeaID
  • hruska-feature-clients-api
  • malostik-#5066-deduplicate-idea-ids
  • master
  • warden-postgresql-port
  • warden-client-1.1.0
  • warden-client-1.2.0
  • warden-client-2.0
  • warden-client-2.0.0-beta1
  • warden-client-2.0.0-beta2
  • warden-client-2.1
  • warden-client-2.1-beta
  • warden-client-2.2
  • warden-client-2.2-final
  • warden-client-3.0-beta0
  • warden-client-3.0-beta1
  • warden-client-3.0-beta2
  • warden-client-3.0-beta3
  • warden-server-0.1.0
  • warden-server-2.0
  • warden-server-2.0.0-beta1
  • warden-server-2.1
  • warden-server-2.1-aplha1
  • warden-server-2.1-beta1
  • warden-server-2.1-beta2
  • warden-server-2.1-beta3
  • warden-server-2.1-beta4
  • warden-server-2.1-beta5
  • warden-server-2.1-beta6
  • warden-server-2.1-patch1
  • warden-server-2.2
  • warden-server-2.2-final
  • warden-server-2.2-patch1
  • warden-server-2.2-patch3
  • warden-server-3.0-beta0
  • warden-server-3.0-beta1
  • warden-server-3.0-beta2
  • warden-server-3.0-beta3
40 results

Target

Select target project
  • Pavel.Valach/warden
1 result
Select Git revision
  • devel
  • hruska-feature-#6799-filter-keys
  • hruska-feature-5066-duplicateIdeaID
  • hruska-feature-clients-api
  • malostik-#5066-deduplicate-idea-ids
  • master
  • warden-postgresql-port
  • warden-client-1.1.0
  • warden-client-1.2.0
  • warden-client-2.0
  • warden-client-2.0.0-beta1
  • warden-client-2.0.0-beta2
  • warden-client-2.1
  • warden-client-2.1-beta
  • warden-client-2.2
  • warden-client-2.2-final
  • warden-client-3.0-beta0
  • warden-client-3.0-beta1
  • warden-client-3.0-beta2
  • warden-client-3.0-beta3
  • warden-server-0.1.0
  • warden-server-2.0
  • warden-server-2.0.0-beta1
  • warden-server-2.1
  • warden-server-2.1-aplha1
  • warden-server-2.1-beta1
  • warden-server-2.1-beta2
  • warden-server-2.1-beta3
  • warden-server-2.1-beta4
  • warden-server-2.1-beta5
  • warden-server-2.1-beta6
  • warden-server-2.1-patch1
  • warden-server-2.2
  • warden-server-2.2-final
  • warden-server-2.2-patch1
  • warden-server-2.2-patch3
  • warden-server-3.0-beta0
  • warden-server-3.0-beta1
  • warden-server-3.0-beta2
  • warden-server-3.0-beta3
40 results
Show changes
...@@ -4,23 +4,39 @@ ...@@ -4,23 +4,39 @@
# Copyright (C) 2011-2015 Cesnet z.s.p.o # Copyright (C) 2011-2015 Cesnet z.s.p.o
# Use of this source is governed by a 3-clause BSD-style license, see LICENSE file. # Use of this source is governed by a 3-clause BSD-style license, see LICENSE file.
from __future__ import print_function
import sys import sys
import os import os
import io
from os import path
import logging import logging
import logging.handlers import logging.handlers
import ConfigParser
from traceback import format_tb
import M2Crypto.X509
import json import json
import MySQLdb as my
import MySQLdb.cursors as mycursors
import re import re
import email.utils from traceback import format_tb
from collections import namedtuple from collections import namedtuple
from time import sleep from time import sleep
from urlparse import parse_qs
from os import path
from random import randint from random import randint
import M2Crypto.X509
import MySQLdb as my
import MySQLdb.cursors as mycursors
if sys.version_info[0] >= 3:
import configparser as ConfigParser
from urllib.parse import parse_qs
unicode = str
def get_method_params(method):
return method.__code__.co_varnames[:method.__code__.co_argcount]
else:
import ConfigParser
from urlparse import parse_qs
def get_method_params(method):
return method.func_code.co_varnames[:method.func_code.co_argcount]
# for local version of up to date jsonschema # for local version of up to date jsonschema
sys.path.append(path.join(path.dirname(__file__), "..", "lib")) sys.path.append(path.join(path.dirname(__file__), "..", "lib"))
...@@ -28,7 +44,8 @@ sys.path.append(path.join(path.dirname(__file__), "..", "lib")) ...@@ -28,7 +44,8 @@ sys.path.append(path.join(path.dirname(__file__), "..", "lib"))
from jsonschema import Draft4Validator from jsonschema import Draft4Validator
VERSION = "3.0-beta2" VERSION = "3.0-beta3"
class Error(Exception): class Error(Exception):
...@@ -39,11 +56,9 @@ class Error(Exception): ...@@ -39,11 +56,9 @@ class Error(Exception):
if errors: if errors:
self.errors.extend(errors) self.errors.extend(errors)
def append(self, _events=None, **kwargs): def append(self, _events=None, **kwargs):
self.errors.append(kwargs) self.errors.append(kwargs)
def get_http_err_msg(self): def get_http_err_msg(self):
try: try:
err = self.errors[0]["error"] err = self.errors[0]["error"]
...@@ -62,11 +77,9 @@ class Error(Exception): ...@@ -62,11 +77,9 @@ class Error(Exception):
msg = "Multiple errors" msg = "Multiple errors"
return err, msg return err, msg
def __str__(self): def __str__(self):
return "\n".join(self.str_err(e) for e in self.errors) return "\n".join(self.str_err(e) for e in self.errors)
def log(self, logger, prio=logging.ERROR): def log(self, logger, prio=logging.ERROR):
for e in self.errors: for e in self.errors:
logger.log(prio, self.str_err(e)) logger.log(prio, self.str_err(e))
...@@ -77,7 +90,6 @@ class Error(Exception): ...@@ -77,7 +90,6 @@ class Error(Exception):
if debug: if debug:
logger.debug(debug) logger.debug(debug)
def str_err(self, e): def str_err(self, e):
out = [] out = []
out.append("Error(%s) %s " % (e.get("error", 0), e.get("message", "Unknown error"))) out.append("Error(%s) %s " % (e.get("error", 0), e.get("message", "Unknown error")))
...@@ -85,7 +97,6 @@ class Error(Exception): ...@@ -85,7 +97,6 @@ class Error(Exception):
out.append("(cause was %s: %s)" % (e["exc"][0].__name__, str(e["exc"][1]))) out.append("(cause was %s: %s)" % (e["exc"][0].__name__, str(e["exc"][1])))
return "".join(out) return "".join(out)
def str_info(self, e): def str_info(self, e):
ecopy = dict(e) # shallow copy ecopy = dict(e) # shallow copy
ecopy.pop("req_id", None) ecopy.pop("req_id", None)
...@@ -99,10 +110,9 @@ class Error(Exception): ...@@ -99,10 +110,9 @@ class Error(Exception):
out = "" out = ""
return out return out
def str_debug(self, e): def str_debug(self, e):
out = [] out = []
if not "exc" in e or not e["exc"]: if not e.get("exc"):
return "" return ""
exc_tb = e["exc"][2] exc_tb = e["exc"][2]
if exc_tb: if exc_tb:
...@@ -110,7 +120,6 @@ class Error(Exception): ...@@ -110,7 +120,6 @@ class Error(Exception):
out.extend(format_tb(exc_tb)) out.extend(format_tb(exc_tb))
return "".join(out) return "".join(out)
def to_dict(self): def to_dict(self):
errlist = [] errlist = []
for e in self.errors: for e in self.errors:
...@@ -125,7 +134,6 @@ class Error(Exception): ...@@ -125,7 +134,6 @@ class Error(Exception):
return d return d
def get_clean_root_logger(level=logging.INFO): def get_clean_root_logger(level=logging.INFO):
""" Attempts to get logging module into clean slate state """ """ Attempts to get logging module into clean slate state """
...@@ -140,6 +148,7 @@ def get_clean_root_logger(level=logging.INFO): ...@@ -140,6 +148,7 @@ def get_clean_root_logger(level=logging.INFO):
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.setLevel(level) logger.setLevel(level)
while logger.handlers: while logger.handlers:
logger.handlers[0].close()
logger.removeHandler(logger.handlers[0]) logger.removeHandler(logger.handlers[0])
while logger.filters: while logger.filters:
logger.removeFilter(logger.filters[0]) logger.removeFilter(logger.filters[0])
...@@ -147,7 +156,6 @@ def get_clean_root_logger(level=logging.INFO): ...@@ -147,7 +156,6 @@ def get_clean_root_logger(level=logging.INFO):
return logger return logger
def StreamLogger(stream=sys.stderr, level=logging.DEBUG): def StreamLogger(stream=sys.stderr, level=logging.DEBUG):
""" Fallback handler just for setup, not meant to be used from """ Fallback handler just for setup, not meant to be used from
configuration file because during wsgi query stdout/stderr configuration file because during wsgi query stdout/stderr
...@@ -162,7 +170,6 @@ def StreamLogger(stream=sys.stderr, level=logging.DEBUG): ...@@ -162,7 +170,6 @@ def StreamLogger(stream=sys.stderr, level=logging.DEBUG):
return logger return logger
class LogRequestFilter(logging.Filter): class LogRequestFilter(logging.Filter):
""" Filter class, instance of which is added to logger class to add """ Filter class, instance of which is added to logger class to add
info about request automatically into every logline, no matter info about request automatically into every logline, no matter
...@@ -173,7 +180,6 @@ class LogRequestFilter(logging.Filter): ...@@ -173,7 +180,6 @@ class LogRequestFilter(logging.Filter):
logging.Filter.__init__(self) logging.Filter.__init__(self)
self.req = req self.req = req
def filter(self, record): def filter(self, record):
if self.req.env: if self.req.env:
record.req_preamble = "%08x/%s: " % (self.req.req_id or 0, self.req.path) record.req_preamble = "%08x/%s: " % (self.req.req_id or 0, self.req.path)
...@@ -182,7 +188,6 @@ class LogRequestFilter(logging.Filter): ...@@ -182,7 +188,6 @@ class LogRequestFilter(logging.Filter):
return True return True
def FileLogger(req, filename, level=logging.INFO): def FileLogger(req, filename, level=logging.INFO):
fhand = logging.FileHandler(filename) fhand = logging.FileHandler(filename)
...@@ -192,11 +197,10 @@ def FileLogger(req, filename, level=logging.INFO): ...@@ -192,11 +197,10 @@ def FileLogger(req, filename, level=logging.INFO):
logger = get_clean_root_logger(level) logger = get_clean_root_logger(level)
logger.addFilter(ffilt) logger.addFilter(ffilt)
logger.addHandler(fhand) logger.addHandler(fhand)
logging.info("Initialized FileLogger(req=%s, filename=\"%s\", level=\"%d\")" % (type(req).__name__, filename, level)) logger.info("Initialized FileLogger(req=%r, filename=\"%s\", level=%s)" % (req, filename, level))
return logger return logger
def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO): def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LOG_DAEMON, level=logging.INFO):
fhand = logging.handlers.SysLogHandler(address=socket, facility=facility) fhand = logging.handlers.SysLogHandler(address=socket, facility=facility)
...@@ -206,21 +210,21 @@ def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LO ...@@ -206,21 +210,21 @@ def SysLogger(req, socket="/dev/log", facility=logging.handlers.SysLogHandler.LO
logger = get_clean_root_logger(level) logger = get_clean_root_logger(level)
logger.addFilter(ffilt) logger.addFilter(ffilt)
logger.addHandler(fhand) logger.addHandler(fhand)
logging.info("Initialized SysLogger(req=%s, socket=\"%s\", facility=\"%d\", level=\"%d\")" % (type(req).__name__, socket, facility, level)) logger.info("Initialized SysLogger(req=%r, socket=\"%s\", facility=\"%d\", level=%s)" % (req, socket, facility, level))
return logger return logger
Client = namedtuple("Client", [
Client = namedtuple("Client", "id", "registered", "requestor", "hostname", "name",
["id", "registered", "requestor", "hostname", "name",
"secret", "valid", "read", "debug", "write", "test", "note"]) "secret", "valid", "read", "debug", "write", "test", "note"])
class Object(object): class Object(object):
def __str__(self): def __str__(self):
return "%s()" % type(self).__name__ attrs = get_method_params(self.__init__)[1:]
eq_str = ["%s=%r" % (attr, getattr(self, attr, None)) for attr in attrs]
return "%s(%s)" % (type(self).__name__, ", ".join(eq_str))
class Request(Object): class Request(Object):
...@@ -240,15 +244,6 @@ class Request(Object): ...@@ -240,15 +244,6 @@ class Request(Object):
which data their main codepaths work with. which data their main codepaths work with.
""" """
def __init__(self):
Object.__init__(self)
self.reset()
def __str__(self):
return "%s()" % (type(self).__name__, str(self.env), str(self.client))
def reset(self, env=None, client=None, path=None, req_id=None): def reset(self, env=None, client=None, path=None, req_id=None):
self.env = env self.env = env
self.client = client self.client = client
...@@ -258,12 +253,12 @@ class Request(Object): ...@@ -258,12 +253,12 @@ class Request(Object):
else: else:
self.req_id = 0 if env is None else randint(0x00000000, 0xFFFFFFFF) self.req_id = 0 if env is None else randint(0x00000000, 0xFFFFFFFF)
__init__ = reset
def error(self, **kwargs): def error(self, **kwargs):
return Error(self.path, self.req_id, **kwargs) return Error(self.path, self.req_id, **kwargs)
class ObjectBase(Object): class ObjectBase(Object):
def __init__(self, req, log): def __init__(self, req, log):
...@@ -272,22 +267,12 @@ class ObjectBase(Object): ...@@ -272,22 +267,12 @@ class ObjectBase(Object):
self.log = log self.log = log
def __str__(self):
return "%s(req=%s)" % (type(self).__name__, type(self.req).__name__)
class PlainAuthenticator(ObjectBase): class PlainAuthenticator(ObjectBase):
def __init__(self, req, log, db): def __init__(self, req, log, db):
ObjectBase.__init__(self, req, log) ObjectBase.__init__(self, req, log)
self.db = db self.db = db
def __str__(self):
return "%s(req=%s, db=%s)" % (type(self).__name__, type(self.req).__name__, type(self.db).__name__)
def authenticate(self, env, args, hostnames=None, check_secret=True): def authenticate(self, env, args, hostnames=None, check_secret=True):
name = args.get("client", [None])[0] name = args.get("client", [None])[0]
secret = args.get("secret", [None])[0] if check_secret else None secret = args.get("secret", [None])[0] if check_secret else None
...@@ -313,7 +298,6 @@ class PlainAuthenticator(ObjectBase): ...@@ -313,7 +298,6 @@ class PlainAuthenticator(ObjectBase):
return client return client
def authorize(self, env, client, path, method): def authorize(self, env, client, path, method):
if method.debug: if method.debug:
if not client.debug: if not client.debug:
...@@ -355,49 +339,75 @@ class X509Authenticator(PlainAuthenticator): ...@@ -355,49 +339,75 @@ class X509Authenticator(PlainAuthenticator):
firstcommon = commons[0] firstcommon = commons[0]
return [firstcommon] + list(set(altnames+commons) - set([firstcommon])) return [firstcommon] + list(set(altnames+commons) - set([firstcommon]))
def is_verified_by_apache(self, env, args):
# Allows correct work while SSLVerifyClient both "optional" and "required"
verify = env.get("SSL_CLIENT_VERIFY")
if verify == "SUCCESS":
return True
exception = self.req.error(
message="authenticate: certificate verification failed",
error=403, args=args, ssl_client_verify=verify, cert=env.get("SSL_CLIENT_CERT"))
exception.log(self.log)
return False
def authenticate(self, env, args): def authenticate(self, env, args):
if not self.is_verified_by_apache(env, args):
return None
try: try:
cert_names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"]) cert_names = self.get_cert_dns_names(env["SSL_CLIENT_CERT"])
except: except:
exception = self.req.error(message="authenticate: cannot get or parse certificate from env", error=403, exc=sys.exc_info(), env=env) exception = self.req.error(
message="authenticate: cannot get or parse certificate from env",
error=403, exc=sys.exc_info(), env=env)
exception.log(self.log) exception.log(self.log)
return None return None
return PlainAuthenticator.authenticate(self, env, args, hostnames=cert_names) return PlainAuthenticator.authenticate(self, env, args, hostnames=cert_names)
class X509NameAuthenticator(PlainAuthenticator): class X509NameAuthenticator(X509Authenticator):
def authenticate(self, env, args): def authenticate(self, env, args):
if not self.is_verified_by_apache(env, args):
return None
try: try:
cert_name = env["SSL_CLIENT_S_DN_CN"] cert_name = env["SSL_CLIENT_S_DN_CN"]
except: except:
exception = self.req.error(message="authenticate: cannot get or parse certificate from env", error=403, exc=sys.exc_info(), env=env) exception = self.req.error(
message="authenticate: cannot get or parse certificate from env",
error=403, exc=sys.exc_info(), env=env)
exception.log(self.log) exception.log(self.log)
return None return None
if cert_name != args.setdefault("client", [cert_name])[0]: if cert_name != args.setdefault("client", [cert_name])[0]:
exception = self.req.error(message="authenticate: client name does not correspond with certificate", error=403, cn = cert_name, args = args) exception = self.req.error(
message="authenticate: client name does not correspond with certificate",
error=403, cn=cert_name, args=args)
exception.log(self.log) exception.log(self.log)
return None return None
return PlainAuthenticator.authenticate(self, env, args, check_secret=False) return PlainAuthenticator.authenticate(self, env, args, check_secret=False)
class X509MixMatchAuthenticator(PlainAuthenticator): class X509MixMatchAuthenticator(X509Authenticator):
def __init__(self, req, log, db): def __init__(self, req, log, db):
PlainAuthenticator.__init__(self, req, log, db) PlainAuthenticator.__init__(self, req, log, db)
self.hostname_auth = X509Authenticator(req, log, db) self.hostname_auth = X509Authenticator(req, log, db)
self.name_auth = X509NameAuthenticator(req, log, db) self.name_auth = X509NameAuthenticator(req, log, db)
def authenticate(self, env, args): def authenticate(self, env, args):
if not self.is_verified_by_apache(env, args):
return None
try: try:
cert_name = env["SSL_CLIENT_S_DN_CN"] cert_name = env["SSL_CLIENT_S_DN_CN"]
except: except:
exception = self.req.error(message="authenticate: cannot get or parse certificate from env", error=403, exc=sys.exc_info(), env=env) exception = self.req.error(
message="authenticate: cannot get or parse certificate from env",
error=403, exc=sys.exc_info(), env=env)
exception.log(self.log) exception.log(self.log)
return None return None
name = args.get("client", [None])[0] name = args.get("client", [None])[0]
...@@ -426,11 +436,6 @@ class NoValidator(ObjectBase): ...@@ -426,11 +436,6 @@ class NoValidator(ObjectBase):
def __init__(self, req, log): def __init__(self, req, log):
ObjectBase.__init__(self, req, log) ObjectBase.__init__(self, req, log)
def __str__(self):
return "%s(req=%s)" % (type(self).__name__, type(self.req).__name__)
def check(self, event): def check(self, event):
return [] return []
...@@ -440,15 +445,10 @@ class JSONSchemaValidator(NoValidator): ...@@ -440,15 +445,10 @@ class JSONSchemaValidator(NoValidator):
def __init__(self, req, log, filename=None): def __init__(self, req, log, filename=None):
NoValidator.__init__(self, req, log) NoValidator.__init__(self, req, log)
self.path = filename or path.join(path.dirname(__file__), "idea.schema") self.path = filename or path.join(path.dirname(__file__), "idea.schema")
with open(self.path) as f: with io.open(self.path, "r", encoding="utf-8") as f:
self.schema = json.load(f) self.schema = json.load(f)
self.validator = Draft4Validator(self.schema) self.validator = Draft4Validator(self.schema)
def __str__(self):
return "%s(req=%s, filename=\"%s\")" % (type(self).__name__, type(self.req).__name__, self.path)
def check(self, event): def check(self, event):
def sortkey(k): def sortkey(k):
...@@ -469,10 +469,10 @@ class JSONSchemaValidator(NoValidator): ...@@ -469,10 +469,10 @@ class JSONSchemaValidator(NoValidator):
return res return res
class MySQL(ObjectBase): class MySQL(ObjectBase):
def __init__(self, req, log, host, user, password, dbname, port, retry_count, def __init__(
self, req, log, host, user, password, dbname, port, retry_count,
retry_pause, event_size_limit, catmap_filename, tagmap_filename): retry_pause, event_size_limit, catmap_filename, tagmap_filename):
ObjectBase.__init__(self, req, log) ObjectBase.__init__(self, req, log)
self.host = host self.host = host
...@@ -482,78 +482,112 @@ class MySQL(ObjectBase): ...@@ -482,78 +482,112 @@ class MySQL(ObjectBase):
self.port = port self.port = port
self.retry_count = retry_count self.retry_count = retry_count
self.retry_pause = retry_pause self.retry_pause = retry_pause
self.retry_attempt = 0
self.event_size_limit = event_size_limit self.event_size_limit = event_size_limit
self.catmap_filename = catmap_filename self.catmap_filename = catmap_filename
self.tagmap_filename = tagmap_filename self.tagmap_filename = tagmap_filename
with open(catmap_filename, "r") as catmap_fd: with io.open(catmap_filename, "r", encoding="utf-8") as catmap_fd:
self.catmap = json.load(catmap_fd) self.catmap = json.load(catmap_fd)
self.catmap_other = self.catmap["Other"] # Catch error soon, avoid lookup later self.catmap_other = self.catmap["Other"] # Catch error soon, avoid lookup later
with open(tagmap_filename, "r") as tagmap_fd: with io.open(tagmap_filename, "r", encoding="utf-8") as tagmap_fd:
self.tagmap = json.load(tagmap_fd) self.tagmap = json.load(tagmap_fd)
self.tagmap_other = self.catmap["Other"] # Catch error soon, avoid lookup later self.tagmap_other = self.tagmap["Other"] # Catch error soon, avoid lookup later
self.con = None self.con = None
self.connect()
def __str__(self):
return "%s(req=%s, host='%s', user='%s', dbname='%s', port=%d, retry_count=%d, retry_pause=%d, catmap_filename=\"%s\", tagmap_filename=\"%s\")" % (
type(self).__name__, type(self.req).__name__, self.host, self.user, self.dbname, self.port, self.retry_count, self.retry_pause, self.catmap_filename, self.tagmap_filename)
def connect(self): def connect(self):
self.con = my.connect(host=self.host, user=self.user, passwd=self.password, self.con = my.connect(
host=self.host, user=self.user, passwd=self.password,
db=self.dbname, port=self.port, cursorclass=mycursors.DictCursor) db=self.dbname, port=self.port, cursorclass=mycursors.DictCursor)
def close(self): def close(self):
try: try:
if self.con: if self.con:
self.con.close() self.con.close()
except Exception: except Exception:
pass pass
self.con = None
__del__ = close __del__ = close
def repeat(self):
""" Allows for graceful repeating of transactions self.retry_count
times. Unsuccessful attempts wait for self.retry_pause until
next attempt.
def query(self, *args, **kwargs): Meant for usage with context manager:
""" Execute query on self.con, reconnecting if necessary """
countdown = self.retry_count for attempt in self.repeat():
commit = kwargs.pop("commit", False) with attempt as db:
crs = kwargs.pop("crs", None) crs = db.query(...)
while True: # do something with crs
Note that it's not reentrant (as is not underlying MySQL
connection), so avoid nesting on the same MySQL object.
"""
self.retry_attempt = self.retry_count
while self.retry_attempt:
if self.retry_attempt != self.retry_count:
sleep(self.retry_pause)
self.retry_attempt -= 1
yield self
def __enter__(self):
""" Context manager protocol. Guarantees that transaction will
get either commited or rolled back in case of database
exception. Can be used with self.repeat(), or alone as:
with self as db:
crs = db.query(...)
# do something with crs
Note that it's not reentrant (as is not underlying MySQL
connection), so avoid nesting on the same MySQL object.
"""
if not self.retry_attempt:
self.retry_attempt = 0
return self
def __exit__(self, exc_type, exc_val, exc_tb):
""" Context manager protocol. If db exception is fired and
self.retry_attempt is not zero, it is only logged and
does not propagate, otherwise it propagates up. Also
open transaction is rolled back.
In case of no exception, transaction gets commited.
"""
if not exc_type:
self.con.commit()
self.retry_attempt = 0
else:
try: try:
if crs is None: if self.con:
self.con.rollback()
except my.Error:
pass
try:
self.close()
except my.Error:
pass
if self.retry_attempt:
self.log.info("Database error (%d attempts left): %s %s" % (self.retry_attempt, exc_type.__name__, exc_val))
return True
def query(self, *args, **kwargs):
if not self.con:
self.connect()
crs = self.con.cursor() crs = self.con.cursor()
self.log.debug("execute: %s %s" % (args, kwargs)) self.log.debug("execute: %s %s" % (args, kwargs))
crs.execute(*args, **kwargs) crs.execute(*args, **kwargs)
if commit:
self.con.commit()
return crs return crs
except my.OperationalError:
if not countdown:
raise
self.log.info("execute: Database down, trying to reconnect (%d attempts left)..." % countdown)
if countdown<self.retry_count:
sleep(self.retry_pause) # no need to melt down server on longer outage
self.close()
self.connect()
crs = None
countdown -= 1
def _get_comma_perc(self, l): def _get_comma_perc(self, l):
return ','.join(['%s'] * len(l)) return ','.join(['%s'] * len(l))
def _get_not(self, b): def _get_not(self, b):
return "" if b else "NOT" return "" if b else "NOT"
def get_client_by_name(self, cert_names=None, name=None, secret=None): def get_client_by_name(self, cert_names=None, name=None, secret=None):
query = ["SELECT * FROM clients WHERE valid = 1"] query = ["SELECT * FROM clients WHERE valid = 1"]
params = [] params = []
...@@ -566,15 +600,18 @@ class MySQL(ObjectBase): ...@@ -566,15 +600,18 @@ class MySQL(ObjectBase):
if cert_names: if cert_names:
query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names)) query.append(" AND hostname IN (%s)" % self._get_comma_perc(cert_names))
params.extend(n.lower() for n in cert_names) params.extend(n.lower() for n in cert_names)
rows = self.query("".join(query), params, commit=True).fetchall()
for attempt in self.repeat():
with attempt as db:
rows = db.query("".join(query), params).fetchall()
if len(rows) > 1: if len(rows) > 1:
self.log.warn("get_client_by_name: query returned more than one result (cert_names = %s, name = %s, secret = %s): %s" % (cert_names, name, secret, ", ".join([str(Client(**row)) for row in rows]))) self.log.warning(
"get_client_by_name: query returned more than one result (cert_names = %s, name = %s, secret = %s): %s" % (
cert_names, name, secret, ", ".join([str(Client(**row)) for row in rows])))
return None return None
return Client(**rows[0]) if rows else None return Client(**rows[0]) if rows else None
def get_clients(self, id=None): def get_clients(self, id=None):
query = ["SELECT * FROM clients"] query = ["SELECT * FROM clients"]
params = [] params = []
...@@ -582,10 +619,11 @@ class MySQL(ObjectBase): ...@@ -582,10 +619,11 @@ class MySQL(ObjectBase):
query.append("WHERE id = %s") query.append("WHERE id = %s")
params.append(id) params.append(id)
query.append("ORDER BY id") query.append("ORDER BY id")
rows = self.query(" ".join(query), params, commit=True).fetchall() for attempt in self.repeat():
with attempt as db:
rows = db.query(" ".join(query), params).fetchall()
return [Client(**row) for row in rows] return [Client(**row) for row in rows]
def add_modify_client(self, id=None, **kwargs): def add_modify_client(self, id=None, **kwargs):
query = [] query = []
params = [] params = []
...@@ -608,49 +646,53 @@ class MySQL(ObjectBase): ...@@ -608,49 +646,53 @@ class MySQL(ObjectBase):
if id is not None: if id is not None:
query.append("WHERE id = %s") query.append("WHERE id = %s")
params.append(id) params.append(id)
crs = self.query(" ".join(query), params, commit=True) for attempt in self.repeat():
with attempt as db:
crs = db.query(" ".join(query), params)
newid = crs.lastrowid if id is None else id newid = crs.lastrowid if id is None else id
return newid return newid
def get_debug(self): def get_debug(self):
rows = self.query("SELECT VERSION() AS VER", commit=True).fetchall() for attempt in self.repeat():
tablestat = self.query("SHOW TABLE STATUS", commit=True).fetchall() with attempt as db:
rows = db.query("SELECT VERSION() AS VER").fetchall()
tablestat = db.query("SHOW TABLE STATUS").fetchall()
return { return {
"db": "MySQL", "db": "MySQL",
"version": rows[0]["VER"], "version": rows[0]["VER"],
"tables": tablestat "tables": tablestat
} }
def getMaps(self, section, variables): def getMaps(self, section, variables):
maps = [] maps = []
for v in variables: for v in variables:
try: try:
mapped = section[v] mapped = section[v]
except KeyError: except KeyError:
raise self.req.error(message="Wrong tag or category used in query.", error=422, raise self.req.error(
exc=sys.exc_info(), key=v) message="Wrong tag or category used in query.",
error=422, exc=sys.exc_info(), key=v)
maps.append(mapped) maps.append(mapped)
return set(maps) # unique return set(maps) # unique
def fetch_events(
def fetch_events(self, client, id, count, self, client, id, count,
cat=None, nocat=None, cat=None, nocat=None,
tag=None, notag=None, tag=None, notag=None,
group=None, nogroup=None): group=None, nogroup=None):
logging.debug("fetch_events: id=%i, count=%i, cat=%s, nocat=%s, tag=%s, notag=%s, group=%s, nogroup=%s" % (id, count, str(cat), str(nocat), str(tag), str(notag), str(group), str(nogroup)))
if cat and nocat: if cat and nocat:
raise self.req.error(message="Unrealizable conditions. Choose cat or nocat option.", error=422, raise self.req.error(
cat=cat, nocat=nocat) message="Unrealizable conditions. Choose cat or nocat option.",
error=422, cat=cat, nocat=nocat)
if tag and notag: if tag and notag:
raise self.req.error(message="Unrealizable conditions. Choose tag or notag option.", error=422, raise self.req.error(
tag=tag, notag=notag) message="Unrealizable conditions. Choose tag or notag option.",
error=422, tag=tag, notag=notag)
if group and nogroup: if group and nogroup:
raise self.req.error(message="Unrealizable conditions. Choose group or nogroup option.", error=422, raise self.req.error(
group=group, nogroup=nogroup) message="Unrealizable conditions. Choose group or nogroup option.",
error=422, group=group, nogroup=nogroup)
query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"] query = ["SELECT e.id, e.data FROM clients c RIGHT JOIN events e ON c.id = e.client_id WHERE e.id > %s"]
params = [id or 0] params = [id or 0]
...@@ -672,10 +714,11 @@ class MySQL(ObjectBase): ...@@ -672,10 +714,11 @@ class MySQL(ObjectBase):
if group or nogroup: if group or nogroup:
subquery = [] subquery = []
for name in (group or nogroup): for name in (group or nogroup):
escaped_name = name.replace('&', '&&').replace("_", "&_").replace("%", "&%") # escape for LIKE
subquery.append("c.name = %s") # exact client subquery.append("c.name = %s") # exact client
params.append(name) params.append(name)
subquery.append("c.name LIKE %s") # whole subtree subquery.append("c.name LIKE CONCAT(%s, '.%%') ESCAPE '&'") # whole subtree
params.append(name + ".%") params.append(escaped_name)
query.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery))) query.append(" AND %s (%s)" % (self._get_not(group), " OR ".join(subquery)))
...@@ -684,7 +727,10 @@ class MySQL(ObjectBase): ...@@ -684,7 +727,10 @@ class MySQL(ObjectBase):
query_string = "".join(query) query_string = "".join(query)
row = self.query(query_string, params, commit=True).fetchall() row = None
for attempt in self.repeat():
with attempt as db:
row = db.query(query_string, params).fetchall()
if row: if row:
maxid = max(r['id'] for r in row) maxid = max(r['id'] for r in row)
...@@ -699,8 +745,9 @@ class MySQL(ObjectBase): ...@@ -699,8 +745,9 @@ class MySQL(ObjectBase):
# Note that we use Error object just for proper formatting, # Note that we use Error object just for proper formatting,
# but do not raise it; from client perspective invalid # but do not raise it; from client perspective invalid
# events get skipped silently. # events get skipped silently.
err = self.req.error(message="Unable to deserialize JSON event from db, id=%s" % r["id"], error=500, err = self.req.error(
exc=sys.exc_info(), id=r["id"]) message="Unable to deserialize JSON event from db, id=%s" % r["id"],
error=500, exc=sys.exc_info(), id=r["id"])
err.log(self.log, prio=logging.WARNING) err.log(self.log, prio=logging.WARNING)
events.append(e) events.append(e)
...@@ -709,20 +756,20 @@ class MySQL(ObjectBase): ...@@ -709,20 +756,20 @@ class MySQL(ObjectBase):
"events": events "events": events
} }
def store_events(self, client, events, events_raw): def store_events(self, client, events, events_raw):
crs = self.con.cursor()
try: try:
for attempt in self.repeat():
with attempt as db:
for event, raw_event in zip(events, events_raw): for event, raw_event in zip(events, events_raw):
self.query("INSERT INTO events (received,client_id,data) VALUES (NOW(), %s, %s)", lastid = db.query(
(client.id, raw_event), crs=crs) "INSERT INTO events (received,client_id,data) VALUES (NOW(), %s, %s)",
lastid = crs.lastrowid (client.id, raw_event)).lastrowid
catlist = event.get('Category', ["Other"]) catlist = event.get('Category', ["Other"])
cats = set(catlist) | set(cat.split(".", 1)[0] for cat in catlist) cats = set(catlist) | set(cat.split(".", 1)[0] for cat in catlist)
for cat in cats: for cat in cats:
cat_id = self.catmap.get(cat, self.catmap_other) cat_id = self.catmap.get(cat, self.catmap_other)
self.query("INSERT INTO event_category_mapping (event_id,category_id) VALUES (%s, %s)", (lastid, cat_id), crs=crs) db.query("INSERT INTO event_category_mapping (event_id,category_id) VALUES (%s, %s)", (lastid, cat_id))
nodes = event.get('Node', []) nodes = event.get('Node', [])
tags = [] tags = []
...@@ -730,97 +777,85 @@ class MySQL(ObjectBase): ...@@ -730,97 +777,85 @@ class MySQL(ObjectBase):
tags.extend(node.get('Type', [])) tags.extend(node.get('Type', []))
for tag in set(tags): for tag in set(tags):
tag_id = self.tagmap.get(tag, self.tagmap_other) tag_id = self.tagmap.get(tag, self.tagmap_other)
self.query("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES (%s, %s)", (lastid, tag_id), crs=crs) db.query("INSERT INTO event_tag_mapping (event_id,tag_id) VALUES (%s, %s)", (lastid, tag_id))
self.con.commit()
return [] return []
except Exception as e: except Exception as e:
self.con.rollback() exception = self.req.error(message="DB error", error=500, exc=sys.exc_info(), env=self.req.env)
exception = self.req.error(message="DB error", error=500, exc=sys.exc_info(), env=env)
exception.log(self.log) exception.log(self.log)
return [{"error": 500, "message": "DB error %s" % type(e).__name__}] return [{"error": 500, "message": "DB error %s" % type(e).__name__}]
def insertLastReceivedId(self, client, id):
    """Record `id` as the last event id delivered to `client`.

    Appends a row to the last_events bookkeeping table, retrying the
    statement via the repeat/attempt machinery on transient DB errors.
    """
    self.log.debug("insertLastReceivedId: id %i for client %i(%s)" % (id, client.id, client.hostname))
    sql = "INSERT INTO last_events(client_id, event_id, timestamp) VALUES(%s, %s, NOW())"
    for attempt in self.repeat():
        with attempt as db:
            db.query(sql, (client.id, id))
def getLastEventId(self):
    """Return the highest event id present in the events table.

    Yields 1 when the table is empty (MAX(id) comes back as NULL).
    """
    for attempt in self.repeat():
        with attempt as db:
            result = db.query("SELECT MAX(id) as id FROM events").fetchall()
            return result[0]["id"] or 1
def getLastReceivedId(self, client):
    """Return the id of the last event delivered to `client`.

    Returns None when the client has no last_events record yet
    (typically its very first access).
    """
    for attempt in self.repeat():
        with attempt as db:
            rows = db.query(
                "SELECT event_id as id FROM last_events WHERE client_id = %s ORDER BY last_events.id DESC LIMIT 1",
                (client.id,)).fetchall()
            if rows:
                last_id = rows[0]["id"]
                self.log.debug("getLastReceivedId: id %i for client %i(%s)" % (last_id, client.id, client.hostname))
            else:
                last_id = None
                self.log.debug("getLastReceivedId: probably first access, unable to get id for client %i(%s)" % (
                    client.id, client.hostname))
            return last_id
def load_maps(self):
    """Rebuild the tags and categories tables from the in-memory
    tagmap/catmap dictionaries.

    Both tables are wiped and repopulated inside one `with self`
    database context.
    """
    with self as db:
        db.query("DELETE FROM tags")
        for tag_name, tag_id in self.tagmap.items():
            db.query("INSERT INTO tags(id, tag) VALUES (%s, %s)", (tag_id, tag_name))
        db.query("DELETE FROM categories")
        for cat_subcat, cat_id in self.catmap.items():
            # Map keys look like "Category" or "Category.Subcategory";
            # store both pieces plus the combined key.
            parts = cat_subcat.split(".", 1)
            subcategory = parts[1] if len(parts) > 1 else None
            db.query(
                "INSERT INTO categories(id, category, subcategory, cat_subcat) VALUES (%s, %s, %s, %s)",
                (cat_id, parts[0], subcategory, cat_subcat))
def purge_lastlog(self, days):
    """Delete last_events rows older than `days`, keeping each client's
    newest entry.

    Returns the number of rows removed.
    """
    sql = (
        "DELETE FROM last_events "
        " USING last_events LEFT JOIN ("
        " SELECT MAX(id) AS last FROM last_events"
        " GROUP BY client_id"
        " ) AS maxids ON last=id"
        " WHERE timestamp < DATE_SUB(CURDATE(), INTERVAL %s DAY) AND last IS NULL")
    with self as db:
        result = db.query(sql, (days,))
        return result.rowcount
def purge_events(self, days):
    """Delete events received more than `days` days ago, together with
    their category and tag mapping rows.

    Returns the number of events removed (0 when nothing is old enough).
    """
    with self as db:
        # Find the newest event id that falls behind the cutoff; ids are
        # monotonically increasing, so everything <= it can go.
        cutoff = db.query(
            "SELECT MAX(id) as id"
            " FROM events"
            " WHERE received < DATE_SUB(CURDATE(), INTERVAL %s DAY)",
            (days,)).fetchall()[0]["id"]
        if cutoff is None:
            return 0
        removed = db.query("DELETE FROM events WHERE id <= %s", (cutoff,)).rowcount
        db.query("DELETE FROM event_category_mapping WHERE event_id <= %s", (cutoff,))
        db.query("DELETE FROM event_tag_mapping WHERE event_id <= %s", (cutoff,))
        return removed
def expose(read=1, write=0, debug=0): def expose(read=1, write=0, debug=0):
def expose_deco(meth): def expose_deco(meth):
...@@ -829,7 +864,7 @@ def expose(read=1, write=0, debug=0): ...@@ -829,7 +864,7 @@ def expose(read=1, write=0, debug=0):
meth.write = write meth.write = write
meth.debug = debug meth.debug = debug
if not hasattr(meth, "arguments"): if not hasattr(meth, "arguments"):
meth.arguments = meth.func_code.co_varnames[:meth.func_code.co_argcount] meth.arguments = get_method_params(meth)
return meth return meth
return expose_deco return expose_deco
...@@ -842,11 +877,6 @@ class Server(ObjectBase): ...@@ -842,11 +877,6 @@ class Server(ObjectBase):
self.auth = auth self.auth = auth
self.handler = handler self.handler = handler
def __str__(self):
return "%s(req=%s, auth=%s, handler=%s)" % (type(self).__name__, type(self.req).__name__, type(self.auth).__name__, type(self.handler).__name__)
def sanitize_args(self, path, func, args, exclude=["self", "post"]): def sanitize_args(self, path, func, args, exclude=["self", "post"]):
# silently remove internal args, these should never be used # silently remove internal args, these should never be used
# but if somebody does, we do not expose them by error message # but if somebody does, we do not expose them by error message
...@@ -866,7 +896,6 @@ class Server(ObjectBase): ...@@ -866,7 +896,6 @@ class Server(ObjectBase):
return args return args
def wsgi_app(self, environ, start_response, exc_info=None): def wsgi_app(self, environ, start_response, exc_info=None):
path = environ.get("PATH_INFO", "").lstrip("/") path = environ.get("PATH_INFO", "").lstrip("/")
self.req.reset(env=environ, path=path) self.req.reset(env=environ, path=path)
...@@ -894,8 +923,20 @@ class Server(ObjectBase): ...@@ -894,8 +923,20 @@ class Server(ObjectBase):
args = self.sanitize_args(path, method, args) args = self.sanitize_args(path, method, args)
# Based on RFC2616, section 4.4 we SHOULD respond with 400 (bad request) or 411
# (length required) if content length was not specified. We choose not to, to
# preserve compatibility with clients deployed in the wild, which use POST for
# all requests (even those without payload, with no specified content length).
# According to PEP3333, section "Input and Error Streams", the application SHOULD
# NOT attempt to read more data than specified by CONTENT_LENGTH. As stated in
# section "environ Variables", CONTENT_LENGTH may be empty (string) or absent.
try: try:
post_data = environ['wsgi.input'].read() content_length = int(environ.get('CONTENT_LENGTH', 0))
except ValueError:
content_length = 0
try:
post_data = environ['wsgi.input'].read(content_length)
except: except:
raise self.req.error(message="Data read error.", error=408, exc=sys.exc_info()) raise self.req.error(message="Data read error.", error=408, exc=sys.exc_info())
...@@ -913,10 +954,15 @@ class Server(ObjectBase): ...@@ -913,10 +954,15 @@ class Server(ObjectBase):
# Make sure everything is properly encoded - JSON and various function # Make sure everything is properly encoded - JSON and various function
# may spit out unicode instead of str and it gets propagated up (str # may spit out unicode instead of str and it gets propagated up (str
# + unicode = unicode). However, the right thing would be to be unicode # + unicode = unicode).
# correct among whole source and always decode on input (json module # For Python2 the right thing would be to be unicode correct among whole
# does that for us) and on output here. # source and always decode on input (json module does that for us) and
if isinstance(status, unicode): # on output here.
# For Python3 strings are internally unicode so no decoding on input is
# necessary. For output, "status" must be unicode string, "output" must
# be encoded bytes array, what is done here. Important: for Python 3 we
# define: unicode = str
if isinstance(status, unicode) and sys.version_info[0] < 3:
status = status.encode("utf-8") status = status.encode("utf-8")
if isinstance(output, unicode): if isinstance(output, unicode):
output = output.encode("utf-8") output = output.encode("utf-8")
...@@ -925,19 +971,17 @@ class Server(ObjectBase): ...@@ -925,19 +971,17 @@ class Server(ObjectBase):
self.req.reset() self.req.reset()
return [output] return [output]
__call__ = wsgi_app __call__ = wsgi_app
def json_wrapper(method): def json_wrapper(method):
def meth_deco(self, post, **args): def meth_deco(self, post, **args):
if "events" in method.func_code.co_varnames[0:method.func_code.co_argcount]: if "events" in get_method_params(method):
try: try:
events = json.loads(post) if post else None events = json.loads(post.decode('utf-8')) if post else None
except Exception as e: except Exception as e:
raise self.req.error(message="Deserialization error.", error=400, raise self.req.error(
message="Deserialization error.", error=400,
exc=sys.exc_info(), args=post, parser=str(e)) exc=sys.exc_info(), args=post, parser=str(e))
if events: if events:
args["events"] = events args["events"] = events
...@@ -949,21 +993,21 @@ def json_wrapper(method): ...@@ -949,21 +993,21 @@ def json_wrapper(method):
# which could (although shouldn't) appear in handler code # which could (although shouldn't) appear in handler code
output = json.dumps(result, default=lambda v: str(v)) output = json.dumps(result, default=lambda v: str(v))
except Exception as e: except Exception as e:
raise self.req.error(message="Serialization error", error=500, raise self.req.error(message="Serialization error", error=500, exc=sys.exc_info(), args=str(result))
exc=sys.exc_info(), args=str(result))
return [('Content-type', 'application/json')], output return [('Content-type', 'application/json')], output
try: try:
meth_deco.arguments = method.arguments meth_deco.arguments = method.arguments
except AttributeError: except AttributeError:
meth_deco.arguments = method.func_code.co_varnames[:method.func_code.co_argcount] meth_deco.arguments = get_method_params(method)
return meth_deco return meth_deco
class WardenHandler(ObjectBase): class WardenHandler(ObjectBase):
def __init__(self, req, log, validator, db, auth, def __init__(
self, req, log, validator, db, auth,
send_events_limit=500, get_events_limit=1000, send_events_limit=500, get_events_limit=1000,
description=None): description=None):
...@@ -975,25 +1019,19 @@ class WardenHandler(ObjectBase): ...@@ -975,25 +1019,19 @@ class WardenHandler(ObjectBase):
self.get_events_limit = get_events_limit self.get_events_limit = get_events_limit
self.description = description self.description = description
def __str__(self):
return "%s(req=%s, validator=%s, db=%s, send_events_limit=%s, get_events_limit=%s, description=\"%s\")" % (
type(self).__name__, type(self.req).__name__, type(self.validator).__name__, type(self.db).__name__,
self.get_events_limit, self.send_events_limit, self.description)
@expose(read=1, debug=1) @expose(read=1, debug=1)
@json_wrapper @json_wrapper
def getDebug(self): def getDebug(self):
return { return {
"environment": self.req.env, "environment": self.req.env,
"client": self.req.client.__dict__, "client": self.req.client._asdict(),
"database": self.db.get_debug(), "database": self.db.get_debug(),
"system": { "system": {
"python": sys.version,
"uname": os.uname() "uname": os.uname()
}, },
"process": { "process": {
"cwd": os.getcwdu(), "cwd": unicode(os.getcwd()),
"pid": os.getpid(), "pid": os.getpid(),
"ppid": os.getppid(), "ppid": os.getppid(),
"pgrp": os.getpgrp(), "pgrp": os.getpgrp(),
...@@ -1005,7 +1043,6 @@ class WardenHandler(ObjectBase): ...@@ -1005,7 +1043,6 @@ class WardenHandler(ObjectBase):
} }
} }
@expose(read=1) @expose(read=1)
@json_wrapper @json_wrapper
def getInfo(self): def getInfo(self):
...@@ -1018,10 +1055,10 @@ class WardenHandler(ObjectBase): ...@@ -1018,10 +1055,10 @@ class WardenHandler(ObjectBase):
info["description"] = self.description info["description"] = self.description
return info return info
@expose(read=1) @expose(read=1)
@json_wrapper @json_wrapper
def getEvents(self, id=None, count=None, def getEvents(
self, id=None, count=None,
cat=None, nocat=None, cat=None, nocat=None,
tag=None, notag=None, tag=None, notag=None,
group=None, nogroup=None): group=None, nogroup=None):
...@@ -1035,7 +1072,7 @@ class WardenHandler(ObjectBase): ...@@ -1035,7 +1072,7 @@ class WardenHandler(ObjectBase):
# If client was already here, fetch server notion of his last id # If client was already here, fetch server notion of his last id
try: try:
id = self.db.getLastReceivedId(self.req.client) id = self.db.getLastReceivedId(self.req.client)
except Exception, e: except Exception as e:
self.log.info("cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e)) self.log.info("cannot getLastReceivedId - " + type(e).__name__ + ": " + str(e))
if id is None: if id is None:
...@@ -1059,6 +1096,7 @@ class WardenHandler(ObjectBase): ...@@ -1059,6 +1096,7 @@ class WardenHandler(ObjectBase):
if self.get_events_limit: if self.get_events_limit:
count = min(count, self.get_events_limit) count = min(count, self.get_events_limit)
count = max(0, count)
res = self.db.fetch_events(self.req.client, id, count, cat, nocat, tag, notag, group, nogroup) res = self.db.fetch_events(self.req.client, id, count, cat, nocat, tag, notag, group, nogroup)
...@@ -1068,18 +1106,16 @@ class WardenHandler(ObjectBase): ...@@ -1068,18 +1106,16 @@ class WardenHandler(ObjectBase):
return res return res
def check_node(self, event, name):
    """Verify that the event's first Node Name matches the saving client.

    Returns [] when the check passes, otherwise a one-element list with
    an error dict (HTTP-style code 422).
    """
    try:
        node_name = event['Node'][0]['Name'].lower()
    except (KeyError, TypeError, IndexError):
        # Missing/malformed Node attribute (or an empty Node list)
        return [{"error": 422, "message": "Event does not bear valid Node attribute"}]
    if node_name != name:
        return [{"error": 422, "message": "Node does not correspond with saving client"}]
    return []
def add_event_nums(self, ilist, events, errlist): def add_event_nums(self, ilist, events, errlist):
for err in errlist: for err in errlist:
err.setdefault("events", []).extend(ilist) err.setdefault("events", []).extend(ilist)
...@@ -1088,12 +1124,11 @@ class WardenHandler(ObjectBase): ...@@ -1088,12 +1124,11 @@ class WardenHandler(ObjectBase):
event = events[i] event = events[i]
try: try:
id = event["ID"] id = event["ID"]
except (AttributeError, TypeError, ValueError): except (KeyError, TypeError, ValueError):
id = None id = None
ev_ids.append(id) ev_ids.append(id)
return errlist return errlist
@expose(write=1) @expose(write=1)
@json_wrapper @json_wrapper
def sendEvents(self, events=[]): def sendEvents(self, events=[]):
...@@ -1102,10 +1137,8 @@ class WardenHandler(ObjectBase): ...@@ -1102,10 +1137,8 @@ class WardenHandler(ObjectBase):
errs = [] errs = []
if len(events) > self.send_events_limit: if len(events) > self.send_events_limit:
errs.extend( errs.extend(self.add_event_nums(range(self.send_events_limit, len(events)), events, [
self.add_event_nums(range(self.send_events_limit, len(events)), events, {"error": 507, "message": "Too much events in one batch.", "send_events_limit": self.send_events_limit}]))
[{"error": 507, "message": "Too much events in one batch.",
"send_events_limit": self.send_events_limit}]))
saved = 0 saved = 0
events_tosend = [] events_tosend = []
...@@ -1122,15 +1155,20 @@ class WardenHandler(ObjectBase): ...@@ -1122,15 +1155,20 @@ class WardenHandler(ObjectBase):
errs.extend(self.add_event_nums([i], events, node_errs)) errs.extend(self.add_event_nums([i], events, node_errs))
continue continue
if self.req.client.test and not 'Test' in event.get('Category', []): if self.req.client.test and 'Test' not in event.get('Category', []):
errs.extend(self.add_event_nums([i], events, [{"error": 422, errs.extend(
self.add_event_nums([i], events, [{
"error": 422,
"message": "You're allowed to send only messages, containing \"Test\" among categories.", "message": "You're allowed to send only messages, containing \"Test\" among categories.",
"categories": event.get('Category', [])}])) "categories": event.get('Category', [])}]))
continue continue
raw_event = json.dumps(event) raw_event = json.dumps(event)
if len(raw_event) >= self.db.event_size_limit: if len(raw_event) >= self.db.event_size_limit:
errs.extend(self.add_event_nums([i], events, [{"error": 413, "message": "Event too long (>%i B)" % self.event_size_limit}])) errs.extend(
self.add_event_nums([i], events, [
{"error": 413, "message": "Event too long (>%i B)" % self.db.event_size_limit}
]))
continue continue
events_tosend.append(event) events_tosend.append(event)
...@@ -1151,32 +1189,32 @@ class WardenHandler(ObjectBase): ...@@ -1151,32 +1189,32 @@ class WardenHandler(ObjectBase):
return {"saved": saved} return {"saved": saved}
def read_ini(path):
    """Parse an INI configuration file into a nested dict.

    Section names are lowercased; option names are lowercased by
    RawConfigParser itself.  Raises Error when the file cannot be read.
    """
    parser = ConfigParser.RawConfigParser()
    loaded = parser.read(path)
    if not loaded or path not in loaded:
        # We don't have loggin yet, hopefully this will go into webserver log
        raise Error(message="Unable to read config: %s" % path)
    data = {}
    for section in parser.sections():
        section_key = section.lower()
        for option in parser.options(section):
            data.setdefault(section_key, {})[option] = parser.get(section, option)
    return data
def read_cfg(path):
    """Parse a JSON configuration file that may contain comment lines.

    Lines starting (after whitespace) with '#' or '//' are dropped
    before JSON parsing; all section and option keys are lowercased.
    """
    with io.open(path, "r", encoding="utf-8") as f:
        stripped = "\n".join(line for line in f if not line.lstrip().startswith(("#", "//")))
    conf = json.loads(stripped)
    # Lowercase section and option keys
    return dict(
        (sect.lower(), dict((subkey.lower(), val) for subkey, val in subsect.items()))
        for sect, subsect in conf.items())
...@@ -1208,7 +1246,7 @@ section_order = ("log", "db", "auth", "validator", "handler", "server") ...@@ -1208,7 +1246,7 @@ section_order = ("log", "db", "auth", "validator", "handler", "server")
section_def = { section_def = {
"log": [FileLogger, SysLogger], "log": [FileLogger, SysLogger],
"db": [MySQL], "db": [MySQL],
"auth": [X509Authenticator, PlainAuthenticator, X509NameAuthenticator, X509MixMatchAuthenticator], "auth": [X509NameAuthenticator, PlainAuthenticator, X509Authenticator, X509MixMatchAuthenticator],
"validator": [JSONSchemaValidator, NoValidator], "validator": [JSONSchemaValidator, NoValidator],
"handler": [WardenHandler], "handler": [WardenHandler],
"server": [Server] "server": [Server]
...@@ -1264,7 +1302,7 @@ param_def = { ...@@ -1264,7 +1302,7 @@ param_def = {
"password": {"type": "str", "default": ""}, "password": {"type": "str", "default": ""},
"dbname": {"type": "str", "default": "warden3"}, "dbname": {"type": "str", "default": "warden3"},
"port": {"type": "natural", "default": 3306}, "port": {"type": "natural", "default": 3306},
"retry_pause": {"type": "natural", "default": 5}, "retry_pause": {"type": "natural", "default": 3},
"retry_count": {"type": "natural", "default": 3}, "retry_count": {"type": "natural", "default": 3},
"event_size_limit": {"type": "natural", "default": 5*1024*1024}, "event_size_limit": {"type": "natural", "default": 5*1024*1024},
"catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_mysql.json")}, "catmap_filename": {"type": "filepath", "default": path.join(path.dirname(__file__), "catmap_mysql.json")},
...@@ -1293,7 +1331,6 @@ def build_server(conf, section_order=section_order, section_def=section_def, par ...@@ -1293,7 +1331,6 @@ def build_server(conf, section_order=section_order, section_def=section_def, par
objects = {} # Already initialized objects objects = {} # Already initialized objects
# Functions for validation and conversion of config values # Functions for validation and conversion of config values
def facility(name): def facility(name):
return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper())) return int(getattr(logging.handlers.SysLogHandler, "LOG_" + name.upper()))
...@@ -1314,7 +1351,6 @@ def build_server(conf, section_order=section_order, section_def=section_def, par ...@@ -1314,7 +1351,6 @@ def build_server(conf, section_order=section_order, section_def=section_def, par
def obj(name): def obj(name):
return objects[name.lower()] return objects[name.lower()]
# Typedef dictionary # Typedef dictionary
conv_dict = { conv_dict = {
"facility": facility, "facility": facility,
...@@ -1325,9 +1361,8 @@ def build_server(conf, section_order=section_order, section_def=section_def, par ...@@ -1325,9 +1361,8 @@ def build_server(conf, section_order=section_order, section_def=section_def, par
"str": str "str": str
} }
def init_obj(sect_name): def init_obj(sect_name):
config = conf.get(sect_name, {}) config = dict(conf.get(sect_name, {}))
sect_name = sect_name.lower() sect_name = sect_name.lower()
sect_def = section_def[sect_name] sect_def = section_def[sect_name]
...@@ -1354,7 +1389,7 @@ def build_server(conf, section_order=section_order, section_def=section_def, par ...@@ -1354,7 +1389,7 @@ def build_server(conf, section_order=section_order, section_def=section_def, par
# Process parameters # Process parameters
kwargs = {} kwargs = {}
for name, definition in params.iteritems(): for name, definition in params.items():
raw_val = config.get(name, definition["default"]) raw_val = config.get(name, definition["default"])
try: try:
type_callable = conv_dict[definition["type"]] type_callable = conv_dict[definition["type"]]
...@@ -1398,12 +1433,11 @@ def build_server(conf, section_order=section_order, section_def=section_def, par ...@@ -1398,12 +1433,11 @@ def build_server(conf, section_order=section_order, section_def=section_def, par
return objects["server"] return objects["server"]
# Command line utilities # Command line utilities
def check_config():
    """Command line action: report success after config parsing.

    Reaching this point means the server object was built without error,
    so there is nothing left to verify.
    """
    print("Looks clear.", file=sys.stderr)
    return 0
...@@ -1413,7 +1447,8 @@ def list_clients(id=None): ...@@ -1413,7 +1447,8 @@ def list_clients(id=None):
col_width = [max(len(val) for val in col) for col in zip(*(lines+[Client._fields]))] col_width = [max(len(val) for val in col) for col in zip(*(lines+[Client._fields]))]
divider = ["-" * l for l in col_width] divider = ["-" * l for l in col_width]
for line in [Client._fields, divider] + lines: for line in [Client._fields, divider] + lines:
print " ".join([val.ljust(width) for val, width in zip(line, col_width)]) print(" ".join([val.ljust(width) for val, width in zip(line, col_width)]))
return 0
def register_client(**kwargs): def register_client(**kwargs):
...@@ -1423,7 +1458,7 @@ def register_client(**kwargs): ...@@ -1423,7 +1458,7 @@ def register_client(**kwargs):
if kwargs["write"] is None: kwargs["write"] = 0 if kwargs["write"] is None: kwargs["write"] = 0
if kwargs["debug"] is None: kwargs["debug"] = 0 if kwargs["debug"] is None: kwargs["debug"] = 0
if kwargs["test"] is None: kwargs["test"] = 1 if kwargs["test"] is None: kwargs["test"] = 1
modify_client(id=None, **kwargs) return modify_client(id=None, **kwargs)
def modify_client(**kwargs): def modify_client(**kwargs):
...@@ -1433,7 +1468,7 @@ def modify_client(**kwargs): ...@@ -1433,7 +1468,7 @@ def modify_client(**kwargs):
return False return False
if hostname.endswith("."): # A single trailing dot is legal if hostname.endswith("."): # A single trailing dot is legal
hostname = hostname[:-1] # strip exactly one dot from the right, if present hostname = hostname[:-1] # strip exactly one dot from the right, if present
disallowed = re.compile("[^A-Z\d-]", re.IGNORECASE) disallowed = re.compile(r"[^A-Z\d-]", re.IGNORECASE)
return all( # Split by labels and verify individually return all( # Split by labels and verify individually
(label and len(label) <= 63 # length is within proper range (label and len(label) <= 63 # length is within proper range
and not label.startswith("-") and not label.endswith("-") # no bordering hyphens and not label.startswith("-") and not label.endswith("-") # no bordering hyphens
...@@ -1441,55 +1476,54 @@ def modify_client(**kwargs): ...@@ -1441,55 +1476,54 @@ def modify_client(**kwargs):
for label in hostname.split(".")) for label in hostname.split("."))
def isValidNSID(nsid):
    # Dotted reverse-path notation: identifier-like labels ([a-zA-Z_]
    # followed by word chars) separated by single dots.  Returns the
    # match object (truthy) or None.
    return re.match(r"^(?:[a-zA-Z_][a-zA-Z0-9_]*\.)*[a-zA-Z_][a-zA-Z0-9_]*$", nsid)
def isValidEmail(mail):
    # Accept a comma separated list of addresses; each may be a bare
    # address or carry a display name ("John Doe <john@example.org>").
    allowed = re.compile(r"(^[a-zA-Z0-9_ .%!+-]*(?=<.*>))?(^|(<(?=.*(>))))[a-zA-Z0-9_.%!+-]+@[a-zA-Z0-9-.]+\4?$")  # just basic check
    return all(allowed.match(address.strip()) for address in mail.split(','))
def isValidID(id):
    # A client id is valid exactly when the database knows a client
    # with that id.
    client = server.handler.db.get_clients(id)
    return bool(client)
if kwargs["name"] is not None: if kwargs["name"] is not None:
kwargs["name"] = kwargs["name"].lower() kwargs["name"] = kwargs["name"].lower()
if not isValidNSID(kwargs["name"]): if not isValidNSID(kwargs["name"]):
print >>sys.stderr, "Invalid client name \"%s\"." % kwargs["name"] print("Invalid client name \"%s\"." % kwargs["name"], file=sys.stderr)
return 254 return 254
if kwargs["hostname"] is not None: if kwargs["hostname"] is not None:
kwargs["hostname"] = kwargs["hostname"].lower() kwargs["hostname"] = kwargs["hostname"].lower()
if not isValidHostname(kwargs["hostname"]): if not isValidHostname(kwargs["hostname"]):
print >>sys.stderr, "Invalid hostname \"%s\"." % kwargs["hostname"] print("Invalid hostname \"%s\"." % kwargs["hostname"], file=sys.stderr)
return 254 return 253
if kwargs["requestor"] is not None and not isValidEmail(kwargs["requestor"]): if kwargs["requestor"] is not None and not isValidEmail(kwargs["requestor"]):
print >>sys.stderr, "Invalid requestor email \"%s\"." % kwargs["requestor"] print("Invalid requestor email \"%s\"." % kwargs["requestor"], file=sys.stderr)
return 254 return 252
if kwargs["id"] is not None and not isValidID(kwargs["id"]): if kwargs["id"] is not None and not isValidID(kwargs["id"]):
print >>sys.stderr, "Invalid id \"%s\"." % kwargs["id"] print("Invalid id \"%s\"." % kwargs["id"], file=sys.stderr)
return 254 return 251
for c in server.handler.db.get_clients(): for c in server.handler.db.get_clients():
if kwargs["name"] is not None and kwargs["name"].lower() == c.name: if kwargs["name"] is not None and kwargs["name"].lower() == c.name:
print >>sys.stderr, "Clash with existing name: %s" % str(c) print("Clash with existing name: %s" % str(c), file=sys.stderr)
return 254 return 250
if kwargs["secret"] is not None and kwargs["secret"] == c.secret: if kwargs["secret"] is not None and kwargs["secret"] == c.secret:
print >>sys.stderr, "Clash with existing secret: %s" % str(c) print("Clash with existing secret: %s" % str(c), file=sys.stderr)
return 254 return 249
newid = server.handler.db.add_modify_client(**kwargs) newid = server.handler.db.add_modify_client(**kwargs)
list_clients(id=newid) return list_clients(id=newid)
def load_maps():
    """Command line action: repopulate category/tag mapping tables in the DB."""
    server.handler.db.load_maps()
    return 0
def purge(days=30, lastlog=None, events=None): def purge(days=30, lastlog=None, events=None):
...@@ -1497,50 +1531,62 @@ def purge(days=30, lastlog=None, events=None): ...@@ -1497,50 +1531,62 @@ def purge(days=30, lastlog=None, events=None):
lastlog = events = True lastlog = events = True
if lastlog: if lastlog:
count = server.handler.db.purge_lastlog(days) count = server.handler.db.purge_lastlog(days)
print "Purged %d lastlog entries." % count print("Purged %d lastlog entries." % count)
if events: if events:
count = server.handler.db.purge_events(days) count = server.handler.db.purge_events(days)
print "Purged %d events." % count print("Purged %d events." % count)
return 0
def add_client_args(subargp, mod=False):
    """Register the common client options on *subargp*.

    Shared by the "register" and "modify" subcommands.  With ``mod=True``
    a mandatory ``--id`` option is added and the identifying fields
    (name, hostname, requestor) become optional, since an existing entry
    is being edited rather than created.

    Note: the parsers are built with ``add_help=False`` elsewhere, which
    frees ``-h`` for ``--hostname``; help is available via ``--help``.
    """
    subargp.add_argument("--help", action="help", help="show this help message and exit")
    if mod:
        subargp.add_argument(
            "-i", "--id", required=True, type=int,
            help="client id")
    subargp.add_argument(
        "-n", "--name", required=not mod,
        help="client name (in dotted reverse path notation)")
    subargp.add_argument(
        "-h", "--hostname", required=not mod,
        help="client FQDN hostname")
    subargp.add_argument(
        "-r", "--requestor", required=not mod,
        help="requestor email")
    subargp.add_argument(
        "-s", "--secret",
        help="authentication token (use explicit empty string to disable)")
    subargp.add_argument(
        "--note",
        help="client freetext description")
    # Each boolean flag is a tri-state: 1 (--flag), 0 (--noflag), or None
    # (unspecified, so add_modify_client keeps/derives the default).
    reg_valid = subargp.add_mutually_exclusive_group(required=False)
    reg_valid.add_argument(
        "--valid", action="store_const", const=1, default=None,
        help="valid client (default)")
    reg_valid.add_argument("--novalid", action="store_const", const=0, dest="valid", default=None)
    reg_read = subargp.add_mutually_exclusive_group(required=False)
    reg_read.add_argument(
        "--read", action="store_const", const=1, default=None,
        help="client is allowed to read (default)")
    reg_read.add_argument("--noread", action="store_const", const=0, dest="read", default=None)
    reg_write = subargp.add_mutually_exclusive_group(required=False)
    reg_write.add_argument(
        "--nowrite", action="store_const", const=0, dest="write", default=None,
        help="client is allowed to send (default - no)")
    reg_write.add_argument("--write", action="store_const", const=1, default=None)
    reg_debug = subargp.add_mutually_exclusive_group(required=False)
    reg_debug.add_argument(
        "--nodebug", action="store_const", const=0, dest="debug", default=None,
        help="client is allowed receive debug output (default - no)")
    reg_debug.add_argument("--debug", action="store_const", const=1, default=None)
    reg_test = subargp.add_mutually_exclusive_group(required=False)
    reg_test.add_argument(
        "--test", action="store_const", const=1, default=None,
        help="client is yet in testing phase (default - yes)")
    reg_test.add_argument("--notest", action="store_const", const=0, dest="test", default=None)
def get_args():
    """Build the command-line parser for the server CLI and parse argv.

    Each subcommand stores its handler function on the "command"
    attribute via ``set_defaults``; the ``__main__`` block dispatches on
    it.  ``dest="command"`` plus ``subargp.required = True`` make a
    missing subcommand a parse error on Python 3 (where subparsers are
    optional by default) instead of a later AttributeError.
    """
    import argparse
    argp = argparse.ArgumentParser(
        description="Warden server " + VERSION, add_help=False)
    argp.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    argp.add_argument(
        "-c", "--config",
        help="path to configuration file")
    subargp = argp.add_subparsers(title="commands", dest="command")
    subargp.required = True

    subargp_check = subargp.add_parser(
        "check", add_help=False,
        description="Try to setup server based on configuration file.",
        help="check configuration")
    subargp_check.set_defaults(command=check_config)
    subargp_check.add_argument(
        "--help", action="help",
        help="show this help message and exit")

    subargp_reg = subargp.add_parser(
        "register", add_help=False,
        description="Add new client registration entry.",
        help="register new client")
    subargp_reg.set_defaults(command=register_client)
    add_client_args(subargp_reg)

    subargp_mod = subargp.add_parser(
        "modify", add_help=False,
        description="Modify details of client registration entry.",
        help="modify client registration")
    subargp_mod.set_defaults(command=modify_client)
    add_client_args(subargp_mod, mod=True)

    subargp_list = subargp.add_parser(
        "list", add_help=False,
        description="List details of client registration entries.",
        help="list registered clients")
    subargp_list.set_defaults(command=list_clients)
    subargp_list.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_list.add_argument(
        "--id", action="store", type=int,
        help="client id", default=None)

    subargp_purge = subargp.add_parser(
        "purge", add_help=False,
        description=(
            "Purge old events or lastlog records."
            " Note that lastlog purge retains at least one newest record for each"
            " client, even if it is more than number of 'days' old."),
        help="purge old events or lastlog records")
    subargp_purge.set_defaults(command=purge)
    subargp_purge.add_argument(
        "--help", action="help",
        help="show this help message and exit")
    subargp_purge.add_argument(
        "-l", "--lastlog", action="store_true", dest="lastlog", default=None,
        help="purge lastlog records")
    subargp_purge.add_argument(
        "-e", "--events", action="store_true", dest="events", default=None,
        help="purge events")
    subargp_purge.add_argument(
        "-d", "--days", action="store", dest="days", type=int, default=30,
        help="records older than 'days' back from today will get purged")

    subargp_loadmaps = subargp.add_parser(
        "loadmaps", add_help=False,
        description=(
            "Load 'categories' and 'tags' table from 'catmap_mysql.json' and 'tagmap_mysql.json'."
            " Note that this is NOT needed for server at all, load them into db at will,"
            " should you need to run your own specific SQL queries on data directly."
            " Note also that previous content of both tables will be lost."),
        help="load catmap and tagmap into db")
    subargp_loadmaps.set_defaults(command=load_maps)
    subargp_loadmaps.add_argument(
        "--help", action="help",
        help="show this help message and exit")

    return argp.parse_args()
...@@ -1622,6 +1685,6 @@ if __name__=="__main__": ...@@ -1622,6 +1685,6 @@ if __name__=="__main__":
del subargs["command"] del subargs["command"]
del subargs["config"] del subargs["config"]
if not server or server is fallback_wsgi: if not server or server is fallback_wsgi:
print >>sys.stderr, "Failed initialization, check configured log targets for reasons." print("Failed initialization, check configured log targets for reasons.", file=sys.stderr)
sys.exit(255) sys.exit(255)
sys.exit(command(**subargs)) sys.exit(command(**subargs))