Skip to content
Snippets Groups Projects
Commit 8a7decfb authored by Daniel Studený's avatar Daniel Studený
Browse files

Copied Bodik's version of Cowrie connector from GitHub HaaS repository

parent be14b504
No related branches found
No related tags found
No related merge requests found
{
"warden": "warden_client.cfg",
"name": "cz.cesnet.server.kippo",
"secret": "",
"anonymised": "no",
"target_net": "195.113.0.0/16",
"dbhost": "localhost",
"dbuser": "kippo",
"dbpass": "kippopass",
"dbname": "kippo",
"dbport": 3306,
"awin": 5
}
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# na motivy kostejova romanu
from warden_client import Client, Error, read_cfg, format_timestamp
from time import time, gmtime, strftime
from math import trunc
from uuid import uuid4
import MySQLdb as my
import MySQLdb.cursors as mycursors
import subprocess, base64
import json
import string
import os
import sys
import warden_utils_flab as w3u
# warden client startup
# Load the connector-specific config and the shared Warden client config;
# the connector's 'name' overrides whatever the Warden config carries.
aconfig = read_cfg('warden_client_cowrie.cfg')
wconfig = read_cfg('warden_client.cfg')
aclient_name = aconfig['name']
wconfig['name'] = aclient_name
# Anonymisation policy: 'anonymised' is a yes/no/omit flag, 'target_net' is
# the network reported instead of the real target when anonymising.
aanonymised = aconfig['anonymised']
aanonymised_net = aconfig['target_net']
# NOTE(review): this compares aanonymised_net (a CIDR string) against
# 'omit', which elsewhere (IDEA_fill_addresses) is a value of the
# *aanonymised* flag, and the else-branch stores a network string into the
# flag variable. Possibly 'aanonymised == "omit"' was intended -- confirm
# against the upstream connector before changing.
aanonymised = aanonymised if (aanonymised_net != '0.0.0.0/0') or (aanonymised_net == 'omit') else '0.0.0.0/0'
# Aggregation window: configured in minutes, used in seconds below.
awin = aconfig['awin'] * 60
wclient = Client(**wconfig)
def gen_event_idea_cowrie_info(detect_time, src_ip, dst_ip, win_start_time, win_end_time, aggr_win, conn_count):
    """Build an IDEA0 'Attempt.Login' event summarising the SSH login
    attempts seen from one source IP within the aggregation window."""
    evt = {}
    evt["Format"] = "IDEA0"
    evt["ID"] = str(uuid4())            # fresh unique event id
    evt["DetectTime"] = detect_time
    evt["WinStartTime"] = win_start_time
    evt["WinEndTime"] = win_end_time
    evt["Category"] = ["Attempt.Login"]
    evt["Note"] = "SSH login attempt"
    evt["ConnCount"] = conn_count
    evt["Source"] = [{}]                # populated by IDEA_fill_addresses
    evt["Target"] = [{"Proto": ["tcp", "ssh"], "Port": [22]}]
    evt["Node"] = [{
        "Name": aclient_name,
        "Type": ["Connection", "Honeypot", "Recon"],
        "SW": ["Cowrie"],
        # aggregation window length rendered as HH:MM:SS
        "AggrWin": strftime("%H:%M:%S", gmtime(aggr_win)),
    }]
    return w3u.IDEA_fill_addresses(evt, src_ip, dst_ip, aanonymised, aanonymised_net)
def gen_event_idea_cowrie_auth(detect_time, src_ip, dst_ip, username, password, sessionid):
    """Build an IDEA0 event reporting a successful SSH login on the
    honeypot, attaching the credentials the attacker used.

    detect_time -- formatted timestamp of the auth record
    src_ip/dst_ip -- attacker and sensor addresses (filled by IDEA_fill_addresses)
    username/password/sessionid -- credentials and cowrie session id for the attach
    """
    event = {
        "Format": "IDEA0",
        "ID": str(uuid4()),
        "DetectTime": detect_time,
        "Category": ["Information.UnauthorizedAccess"],
        # bugfix: was misspelled "SSH successfull attempt"
        "Note": "SSH successful attempt",
        "ConnCount": 1,
        "Source": [{}],
        "Target": [{ "Proto": ["tcp", "ssh"], "Port" : [22] }],
        "Node": [
            {
                "Name": aclient_name,
                "Type": ["Honeypot", "Connection", "Auth"],
                "SW": ["Cowrie"],
            }
        ],
        # credentials used by the attacker, kept for analyst inspection
        "Attach": [{ "sessionid": sessionid, "username": username, "password": password }]
    }
    event = w3u.IDEA_fill_addresses(event, src_ip, dst_ip, aanonymised, aanonymised_net)
    return event
def gen_event_idea_cowrie_ttylog(detect_time, src_ip, dst_ip, sessionid, ttylog, iinput):
    """Build an IDEA0 event carrying the replayed terminal log (ttylog)
    and the raw command input of one cowrie session."""
    evt = {}
    evt["Format"] = "IDEA0"
    evt["ID"] = str(uuid4())
    evt["DetectTime"] = detect_time
    evt["Category"] = ["Information.UnauthorizedAccess", "Test"]
    evt["Note"] = "Cowrie ttylog"
    evt["ConnCount"] = 1
    evt["Source"] = [{}]                # populated by IDEA_fill_addresses
    evt["Target"] = [{"Proto": ["tcp", "ssh"], "Port": [22]}]
    evt["Node"] = [{
        "Name": aclient_name,
        "Type": ["Honeypot", "Data"],
        "SW": ["Cowrie"],
    }]
    evt["Attach"] = [{"sessionid": sessionid, "ttylog": ttylog, "iinput": iinput, "smart": iinput}]
    return w3u.IDEA_fill_addresses(evt, src_ip, dst_ip, aanonymised, aanonymised_net)
def gen_event_idea_cowrie_download(detect_time, src_ip, dst_ip, sessionid, url, outfile):
    """Build an IDEA0 'Malware' event describing a file the attacker
    downloaded into the honeypot during a cowrie session."""
    evt = {}
    evt["Format"] = "IDEA0"
    evt["ID"] = str(uuid4())
    evt["DetectTime"] = detect_time
    evt["Category"] = ["Malware"]
    evt["Note"] = "Cowrie download"
    evt["ConnCount"] = 1
    evt["Source"] = [{}]                # populated by IDEA_fill_addresses
    evt["Target"] = [{"Proto": ["tcp", "ssh"], "Port": [22]}]
    evt["Node"] = [{
        "Name": aclient_name,
        "Type": ["Honeypot", "Data"],
        "SW": ["Cowrie"],
    }]
    evt["Attach"] = [{"sessionid": sessionid, "url": url, "outfile": outfile, "smart": url}]
    return w3u.IDEA_fill_addresses(evt, src_ip, dst_ip, aanonymised, aanonymised_net)
def get_iinput(sessionid):
    """Return all shell input recorded for *sessionid*, with the
    '--SEP--' marker between individual command lines."""
    query = "SELECT GROUP_CONCAT(input SEPARATOR '--SEP--') as i FROM input WHERE session=%s GROUP BY session;"
    crs.execute(query, (sessionid,))
    # GROUP BY session yields at most one row per session; join defensively.
    return ''.join(row["i"] for row in crs.fetchall())
def get_ttylog(sessionid):
    # Replay the recorded tty log(s) of the given cowrie session with the
    # bundled playlog tool and return the rendered output; binary output is
    # base64-wrapped so it survives JSON serialisation.
    ret = ""
    query = "SELECT id, session, ttylog FROM ttylog WHERE session=%s;"
    crs.execute(query, (sessionid,))
    rows = crs.fetchall()
    for row in rows:
        # NOTE(review): 'ret' is overwritten each iteration, so only the
        # last ttylog row is reported -- confirm a session cannot have more
        # than one ttylog row, or whether concatenation was intended.
        ret = subprocess.check_output(["/opt/cowrie/bin/playlog", "-m0", "../"+row['ttylog']])
    # Try to dump it to JSON to detect binary content; if encoding fails,
    # wrap the payload in base64 instead.
    try:
        a = json.dumps(ret)
    except UnicodeDecodeError as e:
        wclient.logger.warning("wraping binary content")
        ret = base64.b64encode(ret)
    return ret
# Open the kippo/cowrie MySQL database; DictCursor makes every row a dict
# keyed by the column aliases used in the queries below.
con = my.connect( host=aconfig['dbhost'], user=aconfig['dbuser'], passwd=aconfig['dbpass'],
    db=aconfig['dbname'], port=aconfig['dbport'], cursorclass=mycursors.DictCursor)
crs = con.cursor()
events = []
# kippo vs cowrie timestamp handling differs:
# cowrie/core/dblog.py: def nowUnix(self):
# cowrie/core/dblog.py-     """return the current UTC time as an UNIX timestamp"""
# cowrie/core/dblog.py-     return int(time.time())
# kippo/core/dblog.py: def nowUnix(self):
# kippo/core/dblog.py-     """return the current UTC time as an UNIX timestamp"""
# kippo/core/dblog.py-     return int(time.mktime(time.gmtime()[:-1] + (-1,)))
# unfortunately:
# >>> int(time.mktime(time.gmtime()[:-1] + (-1,)))-int(time.time()) != 0
# --- login attempts within the window, aggregated per source IP ---
query = "SELECT UNIX_TIMESTAMP(s.starttime) as starttime, s.ip, COUNT(s.id) as attack_scale, sn.ip as sensor \
    FROM sessions s \
    LEFT JOIN sensors sn ON s.sensor=sn.id \
    WHERE s.starttime > DATE_SUB(UTC_TIMESTAMP(), INTERVAL + %s SECOND) \
    GROUP BY s.ip ORDER BY s.starttime ASC;"
crs.execute(query, (awin,))
# reporting window boundaries: (now - awin .. now)
etime = format_timestamp(time())
stime = format_timestamp(time() - awin)
rows = crs.fetchall()
for row in rows:
    a = gen_event_idea_cowrie_info(
        detect_time = format_timestamp(row['starttime']),
        src_ip = row['ip'],
        dst_ip = row['sensor'],
        win_start_time = stime,
        win_end_time = etime,
        aggr_win = awin,
        conn_count = row['attack_scale']
    )
    events.append(a)
# --- successful logins within the window ---
query = "SELECT UNIX_TIMESTAMP(a.timestamp) as timestamp, s.ip as sourceip, sn.ip as sensor, a.session as sessionid, a.username as username, a.password as password \
    FROM auth a JOIN sessions s ON s.id=a.session JOIN sensors sn ON s.sensor=sn.id \
    WHERE a.success=1 AND a.timestamp > DATE_SUB(UTC_TIMESTAMP(), INTERVAL + %s SECOND) \
    ORDER BY a.timestamp ASC;"
crs.execute(query, (awin,))
rows = crs.fetchall()
for row in rows:
    a = gen_event_idea_cowrie_auth(
        detect_time = format_timestamp(row['timestamp']),
        src_ip = row['sourceip'],
        dst_ip = row['sensor'],
        username = row['username'],
        password = row['password'],
        sessionid = row['sessionid']
    )
    events.append(a)
# --- terminal logs + recorded command input per session ---
query = "SELECT UNIX_TIMESTAMP(s.starttime) as starttime, s.ip as sourceip, sn.ip as sensor, t.session as sessionid \
    FROM ttylog t JOIN sessions s ON s.id=t.session JOIN sensors sn ON s.sensor=sn.id \
    WHERE s.starttime > DATE_SUB(UTC_TIMESTAMP(), INTERVAL + %s SECOND) \
    ORDER BY s.starttime ASC;"
crs.execute(query, (awin,))
rows = crs.fetchall()
for row in rows:
    a = gen_event_idea_cowrie_ttylog(
        detect_time = format_timestamp(row['starttime']),
        src_ip = row['sourceip'],
        dst_ip = row['sensor'],
        sessionid = row['sessionid'],
        ttylog = get_ttylog(row['sessionid']),
        iinput = get_iinput(row['sessionid'])
    )
    events.append(a)
# --- files downloaded by attackers during the window ---
query = "SELECT UNIX_TIMESTAMP(s.starttime) as starttime, s.ip as sourceip, sn.ip as sensor, d.session as sessionid, d.url as url, d.outfile as ofile \
    FROM downloads d JOIN sessions s ON s.id=d.session JOIN sensors sn ON s.sensor=sn.id \
    WHERE s.starttime > DATE_SUB(UTC_TIMESTAMP(), INTERVAL + %s SECOND) \
    ORDER BY s.starttime ASC;"
crs.execute(query, (awin,))
rows = crs.fetchall()
for row in rows:
    a = gen_event_idea_cowrie_download(
        detect_time = format_timestamp(row['starttime']),
        src_ip = row['sourceip'],
        dst_ip = row['sensor'],
        sessionid = row['sessionid'],
        url = row['url'],
        outfile = row['ofile']
    )
    events.append(a)
# --- hand everything to the Warden server in one batch ---
print "=== Sending ==="
start = time()
ret = wclient.sendEvents(events)
if 'saved' in ret:
    wclient.logger.info("%d event(s) successfully delivered." % ret['saved'])
print "Time: %f" % (time() - start)
from os import stat, fstat
from os.path import exists, getsize
from datetime import datetime, timedelta
import gzip
import logging
import signal
import socket
import sys
import time
from optparse import OptionParser
__version__ = '0.5.3'
PY3 = sys.version_info[0] == 3
if PY3:
text_type = str
else:
text_type = unicode
def IDEA_fill_addresses(event, source_ip, destination_ip, anonymised, anonymised_net):
    """Fill Source/Target address lists of an IDEA event in place.

    event -- IDEA dict with at least one (possibly empty) Source and Target entry
    source_ip/destination_ip -- attacker and sensor addresses as strings
    anonymised -- 'yes' (report anonymised_net instead of the real target),
                  'omit' (report no target address), anything else: real target
    anonymised_net -- network reported as the target when anonymising
    Returns the same event dict.
    """
    # Bugfix: the address-family key was derived from source_ip only and
    # reused for the target, mislabeling mixed IPv4/IPv6 events. Derive the
    # family independently for each address that gets stored.
    src_af = "IP6" if ':' in source_ip else "IP4"
    event['Source'][0][src_af] = [source_ip]
    if anonymised != 'omit':
        if anonymised == 'yes':
            event['Target'][0]['Anonymised'] = True
            tgt_af = "IP6" if ':' in anonymised_net else "IP4"
            event['Target'][0][tgt_af] = [anonymised_net]
        else:
            tgt_af = "IP6" if ':' in destination_ip else "IP4"
            event['Target'][0][tgt_af] = [destination_ip]
    return event
def hexdump(src, length = 16):
    """Render *src* as a classic hex dump: one line per *length* characters
    showing the offset, the hex values and the printable characters
    ('.' for everything non-printable)."""
    # Translation table: a character whose repr() is exactly three chars
    # (quote + char + quote) is plainly printable and kept; the rest map
    # to '.'.
    printable = ''.join(chr(code) if len(repr(chr(code))) == 3 else '.' for code in range(256))
    out = []
    offset = 0
    remaining = src
    while remaining:
        chunk = remaining[:length]
        remaining = remaining[length:]
        hex_part = ' '.join("%02X" % ord(ch) for ch in chunk)
        out.append("%04X %-*s %s\n" % (offset, length * 3, hex_part, chunk.translate(printable)))
        offset += length
    return ''.join(out)
def get_ip_address(ifname):
    """Return the primary IPv4 address of network interface *ifname*
    (e.g. 'eth0') via the SIOCGIFADDR ioctl (Linux only).

    NOTE(review): on Python 3, struct.pack('256s', ...) requires *ifname*
    to be bytes -- callers passing str work only on Python 2. Confirm the
    intended interpreter before tightening this.
    """
    # Bugfix: fcntl and struct were used without ever being imported at
    # module level, so this function raised NameError when called.
    import fcntl
    import struct
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # 0x8915 - SIOCGIFADDR; the address sits at bytes 20..24 of the
        # returned ifreq structure.
        ifreq = fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', ifname[:15]))
        return socket.inet_ntoa(ifreq[20:24])
    finally:
        # Bugfix: the original leaked the throwaway datagram socket.
        s.close()
def force_text(s, encoding='utf-8', errors='strict'):
    """Return *s* as text, decoding byte strings with *encoding*;
    text input is passed through unchanged."""
    if not isinstance(s, text_type):
        s = s.decode(encoding, errors)
    return s
class Pygtail(object):
    """
    Creates an iterable object that returns only unread lines.

    Keyword arguments:
    offset_file   File to which offset data is written (default: <logfile>.offset).
    paranoid      Update the offset file every time we read a line (as opposed to
                  only when we reach the end of the file) (default: False)
    copytruncate  Support copytruncate-style log rotation (default: True)
    """

    def __init__(self, filename, offset_file=None, paranoid=False,
                 copytruncate=True, wait_step=0.5, wait_timeout=20,
                 host_name=None):
        # File being tailed and behaviour flags.
        self.filename = filename
        self.paranoid = paranoid
        self.copytruncate = copytruncate
        # Polling parameters used by _wait_for_update(); time_waited
        # accumulates across calls until data arrives.
        self.wait_step = wait_step
        self.wait_timeout = wait_timeout
        self.time_waited = 0.0
        # Persistent state from the previous run: inode, byte offset and
        # last-run timestamp, stored in the offset file.
        self._offset_file = offset_file or "%s.offset" % self.filename
        self._offset_file_inode = None
        self._offset = None
        self._dt_format = '%Y-%m-%dT%H:%M:%S.%f'
        # Rotated files are expected to be named
        # <filename>_<host>_<YYYYMMDDHH>.gz (hourly, gzip-compressed).
        self._hostname = host_name or socket.gethostname().split('.')[0]
        self._filename_format = '%(filename)s_%(host_name)s_%(log_hour)s.gz'
        self._log_hour_format = '%Y%m%d%H'
        self._fh = None
        self._rotated_logfiles = []
        self._catching_up = False
        self._last_log = None
        self._parse_offset_file()
        if self._last_log:
            # A previous run exists: find rotated files created since then
            # and start by catching up on them before tailing the live file.
            self._rotated_logfiles = self._determine_rotated_logfiles()
            self._catching_up = bool(self._rotated_logfiles)
            # NOTE(review): the log message suggests this check should fire
            # only when NO rotated files were found, but the condition as
            # written does not consult self._rotated_logfiles -- confirm the
            # intended nesting against the upstream version (indentation was
            # lost in this copy).
            if (self._offset_file_inode != stat(self.filename).st_ino) or \
                    (stat(self.filename).st_size < self._offset):
                # Fail hard, this needs inspection
                logging.fatal(
                    "File was truncated, but NO rotated files were created. inode:"
                    " %s offset: %s current size: %s timestamp: %s filename: %s",
                    self._offset_file_inode,
                    self._offset,
                    stat(self.filename).st_size,
                    self._last_log,
                    self.filename
                )
                sys.exit(1)

    def __del__(self):
        # Persist the current offset on destruction so the next run resumes
        # where this one stopped.
        self._update_offset_file()
        self._fh.close()

    def __iter__(self):
        return self

    def next(self):
        """
        Return the next line in the file, updating the offset.
        """
        try:
            line = self._get_next_line()
        except StopIteration:
            if self._catching_up:
                # EOF on a rotated file: advance to the next rotated file
                # (or the live file once the backlog is drained).
                logging.debug(
                    "Finished processing %s, moving to %s",
                    getattr(self._fh, 'filename') or getattr(self._fh, 'name'),
                    self._rotated_logfiles and self._rotated_logfiles[0] or self.filename
                )
                self._reload()
                self._catching_up = bool(self._rotated_logfiles)
                # Start on the next rotated file
                try:
                    line = self._get_next_line()
                except StopIteration:  # oops, empty file
                    self._update_offset_file()
                    raise
            else:
                logging.debug("StopIteration at the main file, exiting")
                self._update_offset_file()
                raise
        if self.paranoid:
            # paranoid mode: persist the offset after every single line
            self._update_offset_file()
        return line

    def __next__(self):
        """`__next__` is the Python 3 version of `next`"""
        return self.next()

    def readlines(self):
        """
        Read in all unread lines and return them as a list.
        """
        return [line for line in self]

    def read(self):
        """
        Read in all unread lines and return them as a single string.
        """
        lines = self.readlines()
        if lines:
            try:
                return ''.join(lines)
            except TypeError:
                # mixed bytes/str lines: coerce everything to text first
                return ''.join(force_text(line) for line in lines)
        else:
            return None

    def _is_closed(self):
        # True when there is no handle yet or the handle has been closed.
        if not self._fh:
            return True
        try:
            return self._fh.closed
        except AttributeError:
            if isinstance(self._fh, gzip.GzipFile):
                # python 2.6: GzipFile has no .closed; probe its fileobj
                return self._fh.fileobj is None
            else:
                raise

    def _parse_offset_file(self):
        # if offset file exists and non-empty, open and parse it
        if exists(self._offset_file) and getsize(self._offset_file):
            offset_fh = open(self._offset_file, "r")
            offset_data = [line.strip() for line in offset_fh]
            offset_fh.close()
            # line 1: inode, line 2: byte offset, line 3: last-run timestamp
            self._offset_file_inode = int(offset_data[0])
            self._offset = int(offset_data[1])
            self._last_log = datetime.strptime(offset_data[2], self._dt_format)
        else:
            self._offset = 0

    def _get_offset(self):
        # Lazily load the offset from the offset file on first use.
        if self._offset is None:
            self._parse_offset_file()
        return self._offset

    def _filehandle(self):
        """
        Return a filehandle to the file being tailed, with the position set
        to the current offset.
        """
        if not self._fh or self._is_closed():
            # Drain pending rotated files (oldest first) before the live one.
            if self._rotated_logfiles:
                filename = self._rotated_logfiles.pop(0)
            else:
                filename = self.filename
            if filename.endswith('.gz'):
                self._fh = gzip.open(filename, 'r')
            else:
                self._fh = open(filename, "r")
            self._fh.seek(self._get_offset())
        return self._fh

    def _update_offset_file(self):
        """
        Update the offset file with the current inode and offset.
        """
        offset = self._filehandle().tell()
        inode = stat(self.filename).st_ino
        fh = open(self._offset_file, "w")
        fh.write(
            "%s\n%s\n%s\n" % (
                inode,
                offset,
                datetime.now().strftime(self._dt_format)
            )
        )
        fh.close()

    def _determine_rotated_logfiles(self):
        """
        Looks up the rotated files and returns them.
        """
        # Compare whole hours only; rotation is assumed to happen hourly.
        end = datetime.now().replace(minute=0, second=0, microsecond=0)
        start = self._last_log.replace(minute=0, second=0, microsecond=0)
        # NOTE(review): '/' produces a float on Python 3; truthiness still
        # works below, but '//' would express the intent exactly.
        elapsed_hours = int((end - start).total_seconds()) / 60 / 60
        if not elapsed_hours:
            return []
        files_list = []
        while start < end:
            # Candidate name: <filename>_<host>_<YYYYMMDDHH>.gz
            candidate = self._filename_format % {
                'filename': self.filename,
                'host_name': self._hostname,
                'log_hour': start.strftime(self._log_hour_format),
            }
            if exists(candidate):
                files_list.append(candidate)
            start += timedelta(hours=1)
        return files_list

    def _reload(self):
        # Close the current handle and reset the offset so the next
        # _filehandle() call reopens (the next file) from the beginning.
        self._fh.close()
        self._offset = 0

    def _check_rotate_truncate(self):
        # Detect log rotation (file moved / inode changed) and
        # copytruncate-style truncation, reloading when either happened.
        fh = self._filehandle()
        start_pos = fh.tell()
        fh_ino = fstat(fh.fileno()).st_ino
        try:
            fh_stat = stat(self.filename)
        except OSError:
            # Path vanished: the file was moved away (classic rotation).
            logging.info("File moved, reloading...")
            self._reload()
            return
        current_ino = fh_stat.st_ino
        current_size = fh_stat.st_size
        if fh_ino != current_ino:
            logging.info("File rotated, reloading...")
            self._reload()
        # Shrunk below our read position: file was truncated in place.
        if self.copytruncate and (current_size < start_pos):
            logging.info("File truncated, reloading...")
            self._reload()

    def _wait_for_update(self):
        # Poll for new data, sleeping wait_step seconds between attempts;
        # give up with StopIteration once time_waited (which persists across
        # calls until data arrives) exceeds wait_timeout.
        while(self.time_waited < self.wait_timeout):
            time.sleep(self.wait_step)
            self.time_waited += self.wait_step
            line = self._filehandle().readline()
            if line:
                self.time_waited = 0.0
                return line
            self._check_rotate_truncate()
        else:
            # while/else: reached only when the timeout expired.
            raise StopIteration

    def _get_next_line(self):
        line = self._filehandle().readline()
        if not line:
            if self._catching_up:
                # EOF on a rotated file: let next() advance to the next one.
                raise StopIteration
            self._check_rotate_truncate()
            return self._wait_for_update()
        return line

    def exit_handler(self, signal, frame):
        # Intended for registration via signal.signal(); exits cleanly so
        # __del__ can persist the offset during teardown.
        logging.info("Received exit signal, shutting down...")
        sys.exit(0)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment