Skip to content

Instantly share code, notes, and snippets.

@deeso
Last active August 17, 2020 21:09
Show Gist options
  • Save deeso/058af432419da1e722116e268497fa48 to your computer and use it in GitHub Desktop.
Save deeso/058af432419da1e722116e268497fa48 to your computer and use it in GitHub Desktop.
Basic Docker API listener intended for use as a honeypot.
import base64
from time import sleep
import requests
import asyncio
from datetime import datetime
import hashlib
import socket
import random
import string
import json
import argparse
import logging
import threading
from multiprocessing import Process
import traceback
# require installation
import regex
import pymongo
from mongoengine import connect
from mongoengine import *
# Best-effort lookup of this host's public IP (used to label outgoing alerts);
# stays None when the lookup service is unreachable.
MY_IP = None
try:
    MY_IP = requests.get("https://api.ipify.org/?format=json").json()['ip']
except Exception:
    # A bare `except:` here would also swallow KeyboardInterrupt/SystemExit at
    # import time; offline/blocked egress simply leaves MY_IP as None and the
    # notifications fall back to the socket-reported destination address.
    pass
# ---- notification / persistence configuration --------------------------------
# These module-level flags and dicts are mutated from CLI args in __main__.
USING_EMAIL = False
DOCKER_HP_EMAIL = 'no-reply@docker-honeypot.localhost'
# SMTP settings; send_emails uses cc_list[0] as the To: address, the rest as CC
EMAIL_KARGS = {
    "username": None,
    "password": None,
    "server": None,
    "port": None,
    "cc_list": None,
    "subject": None,
}
USING_SLACK = False
# NOTE(review): 'docker_honyepot' looks like a typo for 'docker_honeypot', but
# it is a runtime default value and is preserved as-is.
SLACK_KARGS = {
    "channel": None,
    "username": 'docker_honyepot',
    "webhook": None,
    "icon_emoji": ":suspect:",
}
USING_WBX_TEAMS = False
WBX_TEAMS_WEBHOOK = None
# Base body for webhook POSTs (Slack-style); 'text' is filled per alert.
WEBHOOK_PAYLOAD = {
    "channel": None,
    "username": 'docker_honyepot',
    "text": None,
    "icon_emoji": ":suspect:",
}
USING_MONGO = False
MAX_DATA = 2000000000  # upper bound passed to socket.recv (~2GB)
PORTS = [2375, 2376, 2377, 4243, 4244]  # common docker-daemon TCP ports to listen on
API = '1.16'  # API version reported when a request does not carry one
KEEP_WORKING = False  # accept-loop run flag; flipped to True in __main__
ERROR_MESSAGE = 'server error'
# NOTE(review): subject template uses {src_host}/{dst_host}, which are never
# substituted anywhere in this file -- confirm intended.
DEFAULT_SUBJECT = "[DOCKERPOT] Create Attempted {src_host} to {dst_host}"
DATABASE = 'docker_honeypot'
# Mongo collection names (NOTE(review): declared but not referenced below).
REQUESTS_COLLECTION = 'connections'
COMMANDS_COLLECTION = 'commands'
IMAGES_COLLECTION = 'images'
# Timestamped INFO-level logging for the whole process.
logging.basicConfig(level=logging.INFO, format='%(asctime)s :: %(levelname)s :: %(message)s')
parser = argparse.ArgumentParser()
# --- listener / response behavior ---
parser.add_argument("-ports", help="ports to listen on", type=int, nargs='+', default=PORTS)
parser.add_argument("-terminate_with_error", help="send a server error after create API call", action="store_true", default=False)
parser.add_argument("-error_message", help="error message to send after create API call", default=ERROR_MESSAGE, type=str)
# --- mongo persistence ---
parser.add_argument("-use_mongo", help="use mongo", default=False, action='store_true')
parser.add_argument("-mongo_db", help="mongo database to connect to", default=DATABASE, type=str)
parser.add_argument("-mongo_host", help="mongo host to connect to", default='127.0.0.1', type=str)
parser.add_argument("-mongo_port", help="mongo port go connect to", default=27017, type=int)
parser.add_argument("-mongo_user", help="mongo username", default=None, type=str)
parser.add_argument("-mongo_pass", help="mongo password", default=None, type=str)
# --- email notifications (send_emails is currently a stub) ---
parser.add_argument("-email", help="notify about attempt", action='store_true', default=False)
parser.add_argument("-email_notify_subject", help="email subject line", default=DEFAULT_SUBJECT, type=str)
parser.add_argument("-email_server", help="email server", default="smtp.gmail.com", type=str)
parser.add_argument("-email_port", help="email port", default=587, type=int)
parser.add_argument("-email_username", help="email server", default=None, type=str)
parser.add_argument("-email_password", help="email password", default=None, type=str)
parser.add_argument("-email_cc_list", help="email cc list", nargs='+', default=None, type=str)
# --- slack notifications ---
# parser.add_argument("-slack_token", help="someone to email when event happens", default=None, type=str)
parser.add_argument("-slack", help="notify about attempt", action='store_true', default=False)
parser.add_argument("-slack_channel", help="slack channel tp post too", default=None, type=str)
parser.add_argument("-slack_username", help="username for webhook", default='docker_honyepot', type=str)
parser.add_argument("-slack_webhook", help="webhook url", default=None, type=str)
parser.add_argument("-slack_emoticon", help="slack emoticon to use", default=":suspect:", type=str)
# --- webex teams notifications ---
parser.add_argument("-wbx", help="notify about attempt", action='store_true', default=False)
parser.add_argument("-wbx_webhook", help="webhook url", default=None, type=str)
# Request-line patterns for docker Engine API calls, matched against the raw
# request bytes.  The third-party `regex` module is required for the
# (?<name>...) named-group syntax used to capture the API version.
PING_RE = rb'^HEAD \/_ping HTTP\/1\.1.*'
GET_RE = rb'^GET .*'
GET_VERSION_RE = rb'^GET \/(?<api>v[0-9]+\.[0-9]+)\/version.*'
CREATE_RE = rb'^POST \/(?<api>v[0-9]+\.[0-9]+)\/containers\/create.*'
# NOTE(review): the documented endpoint is /<api>/images/create -- confirm the
# truncated "/create?" path is intentional.
CREATE_IMAGE_RE = rb"^POST \/(?<api>v[0-9]+\.[0-9]+)\/create\?.*"
ATTACH_RE = rb'^POST \/(?<api>v[0-9]+\.[0-9]+)\/containers\/[0-9a-f]+\/attach.*'
WAIT_RE = rb"^POST \/(?<api>v[0-9]+\.[0-9]+)\/containers\/[0-9a-f]+\/wait\?condition=removed.*"
START_RE = rb'^POST \/(?<api>v[0-9]+\.[0-9]+)\/containers\/[0-9a-f]+\/start.*'
# NOTE(review): dots in "HTTP/1.1" are unescaped here (they still match, as
# '.' matches any character).
INFO_RE = rb'^GET \/(?<api>v[0-9]+\.[0-9]+)\/info HTTP/1.1'
# Canned HTTP response templates.  Placeholders ({api}, {date}, {size},
# {docker_id}, {data}, {iso_date}) are substituted via str.format in
# create_response / generate_error; bodies are appended after formatting.
GET_RETURN = b'''HTTP/1.1 200 OK\r\nApi-Version: {api}\r\nCache-Control: no-cache, no-store, must-revalidate\r\nContent-Type: text/plain; charset=utf-8\r\nDocker-Experimental: false\r\nOstype: linux\r\nPragma: no-cache\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\n\r\n'''
PING_RETURN = b'''HTTP/1.1 200 OK\r\nApi-Version: {api}\r\nCache-Control: no-cache, no-store, must-revalidate\r\nContent-Length: {size}\r\nContent-Type: text/plain; charset=utf-8\r\nDocker-Experimental: false\r\nOstype: linux\r\nPragma: no-cache\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\n\r\n'''
CREATE_RETURN = b'''HTTP/1.1 201 Created\r\nApi-Version: {api}\r\nContent-Type: application/json\r\nDocker-Experimental: false\r\nOstype: linux\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\nContent-Length: 88\r\n\r\n{{"Id":"{docker_id}","Warnings":[]}}\r\n\r\n'''
CREATE_IMAGE_RETURN = b'''HTTP/1.1 200 Created\r\nApi-Version: {api}\r\nContent-Type: application/json\r\nDocker-Experimental: false\r\nOstype: linux\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\nTransfer-Encoding: chunked\r\n0\r\n'''
WAIT_RETURN = b'''HTTP/1.1 200 OK\r\nApi-Version: {api}\r\nContent-Type: application/json\r\nDocker-Experimental: false\r\nOstype: linux\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\nContent-Length: {size}\r\n\r\n{data}\r\n\r\n'''
ATTACH_RETURN = b'''HTTP/1.1 101 UPGRADED\r\nContent-Type: application/vnd.docker.raw-stream\r\nConnection: Upgrade\r\nUpgrade: tcp\r\n\r\n'''
ERROR_RETURN = b'''HTTP/1.1 500 Internal Server Error\r\nApi-Version: {api}\r\nContent-Type: application/json\r\nDocker-Experimental: false\r\nOstype: linux\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\nContent-Length: {size}\r\n\r\n'''
ERROR_DATA = {"message":"server error"}
GET_VERSION_RETURN = b'''HTTP/1.1 200 OK\r\nApi-Version: {api}\r\nCache-Control: no-cache, no-store, must-revalidate\r\nContent-Length: {size}\r\nContent-Type: application/json; charset=utf-8\r\nDocker-Experimental: false\r\nOstype: linux\r\nPragma: no-cache\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\n\r\n'''
# Fake engine metadata returned for /version and /info probes.
GET_VERSION_DATA = {"Platform":{"Name":""},"Components":[{"Name":"Engine","Version":"16.03.8","Details":{"ApiVersion":"1.16","Arch":"amd64","BuildTime":"2015-01-18T21:26:54.000000000+00:00","Experimental":"false","GitCommit":"","GoVersion":"go1.0.8","KernelVersion":"2.4.0-42-generic","MinAPIVersion":"1.12","Os":"linux"}},{"Name":"containerd","Version":"1.0.0-0ubuntu2","Details":{"GitCommit":""}},{"Name":"runc","Version":"spec: 0.0.1-dev","Details":{"GitCommit":""}},{"Name":"docker-init","Version":"0.14.0","Details":{"GitCommit":""}}],"Version":"16.03.8","ApiVersion":"1.12","MinAPIVersion":"1.12","GitCommit":"","GoVersion":"go1.0.0","Os":"linux","Arch":"amd64","KernelVersion":"2.4.0-42-generic","BuildTime":"2015-01-18T21:26:54.000000000+00:00"}
INFO_RETURN = b'''HTTP/1.1 200 OK\r\nApi-Version: {api}\r\nContent-Type: application/json\r\nDocker-Experimental: false\r\nOstype: linux\r\nServer: Docker/16.03.8 (linux)\r\nDate: {date}\r\nContent-Length: {size}\r\n\r\n'''
INFO_DATA = {"ID":"","Containers":0,"ContainersRunning":1,"ContainersPaused":0,"ContainersStopped":9,"Images":6,"Driver":"aufs","DriverStatus":[["Root Dir","/var/lib/docker/aufs"],["Backing Filesystem","extfs"],["Dirs","141"],["Dirperm1 Supported","true"]],"SystemStatus":None,"Plugins":{"Volume":["local"],"Network":["bridge","host","ipvlan","macvlan","null","overlay"],"Authorization":None,"Log":["awslogs","fluentd","gcplogs","gelf","journald","json-file","local","logentries","splunk","syslog"]},"MemoryLimit":True,"SwapLimit":False,"KernelMemory":True,"KernelMemoryTCP":True,"CpuCfsPeriod":True,"CpuCfsQuota":True,"CPUShares":True,"CPUSet":True,"PidsLimit":True,"IPv4Forwarding":True,"BridgeNfIptables":True,"BridgeNfIp6tables":True,"Debug":False,"NFd":30,"OomKillDisable":True,"NGoroutines":41,"SystemTime":"{iso_date}","LoggingDriver":"json-file","CgroupDriver":"cgroupfs","NEventsListener":0,"KernelVersion":"5.4.0-42-generic","OperatingSystem":"Ubuntu 20.04 LTS","OSType":"linux","Architecture":"x86_64","IndexServerAddress":"https://index.docker.io/v1/","RegistryConfig":{"AllowNondistributableArtifactsCIDRs":[],"AllowNondistributableArtifactsHostnames":[],"InsecureRegistryCIDRs":["127.0.0.0/8"],"IndexConfigs":{"docker.io":{"Name":"docker.io","Mirrors":[],"Secure":True,"Official":True}},"Mirrors":[]},"NCPU":8,"MemTotal":33523802112,"GenericResources":None,"DockerRootDir":"/var/lib/docker","HttpProxy":"","HttpsProxy":"","NoProxy":"","Name":"mr-reimagined","Labels":[],"ExperimentalBuild":False,"ServerVersion":"16.03.8","ClusterStore":"","ClusterAdvertise":"","Runtimes":{"runc":{"path":"runc"}},"DefaultRuntime":"runc","Swarm":{"NodeID":"","NodeAddr":"","LocalNodeState":"inactive","ControlAvailable":False,"Error":"","RemoteManagers":None},"LiveRestoreEnabled":False,"Isolation":"","InitBinary":"docker-init","ContainerdCommit":{"ID":"","Expected":""},"RuncCommit":{"ID":"","Expected":""},"InitCommit":{"ID":"","Expected":""},"SecurityOptions":[],"Warnings":[]}
# Empty body sent for requests that match nothing.
UNKNOWN_RETURN = b''
# Request types whose patterns capture an <api> group.
HAS_API = ['PING', 'GET_VERSION', 'CREATE', 'CREATE_IMAGE', 'WAIT', 'ATTACH', 'INFO', 'START']
# NOTE(review): bytes, but create_response json.dumps()-es it in a branch that
# is unreachable via RESPONSES -- confirm intended.
WAIT_RETURN_DATA = b'{"Error":{random_string},"StatusCode":{random_string}}'
def get_server_date():
    """Return the current local time formatted like an HTTP Date header."""
    # PEP 8 (E731): named functions instead of lambda assignments.
    return datetime.now().strftime('%a, %d %b %Y %H:%M:%S GMT')


def get_docker_id():
    """Return a fake 64-char hex container id derived from the current timestamp."""
    return hashlib.sha256(get_server_date().encode('ascii')).hexdigest()


def get_random_data():
    """Return random filler text for fabricated response payloads."""
    return random_string_generator()


def get_iso_time():
    """Return the current local time as an ISO-8601 string."""
    return datetime.now().isoformat()
# Request-type identifiers; also used as keys in IDENTIFY/RESPONSES and as the
# 'rtype' field of result dicts.
PING = 'PING'
GET = 'GET'
CREATE = 'CREATE'
CREATE_IMAGE = 'CREATE_IMAGE'
WAIT = 'WAIT'
ATTACH = 'ATTACH'
INFO = 'INFO'
START = 'START'
GET_VERSION = 'GET_VERSION'
ERROR = 'ERROR'
UNKNOWN = 'UNKNOWN'
# Classification table.  Insertion order matters: get_handler_type checks
# top-down, so the specific GET_VERSION pattern precedes the catch-all GET.
IDENTIFY = {
    PING: regex.compile(PING_RE),
    GET_VERSION: regex.compile(GET_VERSION_RE),
    GET: regex.compile(GET_RE),
    CREATE: regex.compile(CREATE_RE),
    CREATE_IMAGE: regex.compile(CREATE_IMAGE_RE),
    WAIT: regex.compile(WAIT_RE),
    ATTACH: regex.compile(ATTACH_RE),
    INFO: regex.compile(INFO_RE),
    START: regex.compile(START_RE),
}
# Response template per request type; every "real" API call is answered with
# the generic 500 error template.
RESPONSES = {
    PING: PING_RETURN,
    GET: GET_RETURN,
    GET_VERSION: GET_VERSION_RETURN,
    CREATE: ERROR_RETURN,
    CREATE_IMAGE: ERROR_RETURN,
    WAIT: ERROR_RETURN,
    ATTACH: ERROR_RETURN,
    INFO: ERROR_RETURN,
    START: ERROR_RETURN,
    UNKNOWN: UNKNOWN_RETURN,
}
def random_string_generator(str_size=25, allowed_chars=string.ascii_letters + string.punctuation):
    """Return a string of ``str_size`` characters drawn uniformly at random
    from ``allowed_chars``."""
    picks = [random.choice(allowed_chars) for _ in range(str_size)]
    return ''.join(picks)
def extract_json_data(req):
    """Parse the JSON body out of a raw HTTP request.

    req: full request (headers + body separated by a blank line) as ``bytes``
        or ``str``; any other type yields None.
    Returns the decoded JSON object, or None when there is no body, the body
    is empty, or the body is not valid JSON.  (The original wrapped
    ``json.loads`` in a no-op ``try/except: raise``; honeypot input is
    attacker-controlled, so malformed JSON is now treated as "no data"
    instead of propagating.)
    """
    if isinstance(req, bytes):
        sep = b'\r\n\r\n'
    elif isinstance(req, str):
        sep = '\r\n\r\n'
    else:
        return None
    # Everything after the *first* blank line is the body; partition keeps any
    # blank lines inside the body intact (same as re-joining split parts).
    _, found, body = req.partition(sep)
    if not found or not body:
        return None
    try:
        return json.loads(body)
    except ValueError:
        return None
def get_match_group(rtype, req):
    """Return the named capture groups from matching ``rtype``'s pattern
    against ``req``; empty dict for an unknown type or a non-match."""
    pattern = IDENTIFY.get(rtype)
    if pattern is None:
        return {}
    match = pattern.match(req)
    return match.groupdict() if match else {}
def generate_error(error_message='server error', api=API):
    """Build a complete HTTP 500 response (headers + JSON body) as a str.

    error_message: value for the JSON ``message`` field in the body.
    api: value for the Api-Version header.
    """
    # The original looked up RESPONSES.get(ERROR, ERROR_RETURN), but ERROR is
    # not a key in RESPONSES, so the template was always ERROR_RETURN; it also
    # computed docker_id/iso_date kwargs the template never uses.
    body = json.dumps({'message': error_message})
    headers = ERROR_RETURN.decode('ascii').format(
        api=api,
        date=get_server_date(),
        size=len(body),
    )
    return headers + body
def create_response(rtype, req):
    """Render the canned HTTP response for request type ``rtype``.

    rtype: one of the request-type constants (PING, CREATE, ...).
    req: raw request bytes, re-matched to extract the API version group.
    Returns the full response (headers + body) as a str.
    """
    size = 0
    data = b''
    fmt = RESPONSES.get(rtype, GET_RETURN)
    kargs = get_match_group(rtype, req)
    # regex groups come back as bytes; strip the leading 'v' from e.g. b'v1.16'
    kargs['api'] = API if not 'api' in kargs else kargs['api'].decode('ascii').lstrip('v')
    if INFO_RETURN == fmt:
        # templated SystemTime inside the canned /info JSON
        data = json.dumps(INFO_DATA).replace('{iso_date}', get_iso_time())
        size = len(data)
    elif WAIT_RETURN == fmt:
        # NOTE(review): unreachable -- RESPONSES maps WAIT to ERROR_RETURN, and
        # json.dumps(WAIT_RETURN_DATA) would raise TypeError anyway (bytes).
        data = json.dumps(WAIT_RETURN_DATA).replace('{random_string}', get_random_data())
        size = len(data)
    elif ERROR_RETURN == fmt:
        data = json.dumps(ERROR_DATA)
        size = len(data)
    elif GET_VERSION_RETURN == fmt:
        data = json.dumps(GET_VERSION_DATA).replace('{api}', kargs['api'])
        size = len(data)
    # values for whichever placeholders the chosen template contains; extras
    # are ignored by str.format(**kargs)
    kargs.update({
        'docker_id': get_docker_id(),
        'date': get_server_date(),
        'size': size,
        'iso_date': get_iso_time(),
    })
    if isinstance(data, bytes):
        data = data.decode('ascii')
    resp = fmt.decode('ascii').format(**kargs)+data
    return resp
def get_docker_sock():
    """Open and return a stream connection to the local Docker UNIX socket.

    NOTE(review): not referenced anywhere else in this file -- confirm it is
    still needed.
    """
    dsock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    dsock.connect('/var/run/docker.sock')
    return dsock
def create_listener_sock(port):
    """Create a TCP socket listening on all interfaces at ``port``.

    Returns the listening socket (backlog 10).
    """
    s = socket.socket()
    # Without SO_REUSEADDR a quick restart fails with EADDRINUSE while old
    # connections linger in TIME_WAIT.
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(('', port))
    s.listen(10)
    return s
def get_handler_type(data):
    """Classify raw request bytes against the known API patterns.

    Returns the first matching request-type constant (IDENTIFY is ordered
    specific-to-generic) or UNKNOWN when nothing matches.
    """
    for name, pattern in IDENTIFY.items():
        if pattern.match(data):
            return name
    return UNKNOWN
def recv_until_done(client):
    """Read from ``client`` until EOF, error, or timeout; return all bytes.

    A read error (including a socket timeout, which is an OSError subclass)
    is treated the same as the peer closing: whatever was collected so far is
    returned.  The original asked recv() for a single ~2GB buffer and grew a
    bytes object with ``+``; reading in 64KiB chunks and joining once avoids
    both the huge allocation request and the quadratic concatenation.
    """
    chunks = []
    while True:
        try:
            new_data = client.recv(65536)
        except OSError:
            # timeout / connection reset: stop reading, keep what we have
            new_data = b''
        if not new_data:
            break
        chunks.append(new_data)
    return b''.join(chunks)
async def consume_request(client, address, send_response=True):
    """Read one raw request from ``client`` and classify it.

    Returns a result dict: connection endpoints, ISO timestamp, request type,
    the canned response text, the base64 of the raw request, the parsed JSON
    body (if any), the API version, and a 'sent' flag (always False here --
    this function never writes to the socket; the ``send_response`` parameter
    is unused).
    """
    create_data = None
    src_ip, src_port = client.getpeername()
    dst_ip, dst_port = client.getsockname()
    created_at = get_iso_time()
    data = recv_until_done(client) #(client.recv(MAX_DATA))
    b64req = base64.b64encode(data).decode('ascii')
    if data == b'':
        # peer connected but sent nothing -- record it as an UNKNOWN probe
        logging.info("failed connection from: {}".format(address))
        return {'src_ip': src_ip, 'src_port': src_port, 'dst_ip': dst_ip, 'dst_port': dst_port, "created_at":created_at,
                'rtype': UNKNOWN, 'response': None, "request": b64req, "request_data": None, "api": None, 'sent': False}
    rtype = get_handler_type(data)
    logging.info("Handling connection from {}:{} for {}".format(address[0], address[1], rtype))
    rdata = create_response(rtype, data)
    # only bother parsing a body when the request claims to carry JSON
    if data.find(b'Content-Type: application/json\r\n') > -1:
        create_data = extract_json_data(data)
    kargs = get_match_group(rtype, data)
    # regex groups are bytes; strip the leading 'v' from e.g. b'v1.16'
    api = API if not 'api' in kargs else kargs['api'].decode('ascii').lstrip('v')
    src_ip, src_port = client.getpeername()
    dst_ip, dst_port = client.getsockname()
    return {'src_ip': src_ip, 'src_port': src_port, 'dst_ip': dst_ip, 'dst_port': dst_port, "created_at":created_at,
            'rtype': rtype, 'response': rdata, "request": b64req, 'request_data': create_data, 'api': api, 'sent': False }
async def honeypot_connection(client, address, send_after_ping=False):
    """Drive the expected docker-client exchange on one connection.

    Consumes the first request; for a PING it answers and then consumes the
    follow-up API call (a docker client pings before issuing the real call).
    GET/GET_VERSION/UNKNOWN requests are answered and the exchange ends.
    Returns the list of result dicts (one or two entries).
    """
    result = await consume_request(client, address)
    results = [result]
    # facilitate follow-on docker client communication
    logging.info("Handled connection type:{} from {}:{}".format(result['rtype'], address[0], address[1]))
    if result['rtype'] == PING:
        client.send(result['response'].encode('ascii'))
        result['sent'] = True
        # Nothing else to do, likely a port scanner
    elif result['rtype'] == GET_VERSION or result['rtype'] == GET:
        client.send(result['response'].encode('ascii'))
        result['sent'] = True
        return results
    elif result['rtype'] == UNKNOWN:
        client.send(UNKNOWN_RETURN)
        result['sent'] = True
        return results
    # read the real API call that follows the PING (or any other API type)
    result = await consume_request(client, address)
    results.append(result)
    # NOTE(review): rtype is always a non-empty string, so sending hinges
    # entirely on send_after_ping (False by default)
    if result['rtype'] and send_after_ping:
        try:
            client.send(result['response'].encode('ascii'))
            result['sent'] = True
        except:
            pass
    return results
class GeneralEvent(Document):
    """MongoDB record (mongoengine Document) for every handled connection."""
    # TCP endpoints of the connection
    src_ip = StringField(required=True)
    src_port = IntField(required=True)
    dst_ip = StringField(required=True)
    dst_port = IntField(required=True)
    created_at = StringField(required=True)   # ISO-8601 timestamp string
    rtype = StringField(required=True)        # request-type constant (PING, CREATE, ...)
    response = StringField(required=True)     # canned response that was generated
    request = StringField(required=True)      # base64 of the raw request bytes
    request_data = DictField()                # parsed JSON body, when present
    api = StringField(required=True)          # docker API version from the request
    sent = BooleanField(required=True)        # whether the response was written back
class CreateEvent(Document):
    """MongoDB record summarizing a container-create attempt (image + command)."""
    src_ip = StringField(required=True)
    src_port = IntField(required=True)
    dst_ip = StringField(required=True)
    dst_port = IntField(required=True)
    created_at = StringField(required=True)   # ISO-8601 timestamp string
    command = StringField(required=True)      # space-joined Cmd from the request JSON
    image = StringField(required=True)        # Image from the request JSON
async def add_mongo_results(results):
    """Persist connection results to MongoDB (no-op unless USING_MONGO).

    Every result becomes a GeneralEvent; container-create attempts that
    carried a JSON body additionally produce a CreateEvent summary.  Saves
    are best-effort -- a failed save must not take down the listener -- but
    the original bare ``except: pass`` hid every failure (including
    validation errors), so failures are now logged.
    """
    for result in results:
        gr = GeneralEvent(**result)
        ce = None
        if gr.rtype == CREATE and gr.request_data:
            kargs = {}
            Cmd = gr.request_data.get('Cmd', [])
            kargs['command'] = ' '.join(Cmd)
            # NOTE(review): default [] feeds a StringField -- a request with
            # no Image will fail validation on save; confirm intended
            kargs['image'] = gr.request_data.get('Image', [])
            for k in ['src_ip', 'dst_ip', 'src_port', 'dst_port', 'created_at']:
                kargs[k] = result[k]
            ce = CreateEvent(**kargs)
        if USING_MONGO:
            try:
                gr.save()
            except Exception:
                logging.exception("failed to save GeneralEvent")
            if ce:
                try:
                    ce.save()
                except Exception:
                    logging.exception("failed to save CreateEvent")
async def add_elk_results(results):
    """Placeholder for shipping results to Elasticsearch/ELK; currently a no-op."""
    # add results to elk from here
    pass
async def send_slack_notifications(results):
    """Post create/recon alerts to a Slack incoming webhook.

    Uses the module-level WEBHOOK_PAYLOAD/SLACK_KARGS configuration; returns
    silently unless USING_SLACK was enabled at startup.  Only CREATE (with a
    JSON body) and GET_VERSION results generate a message.
    """
    payload = WEBHOOK_PAYLOAD.copy()
    payload.update(SLACK_KARGS)
    # the webhook URL is the POST target, not part of the JSON body
    webhook_url = payload.get('webhook')
    del payload['webhook']
    logging.info("Loging results to slack: {}".format(USING_SLACK))
    if not USING_SLACK:
        return
    for result in results:
        if result['rtype'] == CREATE and result['request_data']:
            kargs = result.copy()
            r = result['request_data'].get('Cmd', [])
            kargs['command'] = " ".join(r)
            kargs['image'] = result['request_data'].get('Image', [])
            # prefer the externally visible IP when the startup lookup worked
            kargs['dst_ip'] = MY_IP if MY_IP else kargs['dst_ip']
            # message = ("{src_ip}:{src_port} => {dst_ip}:{dst_port} creating docker image:{image} for \'\'\'{command}\'\'\'".format(**kargs))
            message = ("1. *Attempting to create an image for API: {api}* \n2. Source: *{src_ip}:{src_port}* \n3. Destination: *{dst_ip}:{dst_port}*\n4. Image: *{image}*\n5. Command: `{command}`".format(**kargs))
            payload['text'] = "Alert: docker create"
            blocks = [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": message
                    }
                }
            ]
            payload['blocks'] = blocks
            requests.post(webhook_url, data=json.dumps(payload), headers={'Content-Type': 'application/json'})
        elif result['rtype'] == GET_VERSION:
            kargs = result.copy()
            kargs['dst_ip'] = MY_IP if MY_IP else kargs['dst_ip']
            message = ("1. *Attempting recon for API: {api}* \n2. Source: *{src_ip}:{src_port}*\n3. Destination: *{dst_ip}:{dst_port}*".format(**kargs))
            # NOTE(review): same alert text as the create branch -- looks like
            # copy/paste; confirm whether a "recon" label was intended
            payload['text'] = "Alert: docker create"
            blocks = [
                {
                    "type": "section",
                    "text": {
                        "type": "mrkdwn",
                        "text": message
                    }
                }
            ]
            payload['blocks'] = blocks
            requests.post(webhook_url, data=json.dumps(payload), headers={'Content-Type': 'application/json'})
async def send_wbx_teams_notifications(results):
    """Post create/recon alerts to a Webex Teams incoming webhook as markdown.

    Returns silently unless WBX_TEAMS_WEBHOOK was configured at startup.
    Only CREATE (with a JSON body) and GET_VERSION results generate a message.
    """
    webhook_url = WBX_TEAMS_WEBHOOK
    logging.info("Loging results to wbx_webhook: {}".format(WBX_TEAMS_WEBHOOK))
    if WBX_TEAMS_WEBHOOK is None:
        return
    for result in results:
        if result['rtype'] == CREATE and result['request_data']:
            kargs = result.copy()
            r = result['request_data'].get('Cmd', [])
            kargs['command'] = " ".join(r)
            kargs['image'] = result['request_data'].get('Image', [])
            # prefer the externally visible IP when the startup lookup worked
            kargs['dst_ip'] = MY_IP if MY_IP else kargs['dst_ip']
            message = ("1. **Attempting to create an image for API: {api}** \n2. **Source:** {src_ip}:{src_port} \n3. **Destination:** {dst_ip}:{dst_port}\n4. **Image:** {image}\n5. **Command:** `{command}`".format(**kargs))
            logging.info("Sending results {} to wbx_webhook".format(result['rtype']))
            requests.post(webhook_url, data=json.dumps({'markdown': message}), headers={'Content-Type': 'application/json'})
        elif result['rtype'] == GET_VERSION:
            kargs = result.copy()
            kargs['dst_ip'] = MY_IP if MY_IP else kargs['dst_ip']
            message = ("1. **Attempting recon for API:** {api}\n2. **Source:** {src_ip}:{src_port}\n3. **Destination:** {dst_ip}:{dst_port}".format(**kargs))
            logging.info("Sending results {} to wbx_webhook".format(result['rtype']))
            requests.post(webhook_url, data=json.dumps({'markdown': message}), headers={'Content-Type': 'application/json'})
async def honeypot_next_client(server, terminate_with_error=True, error_message=ERROR_MESSAGE):
    """Accept one client from ``server`` and run the honeypot exchange.

    When the exchange began with a PING and ``terminate_with_error`` is set,
    a fabricated 500 response is sent so the docker client gives up, and an
    ERROR result is appended.  Returns the list of result dicts.

    NOTE(review): the accepted socket is never closed here (or by callers) --
    confirm whether connections are meant to be left to time out.
    """
    client, address = server.accept()
    client.settimeout(3.0)
    results = await honeypot_connection(client, address)
    if len(results) < 2 or results[0]['rtype'] != PING:
        logging.info('Not a full honeypot connection')
    elif terminate_with_error and len(results) >= 1 and results[0]['rtype'] == 'PING':
        created_at = get_iso_time()
        api = results[1]['api'] if results[1]['api'] else API
        rdata = generate_error(api=api, error_message=error_message)
        src_ip, src_port = client.getpeername()
        dst_ip, dst_port = client.getsockname()
        # NOTE(review): this dict has no 'request' key, so GeneralEvent.save()
        # on it fails required-field validation (failure is swallowed downstream)
        result = {'src_ip': src_ip, 'src_port': src_port, 'dst_ip': dst_ip, 'dst_port': dst_port, 'created_at': created_at,
                  'rtype': ERROR, 'response': rdata, 'request_data': None, 'api': api, 'sent': False}
        results.append(result)
        try:
            client.send(rdata.encode('ascii'))
            # NOTE(review): rebinding the local to True looks like a leftover;
            # presumably result['sent'] = True was intended
            result = True
        except:
            pass
    return results
async def send_emails(results):
    """Summarize create attempts for email notification (SMTP send is a stub).

    Fixes two defects in the original: the guard combined its conditions with
    ``and``, so ``USING_EMAIL`` False with a None cc_list slipped past the
    early return and crashed on ``cc_list[0]``; and the loop dereferenced
    ``result['request_data']`` which is None for non-CREATE results.
    """
    cc_list = EMAIL_KARGS['cc_list']
    # bail out unless email is enabled AND a non-empty cc_list is configured
    if not USING_EMAIL or not isinstance(cc_list, list) or not cc_list:
        return
    subject = EMAIL_KARGS['subject']
    to = cc_list[0]
    cc = cc_list[1:] if len(cc_list) > 1 else None
    _from = DOCKER_HP_EMAIL
    events = []
    for result in results:
        # only create attempts with a JSON body carry a command/image
        if result['rtype'] != CREATE or not result['request_data']:
            continue
        kargs = result.copy()
        cmd = result['request_data'].get('Cmd', [])
        kargs['command'] = " ".join(cmd)
        kargs['image'] = result['request_data'].get('Image', [])
        msg = ("{src_ip}:{src_port} creating image:{image} '''{command}'''".format(**kargs))
        events.append(msg)
    if len(events):
        # configure simple email
        # send email to server
        pass
async def capture_results(results):
    """Fan results out to every configured sink and log create attempts."""
    await add_mongo_results(results)
    await add_elk_results(results)
    await send_slack_notifications(results)
    await send_wbx_teams_notifications(results)
    # await send_emails(results)
    for result in results:
        # only container-create attempts with a JSON body are worth logging
        if result['rtype'] != CREATE or not result['request_data']:
            continue
        info = result.copy()
        info['command'] = " ".join(result['request_data'].get('Cmd', []))
        info['image'] = result['request_data'].get('Image', [])
        logging.info("{src_ip}:{src_port} creating image:{image} '''{command}'''".format(**info))
async def doit(server, terminate_with_error, error_message):
    """Accept and fully handle one client, reporting results to all sinks.

    Per-connection failures are logged and swallowed so the accept loop keeps
    running.  The original bare ``except:`` also trapped KeyboardInterrupt and
    SystemExit, which must propagate to let the process stop.
    """
    try:
        results = await honeypot_next_client(server, terminate_with_error=terminate_with_error, error_message=error_message)
        await capture_results(results)
    except Exception:
        traceback.print_exc()
async def serve_forever(server=None, terminate_with_error=True, error_message=ERROR_MESSAGE):
    """Handle clients one at a time on ``server`` (or a fresh listener on
    port 9090) until the module-level KEEP_WORKING flag goes False."""
    listener = server if server else create_listener_sock(9090)
    while KEEP_WORKING:
        await doit(listener, terminate_with_error, error_message)
def main(port, terminate_with_error, error_message, hostname=None, mport=None, database=None, username=None, password=None):
    """Per-process entry point: optionally connect to mongo, then run the
    accept loop for ``port`` on a dedicated asyncio event loop (runs forever).

    NOTE(review): relies on USING_MONGO (and the other USING_* globals set in
    __main__) being inherited by the child process -- true when
    multiprocessing forks, not when it spawns; confirm target platform.
    """
    if USING_MONGO:
        # mongoengine global connection for this worker process
        connect(host=hostname,
                port=mport,
                db=database,
                username=username,
                password=password)
    loop = asyncio.get_event_loop()
    loop.create_task(serve_forever(server=create_listener_sock(port),
                                   terminate_with_error=terminate_with_error,
                                   error_message=error_message))
    loop.run_forever()
if __name__ == "__main__":
    # enable the accept loop for the worker processes, then parse CLI args
    KEEP_WORKING = True
    args = parser.parse_args()
    dargs = vars(args)
    terminate_with_error = args.terminate_with_error
    error_message = args.error_message
    hostname = None
    port = None
    username = None
    password = None
    database = None
    # mongo connection parameters handed to each worker via Process kwargs
    mongo_kargs = {
        "hostname":hostname,
        "mport":port,
        "username":username,
        "password":password,
        "database":database,
    }
    if dargs['use_mongo']:
        # NOTE(review): these module globals reach the workers only when
        # multiprocessing forks (Linux); with spawn they keep their defaults
        USING_MONGO = True
        mongo_kargs['hostname'] = dargs['mongo_host']
        mongo_kargs['mport'] = dargs['mongo_port']
        mongo_kargs['username'] = dargs['mongo_user']
        mongo_kargs['password'] = dargs['mongo_pass']
        mongo_kargs['database'] = dargs['mongo_db']
        #connect(host=hostname,
        # port=port,
        # db=database,
        # username=username,
        # password=password)
    if dargs['slack']:
        SLACK_KARGS["channel"] = dargs["slack_channel"]
        SLACK_KARGS["username"] = dargs["slack_username"]
        SLACK_KARGS["webhook"] = dargs["slack_webhook"]
        # NOTE(review): this adds an 'emoticon' key, but the payload is built
        # from 'icon_emoji' -- the CLI emoticon is never actually sent
        SLACK_KARGS["emoticon"] = dargs["slack_emoticon"]
        can_doit = True
        # slack only turns on when every SLACK_KARGS value was provided
        for k, v in SLACK_KARGS.items():
            if v is None:
                can_doit = False
                break
        USING_SLACK = can_doit
    if dargs['wbx']:
        WBX_TEAMS_WEBHOOK = dargs["wbx_webhook"]
        USING_WBX_TEAMS = WBX_TEAMS_WEBHOOK is not None
    if dargs['email']:
        EMAIL_KARGS["username"] = dargs["email_username"]
        EMAIL_KARGS["password"] = dargs["email_password"]
        EMAIL_KARGS["server"] = dargs["email_server"]
        EMAIL_KARGS["port"] = dargs["email_port"]
        EMAIL_KARGS["cc_list"] = dargs["email_cc_list"]
        EMAIL_KARGS["subject"] = dargs['email_notify_subject']
        can_doit = True
        # email needs at least server, port, and a cc_list to be configured
        for k in ['server', 'port', 'cc_list']:
            if EMAIL_KARGS[k] is None:
                can_doit = False
                break
        USING_EMAIL = can_doit
    # one worker process per requested port; the parent just babysits them
    processes = []
    try:
        for port in dargs['ports']:
            p = Process(target=main, args=(port, terminate_with_error, error_message), kwargs=mongo_kargs)
            p.start()
            processes.append(p)
        # NOTE(review): once every worker has exited this loop busy-spins
        # without sleeping -- confirm intended
        while True:
            if any([p.is_alive() for p in processes]):
                sleep(5.0)
    except KeyboardInterrupt:
        KEEP_WORKING = False
        for p in processes:
            p.terminate()
        [p.join() for p in processes]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment