Skip to content

Instantly share code, notes, and snippets.

import json
import uuid
from exceptions import InvalidObject
class Manifest:
    """Container for manifest attribute declarations.

    NOTE(review): only these class attributes are visible in this fragment;
    the rest of the class may have been truncated — confirm against the
    original source.
    """

    # No attribute is mandatory for a manifest.
    REQUIRED_ATTRIBUTES = []
    # Attributes a manifest may optionally carry.
    OPTIONAL_ATTRIBUTES = ['executables', 'source_data']
import asyncio
import asyncio_redis
import json
import logging
# Log line layout: "<timestamp> - <LEVEL> - <message>".
FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
# Configure the root logger once at import time; INFO and above are emitted.
logging.basicConfig(format=FORMAT, level='INFO')
@kevinschoon
kevinschoon / Dockerfile
Last active August 29, 2015 14:17
Mesos
# Base image: Mesosphere's Mesos slave (Mesos 0.21.1 on Ubuntu 14.04).
from mesosphere/mesos-slave:0.21.1-1.1.ubuntu1404
# HTTPS transport is needed to fetch from the get.docker.com apt repo below.
RUN apt-get update && apt-get install -y apt-transport-https
# Register Docker's apt repository, then install the Docker engine (lxc-docker).
RUN echo deb https://get.docker.com/ubuntu docker main > /etc/apt/sources.list.d/docker.list
RUN apt-get update && apt-get install -y --force-yes lxc-docker
# Enable both the docker and the native mesos containerizers on this slave.
RUN echo 'docker,mesos' > /etc/mesos-slave/containerizers
# Allow executors up to five minutes to register — presumably to leave time
# for Docker image pulls; confirm against the deployment's needs.
RUN echo '5mins' > /etc/mesos-slave/executor_registration_timeout
@kevinschoon
kevinschoon / persistence.py
Last active August 29, 2015 14:16
S3 Persistence in a Docker Container
#!/usr/bin/env python3
"""
Simple script to maintain persistence between a local file system and an S3 bucket using the s3cmd command-line tool
and asyncio's event loop. A better version of this would likely use the Python Boto library and walk the file system
itself. This will run a job every five minutes using the s3cmd client to synchronize a local file system
with S3. This is meant to be run in a Docker volume container. This is NOT meant to store things such as database
files, etc. A better alternative is likely S3Fuse; however, that project is old and requires privileged access to
the Docker host.
"""
@kevinschoon
kevinschoon / beaver.conf
Created January 29, 2015 03:07
Watch stdout from Docker containers
[beaver]
format: json
logstash_version: 1
since_db_path: /var/log/since.db
transport: redis
redis_url: redis://{{ redis-target }}
redis_namespace: logstash:beaver
[/var/lib/docker/containers/**/*-json.log]
def good_exception():
    # Let the underlying OSError propagate untouched, preserving the
    # original exception type and full traceback for the caller.
    open('notrealfile')
def bad_exception():
    """Demonstrate the anti-pattern of re-wrapping a specific exception.

    Catching IOError only to re-raise it as a bare ``Exception`` hides the
    original exception type from callers. Kept as-is intentionally — this
    gist contrasts it with ``good_exception``.
    """
    try:
        open('notrealfile')
    except IOError as err:
        raise Exception(err)
#!/usr/bin/python
import os
import uuid
import redis
import multiprocessing
def flush_file(file_path, pointer):
    # NOTE(review): `pointer` is unused in this visible fragment — the gist
    # may be truncated; confirm against the full script.
    print(f'Flushing file: {file_path}')
class Vote(object):
"""
Votes are associated with destinations, each user is allowed to allocate 20
points to any of the four properties described below. The worst possible
score a destination may receive is zero.
"""
def __init__(self, max_points=20):
self.points = max_points
self.data = {
@kevinschoon
kevinschoon / gist:9468949
Created March 10, 2014 16:53
Search a Nested Python Dictionary
# Search through nested dictionaries
# Example fixture: a dictionary containing a nested dictionary and a list.
some_nested_dictionary = {
    'some_key': 'some string here',
    'another_key': 'another string here',
    'some_other_dict': {
        'yet_another_key': 'yet_another_string',
        'some_list': ['list_item_one', 'list_item_two', 'list_item_three'],
    },
}
def locate_with_loop(search_path, search_dict):
    """Walk *search_dict* following the keys/indices in *search_path*.

    Each element of ``search_path`` is applied as a subscript in turn, so
    the path may traverse nested dicts and lists alike.

    Returns the value found at the end of the path (the visible original
    fragment computed it but never returned it — fixed here).

    Raises KeyError, IndexError, or TypeError if a path step is invalid
    for the current container.
    """
    result = search_dict
    for step in search_path:
        result = result[step]
    return result
@kevinschoon
kevinschoon / gist:8176565
Created December 30, 2013 00:47
Extract Cloudfront log data with Logstash
filter {
if [type] == "cloudfront" {
csv {
separator => " "
columns => [ "date", "time", "x-edge-location", "sc-bytes", "c-ip", "cs-method", "Host", "cs-uri-stem", "sc-status", "Referer", "User-Agent", "cs-uri-query", "Cookie", "x-edge-result-type", "x-edge-request-id", "x-host-header", "cs-protocol", "cs-bytes" ]
add_field => [ "listener_timestamp", "%{date} %{time}" ]
}
date {
type => "cloudfront"