Skip to content

Instantly share code, notes, and snippets.

@briceburg
Last active September 28, 2017 03:34
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save briceburg/fdfe51b64c918e2da951b278d9f57646 to your computer and use it in GitHub Desktop.
Save briceburg/fdfe51b64c918e2da951b278d9f57646 to your computer and use it in GitHub Desktop.
[patch] adds logs service, general fixes
diff --git a/ansible/playbooks/aws-swarm-mk.yml b/ansible/playbooks/aws-swarm-mk.yml
index f181725..a082932 100644
--- a/ansible/playbooks/aws-swarm-mk.yml
+++ b/ansible/playbooks/aws-swarm-mk.yml
@@ -166,3 +166,12 @@
docker_experimental_mode: yes
roles:
- docker-ce
+
+ tasks:
+ # @TODO hack for now - we ought to work out using 172. or 192. as default subnet
+ # https://github.com/moby/moby/issues/27399#issuecomment-256928125
+ - name: set resolver to 8.8.8.8
+ copy:
+ content: "nameserver 8.8.8.8"
+ dest: /etc/resolv.conf
+
diff --git a/ansible/playbooks/swarm-certificates.yml b/ansible/playbooks/swarm-certificates.yml
index 8fe5d36..4178d6f 100644
--- a/ansible/playbooks/swarm-certificates.yml
+++ b/ansible/playbooks/swarm-certificates.yml
@@ -11,7 +11,7 @@
regenerate: no
secrets_dir: "{{ playbook_dir }}/../../tmp"
tasks:
- - stat: path="{{ certificates_path }}/client-key.pem"
+ - stat: path="{{ certificates_path }}/key.pem"
register: client_key
- name: prepare certificate generation script
diff --git a/ansible/inventory/aws-phishme-homework/group_vars/all/swarm.yml b/ansible/inventory/aws-phishme-homework/group_vars/all/swarm.yml
index 0a28bf6..a7acbe1 100644
--- a/ansible/inventory/aws-phishme-homework/group_vars/all/swarm.yml
+++ b/ansible/inventory/aws-phishme-homework/group_vars/all/swarm.yml
@@ -2,3 +2,6 @@
swarm_manager_count: 1
swarm_worker_count: 1
+
+swarm_services:
+ logs:
diff --git a/services/logs/build/.gitignore b/services/logs/build/.gitignore
new file mode 100644
index 0000000..ddee285
--- /dev/null
+++ b/services/logs/build/.gitignore
@@ -0,0 +1 @@
+/fluent.conf
diff --git a/services/logs/build/Dockerfile b/services/logs/build/Dockerfile
new file mode 100644
index 0000000..10cc7ac
--- /dev/null
+++ b/services/logs/build/Dockerfile
@@ -0,0 +1,10 @@
+FROM fluent/fluentd:v0.14-onbuild
+
+RUN ( \
+ apk add --update --no-cache --virtual .build-deps \
+ build-base \
+ ruby-dev && \
+ gem install fluent-plugin-s3 &&\
+ gem sources --clear-all &&\
+ apk del .build-deps &&\
+true )
diff --git a/services/logs/build/fluent.conf.j2 b/services/logs/build/fluent.conf.j2
new file mode 100644
index 0000000..a3499b8
--- /dev/null
+++ b/services/logs/build/fluent.conf.j2
@@ -0,0 +1,54 @@
+# logs all events to stdout
+<filter **>
+ @type stdout
+</filter>
+
+<source>
+ @type forward
+ @id input1
+ @label @null
+ port 24224
+</source>
+
+# directs :5140 syslog traffic to S3
+<source>
+ @type syslog
+ @id input2
+ @label @s3
+ port 5140
+ tag rsyslog
+</source>
+
+<label @s3>
+ <match **>
+ @type s3
+
+ auto_create_bucket false
+ store_as json
+
+ aws_key_id {{ swarm_services.logs.aws_access_key | d(lookup('env','AWS_ACCESS_KEY_ID')) }}
+ aws_sec_key {{ swarm_services.logs.aws_secret_key | d(lookup('env','AWS_SECRET_ACCESS_KEY')) }}
+ s3_region {{ swarm_services.logs.region | d(ec2_region) }}
+ s3_bucket {{ swarm_services.logs.bucket | d(project_name + '-swarm-logs') }}
+ path {{ swarm_services.logs.prefix | d('""') }}
+
+ <format>
+ @type json
+ </format>
+
+ <buffer tag,time>
+ @type file
+ path /fluentd/log/s3-{{ swarm_services.logs.bucket | d(project_name + '-swarm-logs') }}
+ timekey 3600 # 1 hour partition
+ timekey_wait 10m
+ timekey_use_utc true
+
+ </buffer>
+ </match>
+</label>
+
+<label @null>
+ <match **>
+ @type null
+ </match>
+</label>
diff --git a/services/logs/build/plugins/.gitkeep b/services/logs/build/plugins/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/services/logs/docker-compose.yml b/services/logs/docker-compose.yml
new file mode 100644
index 0000000..a7f9686
--- /dev/null
+++ b/services/logs/docker-compose.yml
@@ -0,0 +1,38 @@
+---
+# log aggregation for the swarm. logspout runs globally, tails container
+# output from the docker socket, and forwards it as syslog to fluentd,
+# which ships events to S3.
+#
+# NOTE(review): port 5140 is described as udp below but is mapped without /udp
+version: '3'
+services:
+ fluentd:
+ image: gcr.io/briceburg-phishme/swarm-service:logs-build
+ build: ./build/
+ environment:
+ # @TODO resolve cyclical logging
+ LOGSPOUT: ignore
+ ports:
+ - "24224:24224" # for fluentd tcp traffic
+ - "5140:5140" # for rsyslog udp traffic
+ networks:
+ - default
+
+ logspout:
+ # https://hub.docker.com/r/gliderlabs/logspout/
+ image: gliderlabs/logspout:v3.2.3
+ command: 'syslog://fluentd:5140'
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock:ro
+ # depends_on is a "no-op" in docker swarm stack deploy...
+ #depends_on:
+ # we probably want a HEALTHCHECK in fluentd's dockerfile
+ # - fluentd
+ networks:
+ - default
+ deploy:
+ mode: global
+
+# networks:
+# proxy-tier:
+# external: true
diff --git a/services/logs/tasks/pre-deploy.yml b/services/logs/tasks/pre-deploy.yml
new file mode 100644
index 0000000..4b26b25
--- /dev/null
+++ b/services/logs/tasks/pre-deploy.yml
@@ -0,0 +1,22 @@
+---
+
+- name: "ensure {{ swarm_services.logs.bucket | d(project_name + '-swarm-logs') }} destination bucket in {{ swarm_services.logs.region | d(ec2_region) }}"
+ s3_bucket:
+ aws_access_key: "{{ swarm_services.logs.aws_access_key | d(lookup('env','AWS_ACCESS_KEY_ID')) }}"
+ aws_secret_key: "{{ swarm_services.logs.aws_secret_key | d(lookup('env','AWS_SECRET_ACCESS_KEY')) }}"
+ name: "{{ swarm_services.logs.bucket | d(project_name + '-swarm-logs') }}"
+ region: "{{ swarm_services.logs.region | d(ec2_region) }}"
+
+- name: render fluent.conf
+ template:
+ src: "{{ services_dir }}/{{ service }}/build/fluent.conf.j2"
+ dest: "{{ services_dir }}/{{ service }}/build/fluent.conf"
+ tags:
+ - build
+
+- name: "build and publish '{{ service }}'"
+ shell: docker-compose build --pull && docker-compose push
+ args:
+ chdir: "{{ services_dir }}/{{ service }}"
+ tags:
+ - build
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment