# CloudFormation snippet: ECS task definition running the Datadog agent alongside a Fluentd shipper for Logz.io
MonitoringTaskDefinition:
  Type: "AWS::ECS::TaskDefinition"
  Properties:
    NetworkMode: host
    Volumes:
      - Name: docker_sock
        Host:
          SourcePath: /var/run/docker.sock
      - Name: proc
        Host:
          SourcePath: /proc/
      - Name: cgroup
        Host:
          SourcePath: /cgroup/
      - Name: tmp
        Host:
          SourcePath: /tmp/
      - Name: containers
        Host:
          SourcePath: /var/lib/docker/containers/
    Family: logging-agent-task
    ContainerDefinitions:
      - Name: datadog-agent
        Image: datadog/agent:latest
        Cpu: 10
        Memory: 256
        Essential: true
        MountPoints:
          - ContainerPath: /var/run/docker.sock
            SourceVolume: docker_sock
            ReadOnly: true
          - ContainerPath: /host/sys/fs/cgroup
            SourceVolume: cgroup
            ReadOnly: true
          - ContainerPath: /host/proc
            SourceVolume: proc
            ReadOnly: true
        Environment:
          - Name: DD_API_KEY
            Value: !Sub ${DDAPIKEY}
          - Name: SD_BACKEND
            Value: docker
          - Name: DD_HOSTNAME
            Value: !Sub ${AWS::StackName}
        LogConfiguration:
          LogDriver: json-file
          Options:
            max-size: 50m
      - Name: logzio-docker
        ReadonlyRootFilesystem: false
        Image: 867872586470.dkr.ecr.us-east-1.amazonaws.com/fluentd_logzio_docker
        Cpu: 10
        Memory: 256
        Essential: true
        MountPoints:
          - ContainerPath: /var/run/docker.sock
            SourceVolume: docker_sock
            ReadOnly: true
          - ContainerPath: /tmp
            SourceVolume: tmp
          - ContainerPath: /var/lib/docker/containers
            SourceVolume: containers
        Environment:
          - Name: LOGZIO_TOKEN
            Value: !Sub ${LogzioToken}
          - Name: LOGZ_IO_URL_1
            Value: !Sub https://listener.logz.io:8071?token=${LogzioToken}
        LogConfiguration:
          LogDriver: json-file
          Options:
            max-size: 50m
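
The two !Sub references above (${DDAPIKEY} and ${LogzioToken}) assume the template declares parameters with those names. A minimal sketch of the matching Parameters block (the parameter names come from the task definition; NoEcho and the descriptions are assumptions):

Parameters:
  DDAPIKEY:
    Type: String
    NoEcho: true    # assumed: keeps the Datadog API key out of console/describe-stacks output
    Description: Datadog API key for the datadog-agent container
  LogzioToken:
    Type: String
    NoEcho: true    # assumed: keeps the Logz.io shipping token out of console/describe-stacks output
    Description: Logz.io shipping token used by the logzio-docker container

Next is the Dockerfile for the custom fluentd_logzio_docker image referenced above.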

FROM fluent/fluentd:stable
RUN apk add --update --virtual .build-deps \
        sudo build-base ruby-dev \
    && sudo gem install \
        fluent-plugin-concat \
        fluent-plugin-logzio \
        fluent-plugin-record-reformer \
        fluent-plugin-record-modifier \
        fluent-plugin-ecs-metadata-filter \
        fluent-plugin-detect-exceptions \
    && sudo gem sources --clear-all \
    && apk del .build-deps \
    && rm -rf /var/cache/apk/* \
        /home/fluent/.gem/ruby/2.3.0/cache/*.gem
COPY fluent.conf /fluentd/etc/fluent.conf
COPY entrypoint.sh /bin/entrypoint.sh
CMD fluentd -c /fluentd/etc/${FLUENTD_CONF} $FLUENTD_OPT
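
The task definition pulls this image from ECR, so it has to be built and pushed there first. A rough sketch of that, assuming the AWS CLI v2 and the account/region taken from the Image URI above:

aws ecr get-login-password --region us-east-1 \
  | docker login --username AWS --password-stdin 867872586470.dkr.ecr.us-east-1.amazonaws.com
docker build -t fluentd_logzio_docker .
docker tag fluentd_logzio_docker:latest 867872586470.dkr.ecr.us-east-1.amazonaws.com/fluentd_logzio_docker:latest
docker push 867872586470.dkr.ecr.us-east-1.amazonaws.com/fluentd_logzio_docker:latest

The fluent.conf that the Dockerfile copies into the image follows.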

# This source tails the logs created by the Docker json-file logging driver
<source>
  @type tail
  tag removeme.ecs.*
  path /var/lib/docker/containers/*/*-json.log
  pos_file fluentd-docker.pos
  format json
  refresh_interval 10s
  time_key time
  time_format %Y-%m-%dT%H:%M:%S
</source>
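
For reference, each line this source reads is a single JSON object written by Docker's json-file driver, roughly of this shape (illustrative values only; the time field is what time_key/time_format above parse):

{"log":"connection accepted from 10.0.1.12\n","stream":"stdout","time":"2018-05-14T12:34:56.789Z"}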

# This matches the removeme tag to detect exceptions.
<match removeme.**>
  @type detect_exceptions
  remove_tag_prefix removeme
  message log
  languages all
  multiline_flush_interval 0.1
</match>

# This makes use of the ECS metadata plugin to enrich records with ECS metadata
<filter **>
  @type ecs_metadata
</filter>

# This match updates the tag to include all ECS metadata for routing
<match ecs.**>
  @type record_reformer
  tag ${record['ecs']}
</match>

# This copies the Fluentd "log" field over to the "message" field for the Kibana Discover page at Logz.io
<match ecs.**>
  @type record_reformer
  renew_record false
  enable_ruby false
  tag logcopy
  <record>
    message ${log}
  </record>
</match>

# This drops unnecessary keys (just "log" in this example, but it could be extended to others)
<match logcopy>
  @type record_reformer
  remove_keys log
  renew_record false
  enable_ruby false
  tag final
</match>
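
Taken together, the two record_reformer matches above turn a record like {"log":"Started GET /health","stream":"stdout"} into {"message":"Started GET /health","stream":"stdout"}: the first copies log into message, the second drops the now-redundant log key (illustrative record; the field values are made up).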

# This ships the buffered events to Logz.io; LOGZ_IO_URL_1 is the environment variable set in the task definition above
<match **>
  @type logzio_buffered
  endpoint_url "#{ENV['LOGZ_IO_URL_1']}"
  output_include_time true
  output_include_tags true
  buffer_type file
  buffer_path /tmp/logsz_buffer
  flush_interval 1s
  buffer_chunk_limit 1m   # Logz.io has a bulk limit of 10 MB; keep this at 1 MB to avoid oversized bulks
</match>

# This is for sending to a file for testing prior to sending to Logz.io
#<match **>
#  @type file
#  path /tmp/test0
#  time_slice_format %Y%m%d
#  time_slice_wait 10m
#  time_format %Y%m%dT%H%M%S%z
#  utc
#</match>