@eunsukimme
Last active March 7, 2024 09:29
Configuration Files for a Logging System with docker-elk, Filebeat and Node.js
# docker-compose.yml
version: "3.2"

services:
  elasticsearch:
    build:
      context: elasticsearch/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./elasticsearch/config/elasticsearch.yml
        target: /usr/share/elasticsearch/config/elasticsearch.yml
        read_only: true
      - type: volume
        source: elasticsearch
        target: /usr/share/elasticsearch/data
    ports:
      - "9200:9200"
      - "9300:9300"
    environment:
      ES_JAVA_OPTS: "-Xmx256m -Xms256m"
      discovery.type: single-node
    networks:
      - elk

  logstash:
    build:
      context: logstash/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./logstash/config/logstash.yml
        target: /usr/share/logstash/config/logstash.yml
        read_only: true
      - type: bind
        source: ./logstash/pipeline
        target: /usr/share/logstash/pipeline
        read_only: true
    ports:
      - "5000:5000"
      - "9600:9600"
    environment:
      LS_JAVA_OPTS: "-Xmx256m -Xms256m"
    networks:
      - elk
    depends_on:
      - elasticsearch

  kibana:
    build:
      context: kibana/
      args:
        ELK_VERSION: $ELK_VERSION
    volumes:
      - type: bind
        source: ./kibana/config/kibana.yml
        target: /usr/share/kibana/config/kibana.yml
        read_only: true
    ports:
      - "5601:5601"
    networks:
      - elk
    depends_on:
      - elasticsearch

  app:
    build:
      context: ./src
    volumes:
      - log-data:/app/logs
    ports:
      - "4000:4000"

  filebeat:
    build:
      context: ./filebeat
    volumes:
      - log-data:/var/log/server
    networks:
      - elk
    depends_on:
      - logstash

networks:
  elk:
    driver: bridge

volumes:
  elasticsearch:
  log-data:
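The elasticsearch, logstash and kibana services build with an ELK_VERSION argument. In the docker-elk setup this gist builds on, that value is normally supplied through a .env file next to docker-compose.yml; a minimal sketch, with the version assumed here to line up with the filebeat:7.4.2 image used below:

# .env (assumed; version picked to match the Filebeat image)
ELK_VERSION=7.4.2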
# src/Dockerfile
FROM node:12

WORKDIR /app
COPY ./package*.json ./
RUN npm install

ENV PORT=4000

COPY . .

CMD ["npm", "run", "start"]
# filebeat/Dockerfile
FROM docker.elastic.co/beats/filebeat:7.4.2

COPY filebeat.yml /usr/share/filebeat/filebeat.yml

USER root
RUN mkdir /var/log/server
RUN chown -R root /usr/share/filebeat
# filebeat/filebeat.yml
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - "/var/log/server/*.log"

output.logstash:
  hosts: ["logstash:5000"]
# kibana/config/kibana.yml
server.name: kibana
server.host: 0.0.0.0
elasticsearch.hosts: ["http://elasticsearch:9200"]
monitoring.ui.container.elasticsearch.enabled: true
## X-Pack security credentials
elasticsearch.username: kibana_system # change
elasticsearch.password: jUqW7o3nknwjDgiuyChB # change
// src/logger.js
import winston from "winston";
import "winston-daily-rotate-file";
import path from "path";
import { fileURLToPath } from "url";
import { dirname } from "path";

// ES Modules have no __dirname, so derive it from import.meta.url
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const {
  format: { combine, colorize, timestamp, json },
} = winston;

// Write logs to daily-rotated files under ./logs (the directory Filebeat reads)
const fileTransport = new winston.transports.DailyRotateFile({
  filename: "%DATE%.log",
  datePattern: "YYYY-MM-DD",
  maxSize: "20m",
  maxFiles: "14d",
  dirname: path.join(__dirname, "./logs"),
});

const logger = winston.createLogger({
  level: "info",
  format: combine(colorize(), timestamp(), json()),
  transports: [fileTransport],
});

// Stream interface so morgan can pipe HTTP access logs into winston
logger.stream = {
  write: (message) => {
    logger.info(message);
  },
};

export default logger;
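When wired up with morgan (see src/main.js below), each access log line gets wrapped in winston's JSON envelope and appended to logs/<date>.log. A written entry looks roughly like this (values are illustrative, and colorize() may additionally wrap the level in ANSI color codes):

{"level":"info","message":"::ffff:172.19.0.1 - - [07/Mar/2024:09:29:00 +0000] \"GET / HTTP/1.1\" 200 38 \"-\" \"curl/7.68.0\"\n","timestamp":"2024-03-07T09:29:00.123Z"}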
# logstash/pipeline/logstash.conf
input {
  beats {
    port => 5000
  }
}

filter {
  json {
    source => "message"    # the event's message field holds the JSON line produced by winston/morgan
    target => "parseJson"  # parse it and store the result under the parseJson key
  }
  grok {
    # parse the morgan line stored in parseJson.message (ip6, ip4, timestamp, verb, url, http version, status code)
    match => {
      "[parseJson][message]" => "%{IPV6:ipv6}:%{IPV4:ipv4} - - \[%{DATA:parsed_timestamp}\] \"%{WORD:verb} %{URIPATHPARAM:request} HTTP/%{NUMBER:httpversion}\" %{NUMBER:response}"
    }
  }
  # parse parsed_timestamp (CLF format) and store it as the event @timestamp (ISO8601)
  date {
    match => [ "parsed_timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
  }
}

# store the logs in Elasticsearch
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => "elastic"
    password => "GktDiZeZUJr5XDcb743C"
    index => "test-log-%{+YYYY.MM.dd}"
  }
}
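For reference, the grok pattern above targets a morgan "combined" line such as the following illustrative example (inside the Docker network the client address usually appears as an IPv4-mapped IPv6 address, which is what the %{IPV6}:%{IPV4} prefix is meant to capture):

::ffff:172.19.0.1 - - [07/Mar/2024:09:29:00 +0000] "GET / HTTP/1.1" 200 38 "-" "curl/7.68.0"

From such a line the filter extracts verb, request, httpversion and response, and the date filter turns parsed_timestamp into the event's @timestamp.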
# logstash/pipeline/logstash.conf (minimal variant without the filter stage)
input {
  beats {
    port => 5000  # handle beats traffic arriving on port 5000
  }
}

# store the logs in Elasticsearch
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => "elastic"                    # change
    password => "XVppQ06rfKJv50QMUyxP"   # change
    index => "test-log-%{+YYYY.MM.dd}"   # create a new daily index
  }
}
# logstash/config/logstash.yml
http.host: "0.0.0.0"
xpack.monitoring.elasticsearch.hosts: ["http://elasticsearch:9200"]
## X-Pack security credentials
xpack.monitoring.enabled: true
xpack.monitoring.elasticsearch.username: logstash_system # change
xpack.monitoring.elasticsearch.password: hZhaPfun06vzOb3Ydkoe # change
// src/main.js
import express from "express";
import morgan from "morgan";
import logger from "./logger.js";

const PORT = process.env.PORT || 4000;
const app = express();

// Pipe morgan's "combined" access logs into the winston logger defined above
app.use(morgan("combined", { stream: logger.stream }));
app.use(express.static("public"));

app.get("/", (req, res) =>
  res.status(200).json({ success: true, data: "Hello world!" })
);

app.listen(PORT, () => {
  console.log(`Server listening on port: ${PORT}`);
});

export default app;
{
  "type": "module",
  "name": "src",
  "version": "1.0.0",
  "main": "main.js",
  "repository": "https://github.com/deviantony/docker-elk.git",
  "author": "eunsukimme <eunsu.dev@gmail.com>",
  "license": "MIT",
  "scripts": {
    "start": "node main.js"
  },
  "dependencies": {
    "express": "^4.17.1",
    "morgan": "^1.10.0",
    "winston": "^3.3.3",
    "winston-daily-rotate-file": "^4.5.0"
  }
}