Skip to content

Instantly share code, notes, and snippets.

@logpacker
Last active July 26, 2016 10:58
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save logpacker/e40f8694340709b64a96 to your computer and use it in GitHub Desktop.
server.ini
; server.ini -- LogPacker server configuration. The Agent does not need this file.
; The REST API also reads this config to locate the proper Storage connection.
; NOTE(review): these top-level keys are global (outside any [section]);
; the consuming parser must support sectionless keys -- not all INI parsers do.
; TCP server address. It is also the NetworkAPI endpoint: sending the string
; "cluster" to it returns NetworkInfo.
host=127.0.0.1
port=9999
; Comma-separated list of the other nodes in the cluster (do not list this server itself)
;cluster.nodes=
; Delete logs older than this many days from storage; 0 disables cleanup (the default)
cleanolderthan=0
; Comma-separated list of enabled storage providers; the daemon posts every
; message to all listed storages. Example for multiple: providers=elasticsearch,mysql
; NOTE: for Elasticsearch 2.0+ use the elasticsearch2 provider instead
providers=elasticsearch
; PublicAPI accepts incoming messages from any client (JS, SDK, etc.)
[PublicAPI]
; 0.0.0.0 binds all IPv4 interfaces on the server
host=0.0.0.0
; this port must be reachable from clients (open it in your web server / firewall)
port=9997
; Storages
; One or more storages can be used to persist LogPacker data.
; Configure only the storages you listed in the "providers=" option above.
[FileStorage]
; path is a symlink that always points at the latest data file
path=/tmp/logpacker-storage.json
; when this limit is reached a new file is created and the symlink is repointed
; (presumably a count of records per file -- confirm against the daemon docs)
limit=10000
[ElasticsearchStorage]
; https://www.elastic.co/guide/en/elasticsearch/guide/current/getting-started.html
; Elasticsearch HTTP endpoint (9200 is the stock default port)
host=localhost
port=9200
; Main index for log events; a logpacker_tags index is auto-created for tags
index=logpacker
[MysqlStorage]
; https://dev.mysql.com/usingmysql/get_started.html
; Connection string -- looks like a go-sql-driver/mysql DSN:
; user[:password]@tcp(host:port)/dbname?params  (confirm against the daemon's driver)
addr=root@tcp(localhost:3306)/logpacker?charset=utf8
; Table will be created automatically
table=logpacker_event_collector
[PostgresqlStorage]
; http://www.postgresql.org/docs/9.3/static/tutorial-start.html
; libpq-style keyword/value connection string.
; NOTE(review): whether the surrounding double quotes are stripped or kept as part
; of the value is parser-dependent -- verify the consumer strips them before use.
addr="host=localhost port=5432 user=postgres password= dbname=postgres sslmode=disable"
; Table will be created automatically
table=logpacker_event_collector
[MongodbStorage]
; https://docs.mongodb.org/getting-started/shell/
; Connection string, including user and pass if necessary
; NOTE: fixed port from 2701 to 27017 -- MongoDB's standard default port is 27017,
; so the previous value looked like a typo (revert if 2701 was intentional)
addr=mongodb://localhost:27017
; DB will be created automatically
dbname=logpacker
; collection will be created automatically
collection=logpacker_event_collector
[HbaseStorage]
; https://hbase.apache.org/book.html
; http://www.cloudera.com/documentation/cdh/5-0-x/CDH5-Installation-Guide/cdh5ig_hbase_standalone_start.html
; TCP address (host:port); 9090 is the default HBase Thrift server port,
; so this presumably targets the Thrift interface -- confirm
addr=localhost:9090
; Table will be created automatically
table=logpacker_event_collector
[InfluxdbStorage]
; https://influxdb.com/docs/v0.9/introduction/getting_started.html
; HTTP API endpoint (8086 is the stock default port)
addr=http://127.0.0.1:8086
; Optional credentials for the connection
;user=
;pass=
; DB will be created automatically
dbname=logpacker
; Table will be created automatically
table=logpacker_event_collector
[KafkaStorage]
; http://kafka.apache.org/documentation.html
; https://www.digitalocean.com/community/tutorials/how-to-install-apache-kafka-on-ubuntu-14-04
; Comma-separated list of Kafka brokers to connect to (host:port,host:port,...)
; NOTE: the LogPacker API does not work with the Kafka provider --
; Kafka is a messaging system, not a queryable store
brokers=127.0.0.1:9092
; Topic will be auto-created
topic=logpacker
; Optional: verify the SSL certificate chain
;verifyssl=
; Optional certificate file for TLS client authentication
;cert.file=
; Optional key file for TLS client authentication
;key.file=
; Optional certificate authority file for TLS client authentication
;ca.file=
[TarantoolStorage]
; http://tarantool.org/doc/getting_started.html
; Address as host:port; the default is localhost:3013
addr=localhost:3013
; Optional credentials (the admin user may be used if needed)
;user=
;pass=
; Space is created automatically (the connected user must have permission to do so)
space=logpacker
[MemcachedStorage]
; Key layout: IDs are saved under logpacker_keys, tags under logpacker_tags,
; agents under logpacker_agents, and each unique message under logpacker_<ID>
; Comma-separated server addresses, e.g. localhost:11211,localhost:11212
addr=127.0.0.1:11211
; Key prefix, needed to avoid key collisions with other applications
prefix=logpacker
[ClickHouseStorage]
; https://clickhouse.yandex/tutorial.html
; Transport protocol; only http is supported for now (tcp will be available soon)
transport=http
; host:port of the ClickHouse HTTP interface (8123 is the stock default port)
addr=localhost:8123
; Database name; default is logpacker
dbname=logpacker
; Table name; default is logpacker_event_collector
table=logpacker_event_collector
[PrestoDBStorage]
; https://prestodb.io/overview.html
; Connection URI in the form presto://host:port/catalog/schema.
; The default schema must be created manually; the hive catalog can be
; replaced with any other catalog supported by PrestoDB
addr=presto://localhost:8080/hive/default
; Table will be created automatically
table=logpacker_event_collector
[HiveStorage]
; https://hive.apache.org/
; host:port; only TCP mode is available right now, with SASL enabled
addr=localhost:10000
; Hive database (schema) to write into
dbname=default
; Table will be created automatically
table=logpacker_event_collector
[KairosDBStorage]
; https://kairosdb.github.io/docs/build/html/index.html
; HTTP API address
addr=http://localhost:8080
; Metric name; will be auto-created
metric=logpacker
[OpentsDBStorage]
; http://opentsdb.net/docs/build/html/index.html
; OpenTSDB telnet/HTTP endpoint (4242 is the stock default port)
host=localhost
port=4242
; Metric name written to OpenTSDB
metric=logpacker
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment