Skip to content

Instantly share code, notes, and snippets.

Avatar
🏠
Working from home

Megastef megastef

🏠
Working from home
  • Twitter: @seti321
  • Germany
View GitHub Profile
View sematextBrowserLogger.js
function sematextLogger (url, token) {
var sematextUrl = url || `https://logsene-receiver.sematext.com/${token}/_bulk/`
var logBuffer = ''
let consoleLog = console.log
let lineCount = 0
const sessionId = /SESS\w*ID=([^;]+)/i.test(document.cookie) ? RegExp.$1 : false
let consoleFunctions = {
debug: console.debug,
error: console.error,
warn: console.warn,
@megastef
megastef / test.html
Last active Jan 23, 2020
Log to Sematext From Web browser
View test.html
<html>
<body>
<script>
(function () {
const sematextLogToken = 'c0e39f27-xxxx-xxxx-9f27-8818a4f0b59d'
var sematextUrl = `https://logsene-receiver.sematext.com/${sematextLogToken}/_bulk/`
var logBuffer = ''
let consoleLog = console.log
let lineCount = 0
const sessionId = /SESS\w*ID=([^;]+)/i.test(document.cookie) ? RegExp.$1 : false
@megastef
megastef / demo.js
Created Nov 14, 2019
Ship metrics with the Node.js InfluxDB client to Sematext Cloud
View demo.js
const Influx = require('influx')
const os = require('os')
const influx = new Influx.InfluxDB({
host: 'spm-receiver.sematext.com',
port: 443,
database: 'metrics',
protocol: 'https'
})
setInterval(() => {
@megastef
megastef / st-agent-clusterrole.yml
Created Oct 2, 2019
Logagent with cluster role
View st-agent-clusterrole.yml
# st-agent-clusterrole.yml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: sematext-agent
labels:
app: sematext-agent
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
View logagent.conf
options:
printStats: 60
suppress: true
geoipEnabled: true
diskBufferDir: /tmp/sematext-logagent
input:
docker:
module: docker-logs
socket: /var/run/docker.sock
@megastef
megastef / webserver.js
Created Jul 24, 2019
Webserver for winston test
View webserver.js
var Logsene = require('winston-logsene')
var winston = require('winston')
const {createLogger, format} = require('winston')
// example for custom rewriter, e.g. add myServerIp field to all logs
var myServerIp = '10.0.0.12'
var logger = createLogger({
levels: winston.config.npm.levels,
@megastef
megastef / sound-measure.sh
Created Apr 3, 2018
SOX, record sound, output sox stats decibel value as JSON
View sound-measure.sh
# Script to output dB level from microphone
# output:
# {"db": -87.03}
# {"db": -86.87}
# ...
# Loops forever: records ~1 second from the default audio device, then
# prints the RMS level of that clip as a one-line JSON object.
while true
do
  # Record from the default device (-d) into a temp WAV, trimmed to 1 second.
  sox -b 32 -e unsigned-integer -r 96k -c 2 -d --clobber --buffer $((96000*2*10)) /tmp/soxrecording.wav trim 0 1 2> /dev/null
  # 'sox ... stats' writes its report to stderr. The ordering '2>&1 >/dev/null'
  # first sends stderr into the pipe, then discards stdout — so grep sees only
  # the stats report. Do not swap the two redirections.
  sox /tmp/soxrecording.wav -n stats 2>&1 >/dev/null | grep "RMS lev dB" | awk '{print "{\"db\": " $4 "}"}'
done
@megastef
megastef / sound-measure.sh
Created Apr 3, 2018
SOX, record sound, output sox stats decibel value as JSON
View sound-measure.sh
# Script to output dB level from microphone
# output:
# {"db": -87.03}
# {"db": -86.87}
# ...
# Loops forever: records ~1 second from the default audio device, then
# prints the RMS level of that clip as a one-line JSON object.
while true
do
  # Record from the default device (-d) into a temp WAV, trimmed to 1 second.
  sox -b 32 -e unsigned-integer -r 96k -c 2 -d --clobber --buffer $((96000*2*10)) /tmp/soxrecording.wav trim 0 1 2> /dev/null
  # 'sox ... stats' writes its report to stderr. The ordering '2>&1 >/dev/null'
  # first sends stderr into the pipe, then discards stdout — so grep sees only
  # the stats report. Do not swap the two redirections.
  sox /tmp/soxrecording.wav -n stats 2>&1 >/dev/null | grep "RMS lev dB" | awk '{print "{\"db\": " $4 "}"}'
done
@megastef
megastef / change-kibana-index.js
Last active Sep 20, 2017
Convert Kibana Dashboard objects
View change-kibana-index.js
// usage: node change-kibana-index.js 'objects_file.json' 'wazuh-alerts-*' TOKEN_wazuh_alerts
var fs = require('fs')
var kibanaObjects = JSON.parse(fs.readFileSync(process.argv[2]))
var newKibanaObjects = kibanaObjects.map(function (o) {
if (o._source && o._source.kibanaSavedObjectMeta && o._source.kibanaSavedObjectMeta.searchSourceJSON) {
var source = JSON.parse(o._source.kibanaSavedObjectMeta.searchSourceJSON)
if (source.index === process.argv[3]) {
source.index = process.argv[4]
}
@megastef
megastef / prom2elk.yml
Last active Oct 4, 2017
Prometheus metrics to Elasticsearch via logagent-js
View prom2elk.yml
input:
docker-prometheus:
module: command
# connect to metrics endpoint
command: curl http://127.0.0.1:9323/metrics
sourceName: prometheus_metrics
debug: false
# scrape data every 10 seconds
restart: 10