Skip to content

Instantly share code, notes, and snippets.

View kosho's full-sized avatar

Kosho Owa kosho

View GitHub Profile
#
# Elastic Stack 6.3 Release Webinar
#
### SQL ###
# Create the EMP table used by the SQL demo.
# NOTE(review): the request body is truncated in this capture — recover the
# full mapping from the original gist before reusing.
PUT emp
{
# Rollup the NYC Taxi index.
# Create a rollup job; it covers documents already indexed as well as
# documents indexed after the job is created. The cron below fires every
# 30 seconds (Quartz syntax). Body truncated in this capture.
PUT _xpack/rollup/job/nyc-taxi-yellow
{
"index_pattern": "nyc-taxi-yellow-*",
"rollup_index": "nyc-taxi-yellow_rollup",
"cron": "*/30 * * * * ?",
@kosho
kosho / elasticsearch-rollup-apache.js
Last active June 14, 2018 07:11
Elasticsearch rollup operations
# Rollup the Apache sample index.
# Inspect the source index first.
GET apache_elastic_example
# Create a rollup job; it covers documents already indexed as well as
# documents indexed after the job is created.
# NOTE(review): request body truncated in this capture — recover the full
# job definition from the original gist before reusing.
PUT _xpack/rollup/job/apache_elastic_example
{
"index_pattern": "apache_elastic_example",
# Query the earthquake dataset with X-Pack SQL; format=txt returns a
# plain-text table. Selects the 10 most recent quakes with magnitude > 7.
POST /_xpack/sql?format=txt
{
"query": "SELECT \"@timestamp\", mag, depth FROM \"ncedc-earthquakes\" where (mag > 7) ORDER BY \"@timestamp\" desc LIMIT 10"
}
# Translate an SQL statement into the equivalent Query DSL.
# NOTE(review): request body truncated in this capture.
POST /_xpack/sql/translate
[
{
"_id": "Earthquake",
"_type": "dashboard",
"_source": {
"title": "Earthquake",
"hits": 0,
"description": "",
"panelsJSON": "[\n {\n \"col\": 1,\n \"id\": \"Earthquake-Depth-Timeseries\",\n \"panelIndex\": 2,\n \"row\": 11,\n \"size_x\": 12,\n \"size_y\": 2,\n \"type\": \"visualization\"\n },\n {\n \"col\": 4,\n \"id\": \"Earthquake-Heatmap\",\n \"panelIndex\": 3,\n \"row\": 1,\n \"size_x\": 9,\n \"size_y\": 6,\n \"type\": \"visualization\"\n },\n {\n \"col\": 1,\n \"id\": \"Earthquake-Title\",\n \"panelIndex\": 4,\n \"row\": 1,\n \"size_x\": 3,\n \"size_y\": 6,\n \"type\": \"visualization\"\n },\n {\n \"col\": 1,\n \"id\": \"Earthquake-Count\",\n \"panelIndex\": 5,\n \"row\": 7,\n \"size_x\": 6,\n \"size_y\": 2,\n \"type\": \"visualization\"\n },\n {\n \"col\": 1,\n \"id\": \"Earthquake-Count-per-Magnitude-Stacked\",\n \"panelIndex\": 6,\n \"row\": 9,\n \"size_x
# Restart the Elastic Stack: stop any running Kibana (node) processes owned by
# the current user and the Elasticsearch daemon recorded in /tmp/es.pid, then
# start both again.
# $1 — stack version string (e.g. 6.3.0); also the install directory name.
pkill -SIGTERM -u "$UID" node
pkill -SIGTERM -F /tmp/es.pid
# Platform tag like "linux-x86_64", matching the Kibana directory name.
arch=$(echo "$(uname)-$(uname -m)" | tr '[:upper:]' '[:lower:]')
# Start Elasticsearch as a daemon; all paths are quoted so a $PWD containing
# spaces cannot split the arguments.
"$1/elasticsearch-$1/bin/elasticsearch" --daemonize --pidfile /tmp/es.pid -E path.data="$PWD/data-6" -E path.logs="$PWD/logs" -E script.painless.regex.enabled=true -E path.repo="$PWD/snapshots" -E cluster.indices.close.enable=true
# Start Kibana in the background (fixed: stray double slash in the path).
"$1/kibana-$1-$arch/bin/kibana" serve --log-file "$PWD/logs/kibana.log" &
#!/bin/bash
# Download Elasticsearch $1 (version string, e.g. 6.3.0) into ./$1 and install
# the X-Pack and kuromoji plugins.
# NOTE(review): $arch is unused here — presumably consumed by a companion
# Kibana script; kept for compatibility.
arch=$(echo "$(uname)-$(uname -m)" | tr '[:upper:]' '[:lower:]')
mkdir -p "$1"
# Abort if the directory cannot be entered, so the download does not land in
# (and the install does not run from) the wrong place.
cd "$1" || exit 1
# -f: fail on HTTP errors instead of piping an error page into tar;
# -sS: quiet, but still print errors; -L: follow redirects.
curl -fsSL "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-$1.tar.gz" | tar zxf -
cd "elasticsearch-$1" || exit 1
# Non-interactive X-Pack install: auto-confirm the license prompt.
yes y | bin/elasticsearch-plugin install x-pack
#bin/elasticsearch-plugin install analysis-icu
# Japanese morphological analyzer.
bin/elasticsearch-plugin install analysis-kuromoji
cd ..
# -*- coding: utf-8 -*-
"""Read a CSV file named on the command line and expose it as a DictReader.

The processing loop (presumably CSV -> JSON conversion, given the json
import) is not visible in this capture; this fragment only sets up the
reader, so ``csvfile``, ``header``, and ``reader`` must stay defined for
the continuation.
"""
import sys
import codecs
import csv
import json

# newline='' is the open mode the csv module documents for reader input;
# the previous 'rU' mode was deprecated in Python 3 and removed in 3.11.
# The file is intentionally left open for the life of the script, since
# ``reader`` streams from it lazily.
csvfile = open(sys.argv[1], newline='')
# Consume the header row once, then key every remaining row on those
# column names.
header_reader = csv.reader(csvfile)
header = next(header_reader)
reader = csv.DictReader(csvfile, header)
# Emit an Elasticsearch bulk-indexing stream for April 2016: one
# {"index" : {} } action line plus one document per minute of days 1-30,
# each document carrying a fixed status of 100.
for day in $(seq -f "%02g" 1 30); do
  for hour in $(seq -f "%02g" 0 23); do
    for minute in $(seq -f "%02g" 0 59); do
      echo '{"index" : {} }'
      echo "{\"@timestamp\": \"2016-04-${day}T${hour}:${minute}:00Z\", \"status\": 100}"
    done
  done
done