Skip to content

Instantly share code, notes, and snippets.

@junaid18183
Last active April 27, 2016 11:04
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save junaid18183/1a323e86edc1e61b16ddb05cbcd5a50a to your computer and use it in GitHub Desktop.
Save junaid18183/1a323e86edc1e61b16ddb05cbcd5a50a to your computer and use it in GitHub Desktop.
Dashing Widget for Elastic Search
This widget creates the Dashing dashboard for Elasticsearch indices
<% content_for(:title) { "Techops Nagios Dashboard" } %>
<script type='text/javascript'>
// Pin every widget to a fixed cell in the gridster grid (one entry per
// widget; col/row/size are in grid units) so tiles keep their place.
$(function() {
Dashing.gridsterLayout('[{"col":1,"row":1,"size_x":1,"size_y":2},{"col":2,"row":1,"size_x":2,"size_y":2},{"col":3,"row":3,"size_x":1,"size_y":1},{"col":2,"row":3,"size_x":1,"size_y":1},{"col":1,"row":3,"size_x":1,"size_y":1},{"col":1,"row":4,"size_x":1,"size_y":1},{"col":2,"row":5,"size_x":1,"size_y":1},{"col":3,"row":4,"size_x":1,"size_y":1},{"col":2,"row":4,"size_x":1,"size_y":1}]')
});
</script>
<!-- Widget tiles: each data-id must match an event name pushed by the
     job scripts (send_event 'elastic_cluster_health' /
     'elastic_search_indices'). -->
<div class="gridster">
<ul>
<li data-row="1" data-col="1" data-sizex="1" data-sizey="2">
<div data-id="elastic_cluster_health" data-view="List" data-unordered="true" data-title="Cluster health" data-moreinfo="Elastic Cluster Health"></div>
</li>
<li data-row="1" data-col="1" data-sizex="2" data-sizey="2">
<div data-id="elastic_search_indices" data-view="Raw" data-title="elastic_search Indices" style="background-color:#ff9618" ></div>
</li>
</ul>
</div>
#!/usr/bin/env ruby
require 'rubygems'
require 'net/http'
require 'json'
# Elasticsearch endpoint used by fetch() below.
@host = "search.juvensys.com"
@port = 80
##########################################################################################################################
# Perform an HTTP GET against the configured Elasticsearch endpoint
# (@host/@port) and return the raw response body as a String.
def fetch(resource)
url = URI.parse(format('http://%s:%d%s', @host, @port, resource))
Net::HTTP.get_response(url).body
end
##########################################################################################################################
# Build the data series for a per-node memory pie chart.
#
# node_stats - per-node Hash from the ES node-stats API; must contain
#              'host' and ['os']['mem'] used/free percentage fields.
#
# Returns a one-entry Hash: { host => { "memory" => [[label, pct], ...] } }.
def generate_pie(node_stats)
mem = node_stats['os']['mem']
series = [
  ['Used', mem['used_percent']],
  ['Free', mem['free_percent']]
]
# Keyed by the short host name ('host'), not the FQDN in 'name'.
{ node_stats['host'] => { 'memory' => series } }
end
##########################################################################################################################
# Extract selected metrics from a single node's stats document.
#
# node_stats - Hash for one node from /_nodes/stats/os,jvm,indices.
#
# Returns a Hash grouping raw counters by metric family
# ('os'/'jvm'/'gc'/'gc_time'/'cache'/'docs'/'ops'/'store').
def parse_stats(node_stats)
jvm = node_stats['jvm']
idx = node_stats['indices']
collectors = jvm['gc']['collectors'].values
{
  'os'      => { 'memory' => node_stats['os']['mem']['used_percent'] },
  'jvm'     => {
    'used'      => jvm['mem']['heap_used_in_bytes'],
    'committed' => jvm['mem']['heap_committed_in_bytes']
  },
  # GC totals are summed across every collector on the node.
  'gc'      => { 'count' => collectors.inject(0) { |t, c| t + c['collection_count'] } },
  'gc_time' => { 'time'  => collectors.inject(0) { |t, c| t + c['collection_time_in_millis'] } },
  'cache'   => {
    'field'  => idx['fielddata']['memory_size_in_bytes'],
    'filter' => idx['filter_cache']['memory_size_in_bytes'],
    'id'     => idx['id_cache']['memory_size_in_bytes']
  },
  'docs'    => { 'count' => idx['docs']['count'] },
  'ops'     => {
    'index'  => idx['indexing']['index_total'],
    'get'    => idx['get']['total'],
    'search' => idx['search']['query_total'],
    'delete' => idx['indexing']['delete_total']
  },
  'store'   => { 'size' => idx['store']['size_in_bytes'] }
}
end
# +----------------------------------------------------------------------+
# Fetch /_cat/health and reshape the single data row into
# { column => { label:, value: } } entries for the List widget.
#
# Returns [header_columns, es_health_hash].
def get_cluster_health()
raw = fetch('/_cat/health/?v')
header, rows = create_row_hash(raw)
row = rows[0] # _cat/health yields exactly one data row
es_health = {}
row.keys.sort.each do |col|
  es_health[col] = { label: col, value: row[col] }
end
return header, es_health
end
# +----------------------------------------------------------------------+
# Fetch /_cat/indices and render it as a single HTML-table string,
# one row per index (create_html_table colors each row by its first
# column, the index health value).
def get_cluster_indices()
create_html_table(fetch('/_cat/indices/?v'))
end
# +----------------------------------------------------------------------+
# Pull per-node OS/JVM/indices stats and build one memory pie-chart
# data hash per node, merged into a single Hash keyed by host name.
#
# Returns { host => { "memory" => [["Used", pct], ["Free", pct]] }, ... }.
def get_node_stats()
stats = JSON.parse(fetch('/_nodes/stats/os,jvm,indices'))
data = {}
# One pie per node; generate_pie returns a single-entry hash keyed by host.
stats['nodes'].each_value do |node_stats|
  data.merge!(generate_pie(node_stats))
end
data
end
# +----------------------------------------------------------------------+
# Parse the whitespace-separated table returned by the _cat APIs.
# The first line is the column header; every remaining line becomes a
# Hash of column => value.
#
# data - multi-line String, e.g. the body of /_cat/health/?v.
#
# Returns [header_columns(Array of String), rows(Array of Hash)].
def create_row_hash(data)
lines = data.lines
# The original used slice!(-0); -0 == 0 in Ruby, so it always removed
# the first line. shift states that intent explicitly.
header = lines.shift.split
rows = lines.map { |line| Hash[header.zip(line.split)] }
return header, rows
end
# +----------------------------------------------------------------------+
# Render whitespace-separated _cat output as an HTML table string.
# Every line (including the header line) becomes a <tr> whose bgcolor
# is the line's first token.
def create_html_table(data)
rows = data.lines.map do |line|
  cells = line.split
  cols = cells.map { |cell| "<td>" + cell + "</td>" }.join
  "<tr bgcolor=" + cells[0] + ">" + cols + "</tr>"
end
"<table border='1'>" + rows.join + "</table>"
end
# +----------------------------------------------------------------------+
# Register the Dashing job: every 30s push cluster health, the indices
# HTML table, and one memory pie chart per node.
def get_all_stats()
SCHEDULER.every '30s' do
  _header, es_health = get_cluster_health()
  send_event('elastic_cluster_health', { items: es_health.values })
  send_event('elastic_search_indices', { data: get_cluster_indices() })
  node_pies = get_node_stats()
  node_pies.keys.sort.each do |host|
    pie_series = [{ type: 'pie', name: 'Type', data: node_pies[host]["memory"] }]
    send_event('ES_Node_' + host.to_s, { series: pie_series })
  end
end
end
# +----------------------------------------------------------------------+
#Main
# Entry point: registers the recurring SCHEDULER job defined above.
get_all_stats()
#!/usr/bin/env ruby
require 'rubygems'
require 'net/http'
require 'json'
# Elasticsearch endpoint used by fetch() below.
# NOTE(review): spelled "juvnesys" here but "juvensys" in the other
# copy of this job earlier in the gist -- one is likely a typo; confirm.
@host = "search.juvnesys.com"
@port = 80
##########################################################################################################################
# HTTP GET a resource from the configured ES endpoint; returns the
# response body as a String.
def fetch(resource)
uri = URI.parse("http://#{@host}:#{@port}#{resource}")
Net::HTTP.get_response(uri).body
end
##########################################################################################################################
# Condense one node's stats document into a Hash of metric groups.
#
# node_stats - per-node Hash from /_nodes/stats/os,jvm,indices.
#
# Returns a Hash keyed by 'os'/'jvm'/'gc'/'gc_time'/'cache'/'docs'/
# 'ops'/'store'.
def parse_stats(node_stats)
result = {}
heap       = node_stats['jvm']['mem']
collectors = node_stats['jvm']['gc']['collectors'].values
indices    = node_stats['indices']

result['os']  = { 'memory' => node_stats['os']['mem']['used_percent'] }
result['jvm'] = {
  'used'      => heap['heap_used_in_bytes'],
  'committed' => heap['heap_committed_in_bytes']
}
# GC counters are totalled over every collector (young, old, ...).
result['gc']      = { 'count' => collectors.inject(0) { |acc, c| acc + c['collection_count'] } }
result['gc_time'] = { 'time'  => collectors.inject(0) { |acc, c| acc + c['collection_time_in_millis'] } }
result['cache'] = {
  'field'  => indices['fielddata']['memory_size_in_bytes'],
  'filter' => indices['filter_cache']['memory_size_in_bytes'],
  'id'     => indices['id_cache']['memory_size_in_bytes']
}
result['docs'] = { 'count' => indices['docs']['count'] }
result['ops'] = {
  'index'  => indices['indexing']['index_total'],
  'get'    => indices['get']['total'],
  'search' => indices['search']['query_total'],
  'delete' => indices['indexing']['delete_total']
}
result['store'] = { 'size' => indices['store']['size_in_bytes'] }
result
end
# +----------------------------------------------------------------------+
# Build the two HTML fragments pushed to the dashboard: the cluster
# health table and the per-index listing. Every _cat line (including
# the header row) becomes a table row.
#
# Returns [health_html, indices_html] (both Strings).
#
# Cleanups vs. the original: removed the unused stderr handle
# (err = IO.new(2, "w")), the begin/end wrapper that had no rescue,
# and the dead fetches of /_cat/nodes, /_cluster/stats, /_stats/fielddata
# plus the parse_stats loop -- its result was overwritten on every
# iteration and never used in the return value.
def get_stats()
health  = fetch('/_cat/health/?v')
indices = fetch('/_cat/indices/?v')

health_data = "<table border='1'>"
health.lines.each do |line|
  health_data << "<tr>"
  line.split.each { |cell| health_data << "<td>" << cell << "</td>" }
  health_data << "</tr>"
end
health_data << "</table>"

indices_data = "<table border='1'>"
indices.lines.each do |line|
  indices_data << "<tr bgcolor='green'>"
  line.split.each { |cell| indices_data << "<td>" << cell << "</td>" }
  indices_data << "</tr>"
end
indices_data << "</table>"

return health_data, indices_data
end
# +----------------------------------------------------------------------+
#SCHEDULER.every '30s' do
# NOTE(review): the scheduler wrapper is commented out, so this runs
# once at load time instead of every 30s -- presumably for debugging;
# confirm before deploying.
health_stats,indices_stats=get_stats()
send_event('elastic_search',{ data: indices_stats })
#send_event('elastic_search',{ health: health_stats,indices: indices_stats })
#end
# +----------------------------------------------------------------------+
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment