Last active
December 7, 2016 02:32
-
-
Save wangyiyang/dc0a0c66b69ca9a2149543191cf36780 to your computer and use it in GitHub Desktop.
elasticsearch批量删除脚本
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding: utf-8 -*- | |
# python elasticsearch_pulk_del.py --keep=31 --ip=10.140.65.12                        keep the newest 31 days of indices
# python elasticsearch_pulk_del.py --start_index=0 --end_index=10 --ip=10.140.65.12   delete the oldest 11 indices
# python elasticsearch_pulk_del.py --om=true --ip=10.140.65.12                        raise merge throttling throughput
import urllib2 | |
import json | |
import time | |
import datetime | |
from optparse import OptionParser | |
# Command-line interface (see the usage examples at the top of the file).
parser = OptionParser()
# What to do with the selected indices: "del" (default) deletes, "close" closes.
parser.add_option('--type', action='store', type='string',
                  dest='type', default="del")
# Elasticsearch node to talk to; the HTTP API is assumed on port 9200.
parser.add_option('--ip', action='store', type='string', dest='ip')
# Positional selection: slice [start_index:end_index] of the index list.
parser.add_option('--start_index', action='store',
                  type='int', dest='start_index', default=None)
parser.add_option('--end_index', action='store', type='int',
                  dest='end_index', default=None)
# Age-based selection: keep only the most recent `keep` days of indices.
parser.add_option('--keep', action='store', type='int',
                  dest='keep', default=None)
# "--om=true" switches the run to tuning merge throttling instead of deleting.
parser.add_option('--om', action='store', type='string',
                  dest='om', default=None)
options, args = parser.parse_args()
def get_index_names(ip):
    """Return the names of every index on the cluster at *ip* (port 9200)."""
    stats_url = "http://%s:9200/_stats" % ip
    body = urllib2.urlopen(stats_url, timeout=20).read()
    stats = json.loads(body)
    return stats['indices'].keys()
def get_index_creation_date(ip, index_name): | |
try: | |
url = "http://%s:9200/%s" % (ip, index_name) | |
response = urllib2.urlopen(url, timeout=20).read() | |
index_info = json.loads(response) | |
return index_info[index_name]["settings"]["index"]["creation_date"] | |
except: | |
print "get_index_creation_date error" | |
return | |
def del_index(ip, index_name):
    """Issue DELETE /<index_name> and return the decoded JSON reply."""
    delete_req = urllib2.Request("http://%s:9200/%s" % (ip, index_name))
    delete_req.get_method = lambda: 'DELETE'
    body = urllib2.urlopen(delete_req, timeout=20).read()
    return json.loads(body)
def close_index(ip, index_name):
    """Close one index via POST /<index_name>/_close; return the raw reply body.

    A closed index stays on disk but no longer consumes cluster resources.
    """
    url = "http://%s:9200/%s/_close" % (ip, index_name)
    jdata = json.dumps({})  # empty JSON body; the URL carries all the intent
    req = urllib2.Request(url, jdata)
    # Consistency fix: apply the same 20 s timeout every other HTTP helper in
    # this file uses, so one hung node cannot stall the run indefinitely.
    response = urllib2.urlopen(req, timeout=20)
    return response.read()
def keep_indexs_days_ago(days, index_list):
    """Return the names of indices created before the most recent *days* days.

    Indices created at or after local midnight (days - 1) days ago are kept;
    everything older is returned for deletion/closing.

    days       -- number of days' worth of indices to keep (must be >= 1)
    index_list -- dicts with "index_name" and "index_creation_date"
                  (creation date is epoch milliseconds, str or int)

    Returns the list of index names to drop, or None if *days* < 1.
    """
    if days < 1:
        print("days out of range")
        return None
    # Local midnight of the oldest day to keep, as epoch milliseconds.
    # (The original round-tripped through strftime/strptime just to truncate
    # the time-of-day; replace() does the same thing directly.)
    midnight = (datetime.datetime.now() -
                datetime.timedelta(days=days - 1)).replace(
                    hour=0, minute=0, second=0, microsecond=0)
    cutoff_ms = int(time.mktime(midnight.timetuple()) * 1000)
    # Skip entries whose creation date could not be fetched (None) instead of
    # crashing on int(None) like the original did.
    return [info["index_name"] for info in index_list
            if info["index_creation_date"] is not None
            and int(info["index_creation_date"]) < cutoff_ms]
def get_index_range(start_index, end_index, index_list):
    """Return the slice index_list[start_index:end_index].

    Returns None (after printing "out of range") when end_index lies beyond
    the list.  Either bound may be None, meaning an open-ended slice.

    BUG FIX: the original ignored its own parameters and read the
    module-level `options` instead; use the arguments so the function is
    self-contained (main() passes the same values, so callers see no change).

    NOTE(review): the usage comment at the top of the file says 0..10 removes
    11 indices, but a Python slice is end-exclusive (10 here) — confirm which
    is intended.
    """
    if end_index is not None and end_index > len(index_list):
        print("out of range")
        return None
    return index_list[start_index:end_index]
def optimized_merge(ip): | |
url = "http://%s:9200/_cluster/settings" % ip | |
param = { | |
"persistent": | |
{ | |
"indices.store.throttle.type": "merge", | |
"indices.store.throttle.max_bytes_per_sec": "200mb" | |
} | |
} | |
param = json.dumps(param) | |
request = urllib2.Request(url, param) | |
request.get_method = lambda: 'PUT' | |
response = urllib2.urlopen(request, timeout=20).read() | |
result = json.loads(response) | |
print "optimized_merge:%s" % result["acknowledged"] | |
return result | |
def main(): | |
ip = options.ip | |
index_names = get_index_names(ip) | |
all_index_list = [] | |
result_index_list = None | |
for index_name in index_names: | |
index_creation_date = get_index_creation_date(ip, index_name) | |
all_index_list.append({"index_name": index_name, | |
"index_creation_date": index_creation_date}) | |
all_index_list = sorted(all_index_list) | |
if options.om == "true": | |
result = optimized_merge(ip) | |
return result | |
if options.keep and not options.start_index and not options.end_index: | |
result_index_list = keep_indexs_days_ago(options.keep, all_index_list) | |
if options.start_index or options.end_index: | |
result_index_list = get_index_range( | |
options.start_index, options.end_index, all_index_list) | |
if not result_index_list: | |
print "no result" | |
return | |
print result_index_list | |
confirm_commond = raw_input("ready?:(y/n):") | |
if confirm_commond == "n": | |
return | |
if confirm_commond == "y": | |
if options.type == "close": | |
for index_name in result_index_list: | |
print u"index to be closed: %s" % index_name | |
print u"index already closed: %s \n" % \ | |
close_index(ip, index_name) | |
else: | |
for index_name in result_index_list: | |
print u"index to be deleted: %s" % index_name | |
print u"index already deleted: %s \n" % \ | |
del_index(ip, index_name)["acknowledged"] | |
return | |
if __name__ == '__main__': | |
main() |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment