Skip to content

Instantly share code, notes, and snippets.

@GedowFather
Last active September 17, 2021 03:34
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save GedowFather/d842680b25dad3b592ddd2ca39929754 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python3
import os
import json
import hashlib
import glob
#
# Config
#
DATA_DIR = "/tmp/lockwait"  # directory where the collector script saved its *.json files
RANK_MAX = 100              # print at most this many ranked groups
#
# Read result
#
# Merge every JSON log file produced by the collector into a single list
# of lock-wait records.
logs = []
for file_path in glob.glob(f"{DATA_DIR}/*.json"):
    # 'with' guarantees the handle is closed even if JSON parsing raises.
    with open(file_path, "r") as f:
        logs += json.load(f)
if not logs:
    print("Not found lock wait log.")
    quit()
#
# Rank result
#
results = {}
for log in logs:
    # Fingerprint the (waiting, blocking) query pair; 8 hex chars of the MD5
    # of each normalized statement is enough to distinguish groups.
    waiting_hash = hashlib.md5(log['waiting_query'].encode('utf-8')).hexdigest()[0:8]
    blocking_hash = hashlib.md5(str(log['blocking_query']).encode('utf-8')).hexdigest()[0:8]
    key = waiting_hash + blocking_hash
    entry = results.get(key)
    if entry is None:
        # First sighting of this pair: seed the aggregate from this row.
        entry = results[key] = {
            'count' : 0,
            'lock_type' : f"{log['lock_mode']}:{log['lock_type']}",
            'lock_table' : log['lock_table'].replace('`', ''),
            'lock_index' : log['lock_index'],
            'waiting_query' : log['waiting_query'],
            'waiting_wait_secs_min' : log['waiting_wait_secs'],
            'waiting_wait_secs_max' : log['waiting_wait_secs'],
            'waiting_wait_secs_sum' : 0,
            'blocking_query' : log['blocking_query'],
            'blocking_table_locked' : log['blocking_table_locked'],
            'blocking_rows_locked' : log['blocking_rows_locked'],
            'blocking_rows_modified': log['blocking_rows_modified'],
            'blocking_age_min' : log['blocking_age'],
            'blocking_age_max' : log['blocking_age'],
            'blocking_status' : log['blocking_status'],
        }
    # Fold this row into the running aggregate.
    entry['count'] += 1
    entry['waiting_wait_secs_sum'] += log['waiting_wait_secs']
    entry['waiting_wait_secs_min'] = min(entry['waiting_wait_secs_min'], log['waiting_wait_secs'])
    entry['waiting_wait_secs_max'] = max(entry['waiting_wait_secs_max'], log['waiting_wait_secs'])
    entry['blocking_age_min'] = min(entry['blocking_age_min'], log['blocking_age'])
    entry['blocking_age_max'] = max(entry['blocking_age_max'], log['blocking_age'])
# Print the aggregated groups, most frequent first, capped at RANK_MAX.
ranks = sorted(results.values(), key=lambda entry: entry['count'], reverse=True)
for number, r in enumerate(ranks[:RANK_MAX], start=1):
    report = [
        "#",
        f"# {number}",
        "#",
        "",
        f"Count : {r['count']}",
        f"LockType : {r['lock_type']}",
        f"LockTable : {r['lock_table']}",
        f"LockIndex : {r['lock_index']}",
        f"WaitQuery : {r['waiting_query']}",
        f"WaitTotalTime : {r['waiting_wait_secs_sum']}",
        f"WaitSecs : {r['waiting_wait_secs_min']} - {r['waiting_wait_secs_max']}",
        f"BlockQuery : {r['blocking_query']}",
        f"BlockStatus : {r['blocking_status']}",
        f"BlockAge : {r['blocking_age_min']} - {r['blocking_age_max']}",
        f"TablesLocked : {r['blocking_table_locked']}",
        f"RowsLocked : {r['blocking_rows_locked']}",
        f"RowsModify : {r['blocking_rows_modified']}",
        "",  # original output string ended with '\n', so keep the blank line
    ]
    print("\n".join(report))
#!/usr/bin/env python3
#
# Install example on Amazon Linux
"""
yum localinstall -y https://dev.mysql.com/get/mysql80-community-release-el7-2.noarch.rpm
yum install -y gcc python3-devel mysql-community-devel
pip3 install mysqlclient
"""
#
import os
import re
import time
import json
from datetime import datetime
from warnings import filterwarnings
import MySQLdb
filterwarnings('ignore', category = MySQLdb.Warning)
#
# Config
#
DB_HOST = "localhost"
DB_PORT = 3306
DB_USER = "gedow"
DB_PASS = "gedowfather"
PERIOD_SEC = 3              # total sampling window in seconds
WAIT_SEC = 1                # pause between samples
DATA_DIR = "/tmp/lockwait"  # where the JSON result file is written
#
# Query
#
connect = MySQLdb.connect(
    host=DB_HOST,
    port=DB_PORT,
    user=DB_USER,
    passwd=DB_PASS,
    connect_timeout=5,
)
cursor = connect.cursor(MySQLdb.cursors.DictCursor)
# Determine the server's major version (5 or 8); it decides which
# lock-wait tables are queried below.
cursor.execute("SHOW VARIABLES LIKE 'version'")
# Parse the major version from e.g. "8.0.23" via split('.') instead of
# taking only the first character, so double-digit majors are not truncated.
version = int(cursor.fetchall()[0]['Value'].split('.')[0])
cursor.execute("SELECT @@system_time_zone AS system_time_zone")
system_time_zone = cursor.fetchall()[0]['system_time_zone']
# NOTE(review): this re-pins the session time zone only when the system
# time zone is UTC — presumably to make TIMESTAMPDIFF/NOW() consistent;
# confirm the intent before changing.
if system_time_zone == 'UTC':
    cursor.execute("SET SESSION time_zone = @@system_time_zone")
# Build the lock-wait query.  The SELECT list is version-independent; the
# FROM/JOIN part differs because MySQL 8 moved the InnoDB lock tables from
# INFORMATION_SCHEMA to performance_schema.
query = (
    "SELECT "
    "r.trx_id AS waiting_trx_id, r.trx_wait_started AS waiting_wait_started, "
    "r.trx_tables_locked AS waiting_table_locked, r.trx_rows_locked AS waiting_rows_locked, "
    "r.trx_mysql_thread_id AS waiting_thread, "
    "r.trx_query AS waiting_query, r.trx_rows_modified AS waiting_rows_modified, "
    "TIMESTAMPDIFF(SECOND, r.trx_started, NOW()) AS waiting_age, "
    "TIMESTAMPDIFF(SECOND, r.trx_wait_started, NOW()) AS waiting_wait_secs, "
    "rp.user AS waiting_user, rp.host AS waiting_host, rp.db AS waiting_db,"

    "b.trx_id AS blocking_trx_id, b.trx_mysql_thread_id AS blocking_thread, "
    "b.trx_tables_locked AS blocking_table_locked, b.trx_rows_locked AS blocking_rows_locked, "
    "b.trx_query AS blocking_query, b.trx_rows_modified AS blocking_rows_modified, "
    "TIMESTAMPDIFF(SECOND, b.trx_started, NOW()) AS blocking_age, "
    "TIMESTAMPDIFF(SECOND, b.trx_wait_started, NOW()) AS blocking_wait_secs, "
    "bp.user AS blocking_user, bp.host AS blocking_host, bp.db AS blocking_db, "

    "CONCAT(bp.command, IF(bp.command = 'Sleep', CONCAT(' ', bp.time), '')) AS blocking_status, "
    "lock_mode AS lock_mode, lock_type AS lock_type, "
)
if version == 5:
    query += (
        "lock_table AS lock_table, lock_index AS lock_index "
        "FROM INFORMATION_SCHEMA.INNODB_LOCK_WAITS w "
        "JOIN INFORMATION_SCHEMA.INNODB_TRX b ON b.trx_id = w.blocking_trx_id "
        "JOIN INFORMATION_SCHEMA.INNODB_TRX r ON r.trx_id = w.requesting_trx_id "
        "JOIN INFORMATION_SCHEMA.INNODB_LOCKS l ON l.lock_id = w.requested_lock_id "
    )
elif version == 8:
    query += (
        "object_name AS lock_table, index_name AS lock_index "
        "FROM performance_schema.data_lock_waits w "
        "JOIN INFORMATION_SCHEMA.INNODB_TRX b ON b.trx_id = w.blocking_engine_transaction_id "
        "JOIN INFORMATION_SCHEMA.INNODB_TRX r ON r.trx_id = w.requesting_engine_transaction_id "
        "JOIN performance_schema.data_locks l ON l.engine_lock_id = w.requesting_engine_lock_id "
    )
else:
    # BUG FIX: 'raise' on a plain string is a TypeError in Python 3;
    # raise a real exception type instead.
    raise RuntimeError("Supported mysql version are 5.x or 8.x")
# PROCESSLIST joins are LEFT joins: a transaction may have no attached thread.
query += (
    "LEFT JOIN INFORMATION_SCHEMA.PROCESSLIST bp ON bp.id = b.trx_mysql_thread_id "
    "LEFT JOIN INFORMATION_SCHEMA.PROCESSLIST rp ON rp.id = r.trx_mysql_thread_id "
)
#
# Get data
#
# Sample the lock-wait query PERIOD_SEC/WAIT_SEC times.  Rows are keyed by
# blocking trx -> waiting trx -> wait-start time, so repeated sightings of
# the same wait overwrite each other instead of piling up as duplicates.
locks = {}
for _ in range(int(PERIOD_SEC / WAIT_SEC)):
    get_unixtime = int(time.time())
    cursor.execute(query)
    for r in cursor.fetchall():
        r['unixtime'] = get_unixtime
        # Convert the wait-start datetime to a unix timestamp so the row is
        # JSON serializable and usable as a dict key.
        r['waiting_wait_started'] = int(time.mktime(r['waiting_wait_started'].timetuple()))
        blocking = locks.setdefault(r['blocking_trx_id'], {})
        waiting = blocking.setdefault(r['waiting_trx_id'], {})
        waiting[r['waiting_wait_started']] = r
    time.sleep(WAIT_SEC)
if not locks:
    # BUG FIX: message typo "Nof" -> "Not".
    print("Not found lock wait query.")
    quit()
#
# Sort data
#
def uniqueQuery(query):
    """Normalize a SQL statement so equivalent queries compare/hash equal.

    Literal values (numbers, quoted strings, dates, timestamps), IN lists
    and VALUES tuples are replaced with placeholders; runs of digits and
    Japanese text collapse to '[ja]'.
    """
    query = query.rstrip(';')
    # BUG FIX: the original class ended with the range 'ー-・'
    # (U+30FC-U+30FB), which is reversed and makes re.sub raise
    # "bad character range".  List both characters literally instead.
    ja_regex = r'[0-9ぁ-んァ-ン一-龥,.、。ー・]+'
    query = re.sub(ja_regex, '[ja]', query, flags=re.IGNORECASE)
    # Order matters: TIMESTAMP/DATE literals are protected with sentinels
    # before the generic quoted-string collapse, then restored masked.
    replacements = [
        [r" {2,}", " "],
        [r"'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}'", "!TIMESTAMP!"],
        [r"'\d{4}-\d{2}-\d{2}'", "!DATE!"],
        [r"'[^']*'", "'?'"],
        [r"!TIMESTAMP!", "'????-??-?? ??:??:??'"],
        [r"!DATE!", "'????-??-??'"],
        [r"([ (,=])-*[\d.]+", r'\1?'],
        [r" (IN) ([^\)]+)", r' \1 (?,...)'],
        [r" (VALUES) (\([^\)]+\),).*", r' \1 \2...'],
    ]
    for pattern, repl in replacements:
        query = re.sub(pattern, repl, query, flags=re.IGNORECASE)
    return query
results = []
# Flatten the nested locks mapping; only the leaf row dicts matter here, so
# iterate .values() directly — the keys were never used and the list(...)
# wrappers were unnecessary (no structural mutation happens during iteration).
for blocks in locks.values():
    for waits in blocks.values():
        for info in waits.values():
            # Normalize queries so identical statements with different
            # literals aggregate together in the ranking script.
            info['waiting_query'] = uniqueQuery(info['waiting_query'])
            if info['blocking_query']:
                info['blocking_query'] = uniqueQuery(info['blocking_query'])
            results.append(info)
#
# Save result
#
# Timestamped, host-tagged file name keeps repeated runs from clobbering
# each other's output.
now = datetime.now()
datetime_format = "%Y%m%d_%H%M%S"
datetime_value = now.strftime(datetime_format)
file_name = f"lockwait_{DB_HOST}_{datetime_value}.json"
file_path = f"{DATA_DIR}/{file_name}"
os.makedirs(DATA_DIR, exist_ok=True)
# 'with' guarantees the handle is flushed and closed even on write errors.
with open(file_path, "w") as f:
    json.dump(results, f)
print(f"Saved result to {file_path}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment