Skip to content

Instantly share code, notes, and snippets.

@shantanuo
shantanuo / status.py
Last active August 29, 2015 14:05
analyse status_success file created by cron
# Fragment of status.py: analyse the status_success file produced by the
# cron entry documented below (mysqladmin status output, appended per run).
import pandas as pd
import os
# status file generated by this cron entry
# * * * * * mysqladmin status >> /home/mysore/status_success.txt 2>> /home/mysore/status_err.txt
# SQL used to obtain the server start time from the Uptime status variable:
# select date_sub(now(), INTERVAL variable_value SECOND) started_at from information_schema.global_status where variable_name='Uptime';
# Server start time taken from the query above — used as the time origin.
# NOTE(review): naive timestamp string; presumably server-local time — confirm.
started_at = '2014-08-28 16:50:06'
# Column names expected when parsing the mysqladmin status lines.
u_cols = ['seconds', 'threads', 'questions', 'slow']
# install Django and start Django
# Installs a pinned Django release, scaffolds a new project and serves it
# on all interfaces, port 8000. NOTE(review): runserver is the development
# server only — not for production use.
pip install Django==1.7
django-admin startproject dj_apache
cd dj_apache
python manage.py runserver 0.0.0.0:8000
import argparse
import json
import time
import boto.ec2
# Set up argument parser
parser = argparse.ArgumentParser(
description='Request AWS EC2 spot instance and tag instance and volumes.',
@shantanuo
shantanuo / log_process.py
Created November 4, 2014 08:23
create a log of all running processes every 5 minutes
import os
import psutil
import time
def create_process_logs(log_path):
"""
Create a log of all the currently running processes
"""
if not os.path.exists(log_path):
try:
$ cd /tmp
$ echo "foo bar baz foo foo quz" > test1
$ echo "bar bar foo bar baz foo quz foo foo" > test2
$ sed -i '.bak' 's/foo/replaced/g' test*
$ tail test1 test2
==> test1 <==
replaced bar baz replaced replaced quz
==> test2 <==
bar bar replaced bar baz replaced quz replaced replaced
def ncpr (searchnm):
    """Look up a phone number in the NCCP/TRAI do-not-call registry.

    Fetches the registry search page for *searchnm* (a phone-number string)
    and locates the result grid's header cell.

    NOTE(review): fragment — the rest of the function is not visible here.
    Python 2 era code (urllib2).
    """
    import urllib2
    from bs4 import BeautifulSoup
    mynumber = searchnm
    # Registry search endpoint; the number is passed as a query parameter.
    url = "http://www.nccptrai.gov.in/nccpregistry/saveSearchSub.misc?phoneno=" + mynumber
    soup = BeautifulSoup(urllib2.urlopen(url))
    # Header cell of the results table; later parsing (not shown) starts here.
    header = soup.find('td', class_='GridHeader')
import psutil
import simplejson
import time
import urllib2
# Sends the CPU and memory usage stats to Loggly. Sends both a machine total and per process.
def log():
    """Collect machine-total and per-process CPU/memory stats for Loggly.

    NOTE(review): fragment — only the start of the function body is visible.
    """
    # Accumulates per-process stats before they are serialised and sent.
    proc_dict = {}
#!/usr/bin/python
#
# Convert a Row-Based-Replication binary log to Statement-Based-Replication format, cheating a little.
# This script exists since Percona Toolkit's pt-query-digest cannot digest RBR format. The script
# generates enough for it to work with.
# Expecting standard input
# Expected input is the output of "mysqlbinlog --verbose --base64-output=DECODE-ROWS <binlog_file_name>"
# For example:
# $ mysqlbinlog --verbose --base64-output=DECODE-ROWS mysql-bin.000006 | python binlog-rbr-to-sbr.py | pt-query-digest --type=binlog --order-by Query_time:cnt --group-by fingerprint
#
@shantanuo
shantanuo / top_output.txt
Created February 28, 2015 12:50
httpd, node and npm processes are consuming almost all memory and making the server slow.
top - 18:13:16 up 262 days, 5:23, 8 users, load average: 0.14, 0.16, 0.10
Tasks: 326 total, 1 running, 325 sleeping, 0 stopped, 0 zombie
Cpu(s): 0.6%us, 0.1%sy, 0.0%ni, 99.0%id, 0.3%wa, 0.0%hi, 0.0%si, 0.0%st
Mem: 16253716k total, 15669816k used, 583900k free, 308524k buffers
Swap: 32767992k total, 218848k used, 32549144k free, 8928732k cached
PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
25388 mysql 20 0 4865m 621m 7176 S 7.9 3.9 8181:47 mysqld
7767 root 20 0 1020m 122m 6900 S 3.9 0.8 16:02.42 node
25296 root 20 0 8799m 736m 10m S 2.0 4.6 1452:06 java
def parse_connection(connection_string):
    """Work out the user name and password in connection_string

    Connection string should be of the form 'database://username@password'

    NOTE(review): fragment — the password extraction and the no-'@' branch
    are not visible here.
    """
    # Only strings containing '@' carry credentials.
    if '@' in connection_string:
        # Username is characters between '://' and '@'
        slash_position = connection_string.find('://')
        at_position = connection_string.find('@')
        user_name = connection_string[slash_position+3:at_position]