# encoding: utf-8
import os
import shelve
import boto.glacier
import boto
from boto.glacier.exceptions import UnexpectedHTTPResponseError
ACCESS_KEY_ID = "XXXXXXXXXXXXX"
SECRET_ACCESS_KEY = "XXXXXXXXXXX"
SHELVE_FILE = os.path.expanduser("~/.glaciervault.db")
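
The preview stops after the configuration above. A rough sketch of how such a script typically continues with boto 2: open the shelve database, upload the file to a Glacier vault, and record the returned archive id so the archive can be retrieved or deleted later. The vault name and file path below are assumptions, not part of the gist.

# Sketch only: vault name and file path are assumed, not from the gist preview.
glacier = boto.connect_glacier(aws_access_key_id=ACCESS_KEY_ID,
                               aws_secret_access_key=SECRET_ACCESS_KEY)
vault = glacier.create_vault("backup")        # idempotent: creates the vault if missing
filename = "/tmp/backup.tar.gz"               # assumed file to archive

archive_id = vault.upload_archive(filename)   # single-part upload, returns the archive id

# remember which archive id belongs to which file, for later retrieval or deletion
db = shelve.open(SHELVE_FILE)
db[filename] = archive_id
db.close()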
@shantanuo
shantanuo / mysql_debug.sh
Last active June 1, 2022 03:19
gather and send mysql statistics to be analysed
#!/bin/sh
user='root'
password='company'
adminmail='s.o@gmail.com'
> to_study.txt
> to_study_err.txt
mysqladmin -u$user -p$password debug
errorlog=`mysqladmin -u$user -p$password variables | grep log_error | awk '{print $4}'`
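
The preview ends after locating the error log. A rough Python equivalent of the remaining idea, assumed rather than taken from the gist: ask mysqld to dump its debug output into the error log, then mail the tail of that log to the admin address. The SMTP host and the number of lines kept are assumptions.

# Sketch only: mirrors the shell script's intent; SMTP host and line count are assumed.
import subprocess
import smtplib
from email.mime.text import MIMEText

user, password, adminmail = 'root', 'company', 's.o@gmail.com'

# ask mysqld to write debug/status information into its error log
subprocess.call(['mysqladmin', '-u' + user, '-p' + password, 'debug'])

# find the error log path from the server variables (same as the awk '{print $4}' above)
out = subprocess.check_output(['mysqladmin', '-u' + user, '-p' + password, 'variables']).decode()
errorlog = [line.split()[3] for line in out.splitlines() if 'log_error' in line][0]

# mail the last part of the error log for analysis
with open(errorlog) as f:
    body = ''.join(f.readlines()[-200:])      # assumed: last 200 lines are enough
msg = MIMEText(body)
msg['Subject'] = 'mysql debug output'
msg['From'] = adminmail
msg['To'] = adminmail
smtplib.SMTP('localhost').sendmail(adminmail, [adminmail], msg.as_string())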
@shantanuo
shantanuo / amazon.sh
Last active December 27, 2015 11:09
create an instance dynamically using boto
#!/bin/sh
cat > create_image.py << heredoc
import boto.ec2
myami='ami-eba0f882'
mytype='t1.micro'
myvolume='vol-cf5d9282'
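
The heredoc is cut off here. A rough sketch of how the generated create_image.py presumably continues: connect to EC2, launch an instance from the AMI, wait for it to come up, and attach the data volume. The region, device name, and credential handling (boto config/environment) are assumptions.

# Sketch only: region and device name are assumed; credentials come from the boto config.
import time
conn = boto.ec2.connect_to_region('us-east-1')

reservation = conn.run_instances(myami, instance_type=mytype)
instance = reservation.instances[0]

# wait until the instance is running before attaching the data volume
while instance.update() != 'running':
    time.sleep(10)

conn.attach_volume(myvolume, instance.id, '/dev/sdf')   # assumed device name
print(instance.id)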
@shantanuo
shantanuo / s3upload.py
Created December 17, 2013 07:46
Upload a file to the Amazon S3 "glaciercopy" folder. That folder has a lifecycle rule that archives files older than 7 days to Glacier to save cost.
# python s3upload.py /path/file.txt
import sys
myfile = sys.argv[1]
import os
onlyfile=os.path.split(myfile)[-1]
import boto
from boto.s3.connection import OrdinaryCallingFormat
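
The preview ends after the imports. The upload step presumably looks like the sketch below: connect to S3, open the bucket, and write the file under the glaciercopy/ prefix so the bucket's lifecycle rule can archive it to Glacier after 7 days. The bucket name and credential handling are assumptions.

# Sketch only: bucket name is assumed; credentials come from the boto config or environment.
from boto.s3.key import Key

conn = boto.connect_s3(calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket('mybucket')          # assumed bucket name

key = Key(bucket)
key.key = 'glaciercopy/' + onlyfile           # the lifecycle rule archives this prefix to Glacier
key.set_contents_from_filename(myfile)
print(key.key + ' uploaded')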
@shantanuo
shantanuo / redshift.py
Created December 17, 2013 07:48
Start a cluster if there is no cluster already running. Pass start or stop after the file name, e.g. # python redshift.py stop
#!/usr/bin/python
import boto
conn = boto.connect_redshift(aws_access_key_id='key', aws_secret_access_key='secret')
import datetime
mymonth = datetime.datetime.now().strftime("%b").lower()
myday = datetime.datetime.now().strftime("%d")
myvar = mymonth+myday+'-viva-mar5-deliveryreport-new'
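
The preview stops after building the date-based name. A rough sketch of the start/stop logic the description implies, with the cluster identifier, the use of myvar as the snapshot name, and the response parsing all assumed rather than taken from the gist:

# Sketch only: cluster id, snapshot naming and response parsing are assumptions.
import sys
action = sys.argv[1]                          # 'start' or 'stop'
cluster_id = 'mycluster'                      # assumed cluster identifier

resp = conn.describe_clusters()
clusters = resp['DescribeClustersResponse']['DescribeClustersResult']['Clusters']

if action == 'start' and not clusters:
    # restore a cluster from the date-named snapshot only if nothing is running
    conn.restore_from_cluster_snapshot(cluster_id, myvar)
elif action == 'stop' and clusters:
    # take a final snapshot, then remove the cluster to stop billing
    conn.delete_cluster(cluster_id,
                        skip_final_cluster_snapshot=False,
                        final_cluster_snapshot_identifier=myvar)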
@shantanuo
shantanuo / instance.py
Last active December 31, 2015 14:39
Launch an instance: 1 for Ubuntu with FreeSWITCH & Interspire, or 2 for TokuDB on Red Hat. Usage: # python instance.py 1 or # python instance.py --help
#!/usr/bin/python
access_key='ABC'
secret_key='PQR+XYZ'
my_instance_type='t1.micro'
# following instance types are available
# t1.micro m1.small m1.medium m1.large m1.xlarge m3.xlarge m3.2xlarge m2.xlarge m2.2xlarge m2.4xlarge c1.medium c1.xlarge hi1.4xlarge hs1.8xlarge
my_security_groups='N-1-0-1-AutogenByAWSMP-'
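
A rough sketch of the selection step the description suggests: the command-line argument 1 or 2 picks one of two prepared AMIs, and the instance is launched with the settings above. The AMI ids, the region, and the use of plain sys.argv in place of a full --help parser are placeholders and assumptions.

# Sketch only: AMI ids and region below are placeholders, not from the gist preview.
import sys
import boto.ec2

amis = {'1': 'ami-xxxxxxxx',                  # Ubuntu with FreeSWITCH & Interspire (placeholder)
        '2': 'ami-yyyyyyyy'}                  # TokuDB on Red Hat (placeholder)
choice = sys.argv[1] if len(sys.argv) > 1 else '1'

conn = boto.ec2.connect_to_region('us-east-1',
                                  aws_access_key_id=access_key,
                                  aws_secret_access_key=secret_key)
reservation = conn.run_instances(amis[choice],
                                 instance_type=my_instance_type,
                                 security_groups=[my_security_groups])
print(reservation.instances[0].id)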
<?php
/*
POST / HTTP/1.1
Host: kinesis.<region>.<domain>
x-amz-Date: <Date>
Authorization: AWS4-HMAC-SHA256 Credential=<Credential>, SignedHeaders=content-type;date;host;user-agent;x-amz-date;x-amz-target;x-amzn-requestid, Signature=<Signature>
User-Agent: <UserAgentString>
Content-Type: application/x-amz-json-1.1
Content-Length: <PayloadSizeBytes>
@shantanuo
shantanuo / kinesys.py
Created December 19, 2013 10:26
Amazon Kinesis is a managed service that scales elastically for real-time processing of streaming big data. Here is a Python script that exercises the basic operations.
# put and get data
import boto
mykin = boto.connect_kinesis(aws_access_key_id='access',
aws_secret_access_key='passWord')
myput = mykin.put_record(stream_name='mytest', data='abcdefghij223344',
partition_key='parti11223344', b64_encode=True)
myiterator = mykin.get_shard_iterator(stream_name='mytest',
shard_id='shardId-000000000000',
shard_iterator_type='TRIM_HORIZON')
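
The preview stops at the shard iterator. Reading the data back presumably looks like the short sketch below: pull the iterator string out of the response and fetch records with get_records; the field names follow the Kinesis API response format.

# The iterator response is a dict; 'ShardIterator' is fed into get_records.
myrecords = mykin.get_records(shard_iterator=myiterator['ShardIterator'],
                              b64_decode=True)
for record in myrecords['Records']:
    print(record['Data'])

# 'NextShardIterator' from the response can be passed back to get_records to keep reading.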
@shantanuo
shantanuo / tokudb_copy.sh
Created January 26, 2014 15:12
Copy a TokuDB database to another server without using a dump.
#!/bin/sh
# copy the database source files to destination
dbname='palus'
source='/DATA/4GLV/tokudb/'
destip='192.168.150.137'
destuser='db'
destpass='db'
destport='3306'
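
The preview ends with the connection settings. A heavily hedged sketch of the copy step: push the raw TokuDB files to the destination data directory, assuming the source server has been stopped (or the tables flushed and locked) so the files are consistent, and treating destuser as an ssh account, which is an assumption.

# Sketch only: assumes ssh access to destip and a quiesced source server.
import subprocess

source = '/DATA/4GLV/tokudb/'
destip = '192.168.150.137'
destuser = 'db'

# copy the raw TokuDB files straight into the destination data directory
subprocess.check_call(['rsync', '-av', source,
                       destuser + '@' + destip + ':' + source])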