function deleteFromGmail(search_query) {
  let batchSize = 100;
  let threads = GmailApp.search(search_query);
  for (let slice_index = 0; slice_index < threads.length; slice_index += batchSize) {
    GmailApp.moveThreadsToTrash(threads.slice(slice_index, slice_index + batchSize));
  }
}

function deleteOldSocialMailsFromGmail() {
  // Delete Social emails older than 6 months.
  // Assumed completion (the preview cuts off here); adjust the query as needed.
  deleteFromGmail("category:social older_than:6m");
}
ershad / query_finder.sql
Created February 1, 2019 22:14 — forked from mezis/query_finder.sql
Finding long-running queries in MySQL
SELECT id,state,command,time,left(replace(info,'\n','<lf>'),120)
FROM information_schema.processlist
WHERE command <> 'Sleep'
AND info NOT LIKE '%PROCESSLIST%'
ORDER BY time DESC LIMIT 50;
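
To run this check from a script or a cron job, a minimal Ruby sketch using the mysql2 gem; the connection settings are placeholders:

require "mysql2"

# Placeholder credentials: point these at the server you want to inspect.
client = Mysql2::Client.new(host: "localhost", username: "root", password: "secret")

sql = <<~SQL
  SELECT id, state, command, time, left(replace(info,'\\n','<lf>'),120) AS info
  FROM information_schema.processlist
  WHERE command <> 'Sleep' AND info NOT LIKE '%PROCESSLIST%'
  ORDER BY time DESC LIMIT 50
SQL

client.query(sql).each do |row|
  puts "#{row['id']}\t#{row['time']}s\t#{row['info']}"
end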
addEventListener("fetch", event => {
  event.respondWith(verifyAndCache(event.request));
});

async function verifyAndCache(request) {
  /**
   * source:
   * https://jameshfisher.com/2017/10/31/web-cryptography-api-hmac.html
   * https://github.com/diafygi/webcrypto-amples#hmac-verify
   */
  // Assumed completion (preview cut off): verify a hex HMAC-SHA256 `token` query parameter over the URL path; SECRET is a placeholder key binding.
  const url = new URL(request.url);
  const key = await crypto.subtle.importKey("raw", new TextEncoder().encode(SECRET), { name: "HMAC", hash: "SHA-256" }, false, ["verify"]);
  const sig = Uint8Array.from((url.searchParams.get("token") || "").match(/../g) || [], h => parseInt(h, 16));
  const ok = await crypto.subtle.verify("HMAC", key, sig, new TextEncoder().encode(url.pathname));
  return ok ? fetch(request) : new Response("Forbidden", { status: 403 });
}
ershad / rack_attack.rb
Last active March 5, 2019 09:36
Rack::Attack sample configuration
class Rack::Attack
  # By default, Rack::Attack uses `Rails.cache` to store request information.
  # It's configurable as follows -
  #
  #   redis_client = Redis.connect(url: ENV["REDIS_URL"])
  #   Rack::Attack.cache.store = Rack::Attack::StoreProxy::RedisStoreProxy.new(redis_client)
  class Request < ::Rack::Request
    # Assumed completion (the preview cuts off here): expose the client IP
    # seen by Rails so throttles behind a proxy key on the right address.
    def remote_ip
      @remote_ip ||= (env["action_dispatch.remote_ip"] || ip).to_s
    end
  end
end
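
For reference, a throttle that keys on that helper; the limits below are placeholder values, but throttle itself is Rack::Attack's documented API:

class Rack::Attack
  # Placeholder limits: at most 300 requests per IP every 5 minutes.
  throttle("req/ip", limit: 300, period: 300) do |req|
    req.remote_ip
  end
end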
resource "aws_cloudwatch_metric_alarm" "redis_eviction_alarm" {
alarm_name = "production-redis-high-eviction-rate"
comparison_operator = "GreaterThanThreshold"
evaluation_periods = "1"
period = "60"
metric_name = "Evictions"
dimensions {
CacheClusterId = "< Redis CluserId >"
}
#!/bin/bash
# Remove all imported keys
grep -v bigbinary_imported ~/.ssh/authorized_keys > /tmp/authorized_keys
mv /tmp/authorized_keys ~/.ssh/authorized_keys
# Write keys
curl -s http://bigbinary.com/team/ssh_keys.txt | grep "^ssh-rsa" | sed "s/$/\ bigbinary_imported/g" >> ~/.ssh/authorized_keys
echo "Updated SSH keys."
#!/bin/bash
ps -A --sort -rss -o comm,pmem,rss | awk '
NR == 1 { print; next }
{ a[$1] += $2; b[$1] += $3; }
END {
  for (i in a) {
    size_in_bytes = b[i] * 1024
    split("B KB MB GB TB PB", unit)
    human_readable = 0
    if (size_in_bytes == 0) {
      human_readable = "0B"
    } else {
      # Assumed completion (the preview cuts off here): pick the largest unit.
      exponent = int(log(size_in_bytes) / log(1024))
      human_readable = sprintf("%.1f%s", size_in_bytes / (1024 ^ exponent), unit[exponent + 1])
    }
    printf "%-20s %6.1f %12s\n", i, a[i], human_readable
  }
}'
# encoding: utf-8
require 'date'
require 'xmlsimple'
require 'csv'

hash = XmlSimple.xml_in('m.xml')
start_date = DateTime.parse "2013-12-28"
end_date = Date.today
// Show GitHub organizations
// Example: "company1", "company2"
"include_orgs": [],
# Usage:
# shell$ irb
# 1.9.3p194 :001 > require '~/Desktop/Po'
# 1.9.3p194 :002 > Po.new('~/code/translation/evolution-data-server.master.ml.po').split(3, 'evolution.po')
# Files named 1_evolution.po, 2_evolution.po, 3_evolution.po will be written to your current working directory.

class Po
  attr_accessor :po_file_array

  def initialize(file)
    # Assumed completion (the preview cuts off here): read the .po file in
    # as an array of lines for splitting.
    @po_file_array = File.readlines(File.expand_path(file))
  end
end
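
The preview ends before the split method the usage notes describe. A minimal sketch, assuming split(parts, name) partitions the file's blank-line-separated entries into roughly equal chunks named 1_name, 2_name, and so on:

class Po
  # Hypothetical reconstruction of split; entry boundaries are blank lines.
  def split(parts, name)
    entries = po_file_array.join.split(/\n{2,}/)
    per_file = (entries.size / parts.to_f).ceil
    entries.each_slice(per_file).with_index(1) do |chunk, index|
      File.write("#{index}_#{name}", chunk.join("\n\n") + "\n")
    end
  end
end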