#!/bin/sh
# Configure SuperMicro boxes
#
# We end up with the following:
# * RAID1 for the OS (2 drives)
# * RAID10 num 1 (16 drives)
# * RAID10 num 2 (16 drives)
# * 2x hot spare
/opt/MegaRAID/MegaCli/MegaCli64 -CfgSpanAdd -r10 \
    -Array0[0:0,0:1] -Array1[0:2,0:3] -Array2[0:4,0:5] -Array3[0:6,0:7] \
    -Array4[0:8,0:9] -Array5[0:10,0:11] -Array6[1:0,1:1] -Array7[1:2,1:3] \
    -Array8[1:4,1:5] -Array9[1:6,1:7] -Array10[1:8,1:9] -Array11[1:10,1:11] \
    -Array12[1:12,1:13] -Array13[1:14,1:15] -Array14[1:16,1:17] -Array15[1:18,1:19] \
    -Array16[1:20,1:21] -Array17[1:22,1:23] \
    WB NORA Direct -a0 > cfgspanoutput2

from time import time
from logging.config import fileConfig

# Install the epoll reactor before anything pulls in twisted.internet.reactor.
from twisted.internet import epollreactor
epollreactor.install()

from flask import Flask, request

app = Flask(__name__)
fileConfig("logging.ini")
func min(a1 int, a2 int, a3 int) int {
    min := a1
    if a2 < min {
        min = a2
    }
    if a3 < min {
        min = a3
    }
    return min
}

package LevenshteinDistance

import "fmt"
import "math"

func compare(a, b string) int {
    var cost int
    d := make([][]int, len(a)+1)
    for i := 0; i < len(d); i++ {
        d[i] = make([]int, len(b)+1)

asenchi / redis_leaky_bucket.py
Created November 17, 2012 18:14 — forked from jdunck/redis_leaky_bucket.py
leaky bucket queue - redis 2.6 + lua + python
# cribbed from http://vimeo.com/52569901 (Twilio carrier call origination moderation)
# The idea is that many fan-in queues can enqueue at any rate, but
# dequeue needs to happen in a rate-controlled manner without allowing
# any individual input queue to starve other queues.
# http://en.wikipedia.org/wiki/Leaky_bucket (second sense, "This version is referred to here as the leaky bucket as a queue.")
#
# requires:
# redis 2.6+
# redis-py>=2.7.0
# anyjson
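The rate-controlled dequeue itself (redis 2.6's EVAL plus a Lua script) sits below the fold of this preview. As a rough illustration of the idea described in the comments, not the gist's actual script, a drain that leaks at most one item per interval while rotating over the fan-in queues could look like this; the key names bucket:last and bucket:ptr, and the assumption that producers push anyjson-encoded payloads, are invented for the example:

# Illustrative sketch only (not the gist's code): leak at most one item per
# `interval` seconds, rotating over the fan-in queues so no single queue can
# starve the others. Assumes producers RPUSH anyjson-encoded payloads.
import time
import anyjson
import redis

r = redis.StrictRedis()

DRAIN_LUA = """
local last_key = KEYS[1]            -- time of the last successful drain
local ptr_key  = KEYS[2]            -- round-robin pointer over the queues
local now      = tonumber(ARGV[1])
local interval = tonumber(ARGV[2])
if now - tonumber(redis.call('GET', last_key) or '0') < interval then
  return false                       -- bucket has not "leaked" yet; try later
end
local n   = #KEYS - 2
local ptr = tonumber(redis.call('GET', ptr_key) or '0')
for i = 0, n - 1 do
  local item = redis.call('LPOP', KEYS[((ptr + i) % n) + 3])
  if item then
    redis.call('SET', last_key, now)
    redis.call('SET', ptr_key, (ptr + i + 1) % n)
    return item
  end
end
return false
"""

drain = r.register_script(DRAIN_LUA)

def dequeue(queues, interval=1.0):
    """Pop one item from the next non-empty queue, at most once per interval."""
    item = drain(keys=["bucket:last", "bucket:ptr"] + list(queues),
                 args=[time.time(), interval])
    return anyjson.loads(item) if item else None

Passing the current time in as ARGV keeps the script deterministic, which redis 2.6 requires before a script is allowed to write.
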
asenchi / logstash-mysql-query-parse.md
Created September 20, 2012 01:06 — forked from jordansissel/logstash-mysql-query-parse.md
parsing mysql's bullshit query log format with logstash

parsing mysql query logs with logstash

The problem is that some lines in the file are missing timestamps when they aren't continuations of any previous line. It's dumb, really.

The mysql query log is a seriously bullshit format, but nothing logstash can't unscrew.

The main goal here is to show how we can fix the 'missing timestamp' problem.

% ruby bin/logstash agent -e '
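
The logstash invocation above is cut off. To make the fix concrete outside of logstash, here is a tiny Python sketch of the same carry-forward idea (an illustration only, not the config from this gist): remember the last timestamp seen and stamp it onto any line that starts without one. The regexp assumes the usual YYMMDD HH:MM:SS prefix of the general query log.

# Illustration of the carry-forward fix, not the logstash config from the gist:
# reuse the most recent timestamp for query-log lines that omit their own.
import re
import sys

TS = re.compile(r"^(\d{6}\s+\d{1,2}:\d{2}:\d{2})")

last_ts = None
for line in sys.stdin:
    line = line.rstrip("\n")
    m = TS.match(line)
    if m:
        last_ts = m.group(1)     # line carries its own timestamp; remember it
        print(line)
    elif last_ts:
        print("%s %s" % (last_ts, line))   # prepend the remembered timestamp
    else:
        print(line)              # nothing to carry forward yet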

(defun involved (population percentage)
  "Figure out the number of cookers given a percentage"
  (* population percentage))

(defun total-meals (givers meals-per-year)
  "Total number of meals per year given cookers and meals per year"
  (* givers meals-per-year))

(defun number-of-families (meals per-day)