192 pthread_cond_timedwait@@GLIBC_2.3.2,cnd_timedwait,rd_kafka_q_pop_serve,rd_kafka_broker_ops_serve,abs_timeout=<optimized,rd_kafka_broker_producer_serve,rd_kafka_broker_serve.constprop.74,rd_kafka_broker_thread_main,start_thread,clone
112 epoll_wait,iv_fd_epoll_timerfd_poll,iv_fd_poll_and_run,iv_main,_worker_thread,_worker_thread_func,??,start_thread,clone
46 poll,rd_kafka_transport_poll,rd_kafka_transport_io_serve,rd_kafka_broker_ops_io_serve,rd_kafka_broker_producer_serve,rd_kafka_broker_serve.constprop.74,rd_kafka_broker_thread_main,start_thread,clone
14 pthread_cond_timedwait@@GLIBC_2.3.2,cnd_timedwait,rd_kafka_q_serve,rd_kafka_thread_main,start_thread,clone
14 pthread_cond_timedwait@@GLIBC_2.3.2,cnd_timedwait,rd_kafka_q_pop_serve,rd_kafka_broker_ops_serve,abs_timeout=<optimized,rd_kafka_broker_internal_serve,rd_kafka_broker_serve.constprop.74,rd_kafka_broker_thread_main,start_thread,clone
4 epoll_wait,iv_fd_epoll_timerfd_poll,iv_fd_poll_and_run,iv_main,iv_work_thread,iv_thr
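These counts read like aggregated gdb stack traces in the style of the poor man's profiler (pmp, which also shows up against mysqld further down). A minimal sketch of that aggregation, assuming gdb can attach to the target PID; taking the fourth field of every frame blindly is exactly what leaves stray tokens such as "abs_timeout=<optimized" in the collapsed stacks above:

# hedged sketch: sample one round of stacks from the process in $1 and collapse identical ones
pid=$1
gdb -ex "set pagination 0" -ex "thread apply all bt" -batch -p "$pid" |
awk '
  BEGIN { s = "" }
  /^Thread/ { if (s != "") print s; s = "" }
  /^#/      { s = (s == "") ? $4 : s "," $4 }
  END       { if (s != "") print s }' |
sort | uniq -c | sort -rn -k1,1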
Host github.com
    PermitLocalCommand no

Host *
    RequestTTY force
    RemoteCommand env term=$TERM TERM=xterm-256color bash -i -c 'export PROMPT_COMMAND='"'"'if [ x"$term" == "xxterm-256color" ]; then es=$"\033]0;"; else es=$"\033k"; fi; printf "$es%%s@%%s:%%s\033\\" "${USER}" "${HOSTNAME%%.*}" "${PWD/#$HOME/\~}"'"'"'; bash -i'
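For reference, the title-setting branch of that injected PROMPT_COMMAND boils down to an escape sequence like the one below (a standalone sketch, assuming an xterm-compatible terminal; inside GNU screen the \033k ... \033\\ form sets the window name instead):

# set the terminal title to user@host:cwd, same as the xterm branch of the config above
printf '\033]0;%s@%s:%s\033\\' "$USER" "${HOSTNAME%%.*}" "$PWD"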

Keybase proof

I hereby claim:

  • I am johnskopis on github.
  • I am johnskopis (https://keybase.io/johnskopis) on keybase.
  • I have a public key ASABD12mAyh9bfpe_n3VxhyYRKkQk6L0fdYuzQTFMMzraAo

To claim this, I am signing this object:


class Foo
  def baz
    'The original class is noisy'
    raise 'Noise'
  end
end
module MonkeyPatch
  def self.extended(obj)
    [:baz].each do |meth|
      original = obj.method(meth)
      # hypothetical completion: re-define each method on the extended instance so the noise is swallowed
      obj.define_singleton_method(meth) { original.call rescue 'patched: quiet' }
    end
  end
end

require 'csv'

DEFAULT_ATTRS = %w[
  network.interfaces.eth0.addresses
]
DEFAULT_QUERY = 'role:base'
DEFAULT_FILTER = Proc.new do |addresses|
  ipa = addresses.select { |address, config| config[:family] == 'inet' }.keys
  ipa.size == 1 ? ipa.first : ipa
end

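The constants read like the defaults for a Chef search helper; the equivalent ad-hoc query from a workstation would be something along these lines (assuming a configured knife, and that role:base and the eth0 attribute path match the environment):

# pull the same attribute for every role:base node straight from the Chef server
knife search node 'role:base' -a network.interfaces.eth0.addresses -F json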
rpm -Uvh http://archive.cloudera.com/cdh4/one-click-install/redhat/6/x86_64/cloudera-cdh-4-0.x86_64.rpm
yum -y install hadoop-0.20-conf-pseudo hbase hbase-regionserver hbase-master
sudo -u hdfs hdfs namenode -format
for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do sudo service $x start ; done
sudo -u hdfs hadoop fs -mkdir /tmp
sudo -u hdfs hadoop fs -chmod -R 1777 /tmp
sudo -u hdfs hadoop fs -mkdir -p /var/lib/hadoop-hdfs/cache/mapred/mapred/staging
sudo -u hdfs hadoop fs -chmod 1777 /var/lib/hadoop-hdfs/cache/mapred/mapred/staging
sudo -u hdfs hadoop fs -chown -R mapred /var/lib/hadoop-hdfs/cache/mapred
for x in `cd /etc/init.d ; ls hadoop-0.20-mapreduce-*` ; do sudo service $x start ; done
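Once the HDFS and MapReduce init scripts are up, a quick smoke test along these lines can confirm the pseudo-distributed cluster is actually serving (a hedged sketch; the exact daemon names depend on which CDH4 packages were installed):

# sanity checks for the single-node CDH4 setup
sudo -u hdfs hadoop fs -ls /                      # HDFS answers; /tmp and /var should be listed
sudo -u hdfs hdfs dfsadmin -report                # expect exactly one live datanode
ps -ef | grep -E 'NameNode|DataNode|JobTracker|TaskTracker' | grep -v grep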

johnskopis / titan-job (created November 6, 2013 06:24): titan jenkins job
<?xml version='1.0' encoding='UTF-8'?>
<maven2-moduleset plugin="maven-plugin@1.530">
  <actions/>
  <description>titan snapshot</description>
  <keepDependencies>false</keepDependencies>
  <properties>
    <hudson.model.ParametersDefinitionProperty>
      <parameterDefinitions>
        <hudson.model.TextParameterDefinition>
          <name>TITAN_VERSION</name>
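A config.xml like this (truncated here) can be loaded into Jenkins through its remote API; a hedged example, where the server URL, credentials, and local file name are all assumptions:

# create the job from the saved XML via the Jenkins createItem API
curl -X POST 'http://jenkins.example.com/createItem?name=titan-job' \
     --user admin:APITOKEN \
     --header 'Content-Type: application/xml' \
     --data-binary @titan-job.xml
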
[root@sfdb32 ~]# pmp
1051 read,??,my_net_read,do_command(THD*),do_handle_one_connection(THD*),handle_one_connection,start_thread,clone
96 pthread_cond_wait@@GLIBC_2.3.2,Query_cache::lock(),Query_cache::invalidate_table(THD*,,Query_cache::invalidate_table(THD*,,Query_cache::invalidate(THD*,,mysql_update(THD*,,mysql_execute_command(THD*),mysql_parse(THD*,,dispatch_command(enum_server_command,,do_handle_one_connection(THD*),handle_one_connection,start_thread,clone
36 ??,??,??,??,start_thread,clone
12 pthread_cond_wait@@GLIBC_2.3.2,Query_cache::lock(),Query_cache::invalidate_table(THD*,,Query_cache::invalidate_table(THD*,,Query_cache::invalidate(THD*,,mysql_insert(THD*,,mysql_execute_command(THD*),mysql_parse(THD*,,dispatch_command(enum_server_command,,do_handle_one_connection(THD*),handle_one_connection,start_thread,clone
12 pthread_cond_wait@@GLIBC_2.3.2,??,??,??,??,??,??,??,??,??,??,handler::ha_write_row(unsigned,write_record(THD*,,mysql_insert(THD*,,mysql_execute_command(THD*),mysql_par
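Apart from the idle readers in my_net_read, most of the active threads above are waiting in Query_cache::lock(), i.e. the query cache mutex is serializing writes. If that is the bottleneck, a hedged first step is to inspect and then shrink or disable the cache at runtime (standard MySQL 5.x variables; whether query_cache_type can be flipped live depends on how the server was started):

# inspect the query cache, then turn it off if it is the point of contention
mysql -e "SHOW GLOBAL VARIABLES LIKE 'query_cache%'"
mysql -e "SHOW GLOBAL STATUS LIKE 'Qcache%'"
mysql -e "SET GLOBAL query_cache_size = 0; SET GLOBAL query_cache_type = OFF"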

#!/usr/bin/env python
# originally from git@github.com:dlrust/python-memcached-stats.git
import re, telnetlib, sys, time, math
import cStringIO as StringIO
from collections import defaultdict


class Transforms:
    # collapse volatile key components (numeric ids, 32-hex-digit hashes) into placeholders
    _transforms = [
        (re.compile(ur'[_/](\d+)[_/]?'), '_?_'),
        (re.compile(ur':([a-f0-9]{32})$'), ':?'),
    ]
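The script normalizes key names with those regexes after pulling stats over memcached's text protocol (the original uses telnetlib); the same raw data can be fetched by hand, with host and port being assumptions here:

# dump raw stats from a local memcached over the text protocol (assumes 127.0.0.1:11211)
printf 'stats\r\nstats items\r\nquit\r\n' | nc 127.0.0.1 11211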

#!/usr/bin/env ruby
#
require 'active_support/core_ext'
require 'benchmark'

class FooException < StandardError; end

def fun(depth = 0)
  if depth == 150