Shrijeet (shrijeet)

View StringFunnel.java
package com.example;

import com.google.common.hash.Funnel;
import com.google.common.hash.PrimitiveSink;

public enum StringFunnel implements Funnel<String> {
  INSTANCE;

  @Override
  public void funnel(String from, PrimitiveSink into) {
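The funnel body itself does not appear above; a minimal sketch of how such a funnel is usually completed and then plugged into a Guava BloomFilter follows. The UTF-8 encoding, the demo class, and the sizing figures are assumptions for illustration, not taken from the gist.

package com.example;

import com.google.common.hash.BloomFilter;

public class StringFunnelDemo {
  public static void main(String[] args) {
    // A common funnel body (an assumption here) feeds the string's UTF-8
    // bytes into the sink:
    //   into.putBytes(from.getBytes(java.nio.charset.StandardCharsets.UTF_8));

    // Illustrative sizing: 1,000,000 expected insertions, 1% false positives.
    BloomFilter<String> filter =
        BloomFilter.create(StringFunnel.INSTANCE, 1000000, 0.01);
    filter.put("row-key-1");
    System.out.println(filter.mightContain("row-key-1"));   // true
    System.out.println(filter.mightContain("row-key-2"));   // almost certainly false
  }
}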
View KeyValue.java
+ 4 // int: Total length of the whole KeyValue.
+ 4 // int: Total length of the key part of the KeyValue.
+ 4 // int: Total length of the value part of the KeyValue.
+ 2 // short: Row key length.
+ key.length // The row key.
+ 1 // byte: Family length.
+ family.length // The family.
+ qualifier.length // The qualifier.
+ 8 // long: The timestamp.
+ 1 // byte: The type of KeyValue.
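The summand list above reads as a length calculation; the helper below restates the same arithmetic in one place. The class and method names are invented for illustration, and the trailing value bytes are an assumption about what follows the type byte.

// Illustrative only: folds the field-by-field sum above into a helper.
public final class KeyValueLength {
  static long serializedLength(byte[] key, byte[] family, byte[] qualifier,
      byte[] value) {
    return 4                  // int: total length of the whole KeyValue
         + 4                  // int: total length of the key part
         + 4                  // int: total length of the value part
         + 2                  // short: row key length
         + key.length         // the row key
         + 1                  // byte: family length
         + family.length      // the family
         + qualifier.length   // the qualifier
         + 8                  // long: the timestamp
         + 1                  // byte: the type of KeyValue
         + value.length;      // the value bytes (assumed to follow the type byte)
  }
}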
View hive_no_system_exit.patch
if (tsk.ifRetryCmdWhenFail()) {
- if (running.size() != 0) {
+ if (running.size() != 0 && executeTasksInParallel()) {
taskCleanup();
}
// in case we decided to run everything in local mode, restore the
@@ -1183,7 +1183,7 @@ public class Driver implements CommandProcessor {
}
SQLState = "08S01";
console.printError(errorMessage);
View .vimrc
" VIM Configuration File
" Description: Optimized for C/C++ development, but useful also for other things.
" Author: Gerhard Gappmeier
"
 
" set UTF-8 encoding
set enc=utf-8
set fenc=utf-8
set termencoding=utf-8
" disable vi compatibility (emulation of old bugs)
View npe_beeswax_1.java
java.lang.NullPointerException
at com.cloudera.beeswax.BeeswaxServiceImpl$RunningQueryState.access$600(BeeswaxServiceImpl.java:124)
at com.cloudera.beeswax.BeeswaxServiceImpl.doWithState(BeeswaxServiceImpl.java:770)
at com.cloudera.beeswax.BeeswaxServiceImpl.fetch(BeeswaxServiceImpl.java:980)
at com.cloudera.beeswax.api.BeeswaxService$Processor$fetch.getResult(BeeswaxService.java:987)
at com.cloudera.beeswax.api.BeeswaxService$Processor$fetch.getResult(BeeswaxService.java:971)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
View gist:5314936
/etc/init.d/hadoop-hdfs-namenode
/etc/default/hadoop
/etc/default/hadoop-0.20-mapreduce
/etc/default/hadoop-fuse
/etc/default/hadoop-hdfs
/etc/default/hadoop-hdfs-namenode
/etc/default/hadoop-hdfs-secondarynamenode
/usr/lib/hadoop/libexec/hadoop-config.sh
/usr/lib/hadoop/libexec/hadoop-layout.sh
/etc/hadoop/conf/hadoop-env.sh
View gist:5314117
export HADOOP_COMMON_HOME=/usr/lib/hadoop
export HADOOP_CONF_DIR=/etc/hadoop/conf
export HADOOP_DATANODE_USER=hdfs
export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
export HADOOP_HOME=/usr/lib/hadoop-0.20-mapreduce
export HADOOP_HOME_WARN_SUPPRESS=true
export HADOOP_IDENT_STRING=hadoop
export HADOOP_IDENT_STRING=hdfs
export HADOOP_JOBTRACKERHA_USER=mapred
export HADOOP_JOBTRACKER_USER=mapred
View tmux-for-iterm2.rb
require 'formula'

class TmuxForIterm2 < Formula
  url 'http://iterm2.googlecode.com/files/tmux-for-iTerm2-20130302.tar.gz'
  sha1 '83d1389eb55b55bc069e0b66a11aa0a8faf9cddd'
  homepage 'http://code.google.com/p/iterm2/wiki/TmuxIntegration'

  depends_on 'libevent'

  def install
View gist:4968275
public static void main(String[] args) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  String[] otherArgs =
      new GenericOptionsParser(conf, args).getRemainingArgs();
  Job job = createSubmittableJob(conf, otherArgs);
  if (job != null) {
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
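createSubmittableJob is called above but not shown; the sketch below is one common shape for such a factory in a map-only HBase scan job, sitting in the same driver class as the main method. The table-name argument, the IdentityTableMapper choice, and the scan tuning values are assumptions for illustration, not what the gist actually builds.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.IdentityTableMapper;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;

// Hypothetical factory: wires a full-table scan into a map-only job.
public static Job createSubmittableJob(Configuration conf, String[] args)
    throws Exception {
  if (args.length < 1) {
    System.err.println("Usage: <tablename>");
    return null;                      // main() above skips submission on null
  }
  Job job = new Job(conf, "scan_" + args[0]);
  Scan scan = new Scan();
  scan.setCaching(500);               // fewer round trips per mapper
  scan.setCacheBlocks(false);         // don't pollute the region server block cache
  TableMapReduceUtil.initTableMapperJob(args[0], scan, IdentityTableMapper.class,
      ImmutableBytesWritable.class, Result.class, job);
  job.setNumReduceTasks(0);           // map-only
  job.setOutputFormatClass(NullOutputFormat.class);
  return job;
}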
View delete_blocks.log
2013-02-11 02:40:15,560 INFO org.apache.hadoop.hdfs.StateChange: BLOCK* ask 172.22.4.30:50010 to replicate blk_-8282418489515119773_208956459 to datanode(s) 172.22.4.36:50010
2013-02-11 02:40:19,711 INFO org.apache.hadoop.hdfs.StateChange: BLOCK* NameSystem.addStoredBlock: blockMap updated: 172.22.4.36:50010 is added to blk_-8282418489515119773_208956459 size 1882
2013-02-11 02:52:14,531 INFO org.apache.hadoop.hdfs.StateChange: BLOCK* NameSystem.addStoredBlock: blockMap updated: 172.22.24.37:50010 is added to blk_-8282418489515119773_208956459 size 1882
2013-02-11 02:52:14,531 INFO org.apache.hadoop.hdfs.StateChange: BLOCK* NameSystem.chooseExcessReplicates: (172.22.4.30:50010, blk_-8282418489515119773_208956459) is added to recentInvalidateSets
2013-02-11 02:52:30,141 INFO org.apache.hadoop.hdfs.StateChange: BLOCK* ask 172.22.4.30:50010 to delete blk_-7899761269070348109_200978153 blk_-8553516317821166119_181078974 blk_-6417975954560547521_204368816 blk_-7917102129184696044_160428177 blk_-884761233393181724