Skip to content
View AppSchedulable.java
if (rackLocalRequest != null && rackLocalRequest.getNumContainers() != 0
&& localRequest != null && localRequest.getNumContainers() != 0) {
return assignContainer(node, localRequest,
NodeType.NODE_LOCAL, reserved);
}
if (rackLocalRequest != null && !rackLocalRequest.getRelaxLocality()) {
continue;
}
View Pi.java
@Override
public void cleanup(Context context) throws IOException {
//write output to a file
Configuration conf = context.getConfiguration();
Path outDir = new Path(conf.get(FileOutputFormat.OUTDIR));
Path outFile = new Path(outDir, "reduce-out");
FileSystem fileSys = FileSystem.get(conf);
SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, conf,
outFile, LongWritable.class, LongWritable.class,
CompressionType.NONE);
View Pi_job_fail.java
[shrijeet@xxx-209 ~]$ hadoop jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar pi 5 5
Number of Maps = 5
Samples per Map = 5
Wrote input for Map #0
Wrote input for Map #1
Wrote input for Map #2
Wrote input for Map #3
Wrote input for Map #4
Starting Job
14/11/04 16:53:51 INFO client.RMProxy: Connecting to ResourceManager at yarn-rm.xxx-yyyy.example.net/172.0.0.0:8032
View TestMRCJCFileOutputCommitter_test_fail.java
-------------------------------------------------------
T E S T S
-------------------------------------------------------
Running org.apache.hadoop.mapred.TestMRCJCFileOutputCommitter
Tests run: 3, Failures: 1, Errors: 1, Skipped: 0, Time elapsed: 1.612 sec <<< FAILURE! - in org.apache.hadoop.mapred.TestMRCJCFileOutputCommitter
testAbort(org.apache.hadoop.mapred.TestMRCJCFileOutputCommitter) Time elapsed: 0.116 sec <<< ERROR!
java.lang.NullPointerException: null
at org.apache.hadoop.mapred.TestMRCJCFileOutputCommitter.testAbort(TestMRCJCFileOutputCommitter.java:144)
testFailAbort(org.apache.hadoop.mapred.TestMRCJCFileOutputCommitter) Time elapsed: 0.071 sec <<< FAILURE!
View StringFunnel.java
package com.example;
import com.google.common.hash.Funnel;
import com.google.common.hash.PrimitiveSink;
public enum StringFunnel implements Funnel<String> {
INSTANCE;
@Override
public void funnel(String from, PrimitiveSink into) {
View KeyValue.java
+ 4 // int: Total length of the whole KeyValue.
+ 4 // int: Total length of the key part of the KeyValue.
+ 4 // int: Total length of the value part of the KeyValue.
+ 2 // short: Row key length.
+ key.length // The row key.
+ 1 // byte: Family length.
+ family.length // The family.
+ qualifier.length // The qualifier.
+ 8 // long: The timestamp.
+ 1 // byte: The type of KeyValue.
View hive_no_system_exit.patch
if (tsk.ifRetryCmdWhenFail()) {
- if (running.size() != 0) {
+ if (running.size() != 0 && executeTasksInParallel()) {
taskCleanup();
}
// in case we decided to run everything in local mode, restore the
@@ -1183,7 +1183,7 @@ public class Driver implements CommandProcessor {
}
SQLState = "08S01";
console.printError(errorMessage);
View .vimrc
" VIM Configuration File
" Description: Optimized for C/C++ development, but useful also for other things.
" Author: Gerhard Gappmeier
"
" Use UTF-8 throughout: 'enc' is Vim's internal character encoding,
" 'fenc' the default encoding written for files edited in this session,
" and 'termencoding' the encoding used when talking to the terminal.
set enc=utf-8
set fenc=utf-8
set termencoding=utf-8
" disable vi compatibility (emulation of old bugs)
View npe_beeswax_1.java
java.lang.NullPointerException
at com.cloudera.beeswax.BeeswaxServiceImpl$RunningQueryState.access$600(BeeswaxServiceImpl.java:124)
at com.cloudera.beeswax.BeeswaxServiceImpl.doWithState(BeeswaxServiceImpl.java:770)
at com.cloudera.beeswax.BeeswaxServiceImpl.fetch(BeeswaxServiceImpl.java:980)
at com.cloudera.beeswax.api.BeeswaxService$Processor$fetch.getResult(BeeswaxService.java:987)
at com.cloudera.beeswax.api.BeeswaxService$Processor$fetch.getResult(BeeswaxService.java:971)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
View gist:5314936
/etc/init.d/hadoop-hdfs-namenode
/etc/default/hadoop
/etc/default/hadoop-0.20-mapreduce
/etc/default/hadoop-fuse
/etc/default/hadoop-hdfs
/etc/default/hadoop-hdfs-namenode
/etc/default/hadoop-hdfs-secondarynamenode
/usr/lib/hadoop/libexec/hadoop-config.sh
/usr/lib/hadoop/libexec/hadoop-layout.sh
/etc/hadoop/conf/hadoop-env.sh
Something went wrong with that request. Please try again.