@cykl
Created January 1, 2015 17:39
Demonstrates how to "abuse" ApplicationClassLoader to relieve JAR version conflicts in Hadoop launchers. (This has been addressed upstream in Hadoop 2.6.0; see https://issues.apache.org/jira/browse/HADOOP-10893.)
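How the pieces fit together: the launcher (Main) rebuilds its context classloader as a parent-last RunJarClassLoader and loads MyTool through it, so the Tool and its dependencies (Guava and Joda-Time in this example) resolve against the job's JARs before Hadoop's own copies. MyTool then sets mapreduce.job.classloader=true so the tasks get the same isolation, and MyMapper prints the classloaders of DateTime and Splitter in setup() to verify which copies were picked up.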
Main.java

import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Main {

    private static final String TOOL_CL_NAME = "MyTool";

    public static void main(String[] args) throws Exception {
        // Swap in a parent-last classloader built from the launcher's own classpath.
        RunJarClassLoader toolClassLoader = RunJarClassLoader.fromContextClassLoader();
        toolClassLoader.installAsContextClassLoader();

        // Load the Tool through it so the Tool and its dependencies resolve child-first.
        Tool tool = toolClassLoader.create(TOOL_CL_NAME);
        int exitCode = ToolRunner.run(tool, args);
        System.exit(exitCode);
    }
}
MyMapper.java

import com.google.common.base.Splitter;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.joda.time.DateTime;

import java.io.IOException;

public class MyMapper extends Mapper<LongWritable, Text, NullWritable, NullWritable> {

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        super.setup(context);
        // Print which classloaders served Joda-Time and Guava inside the task:
        // with mapreduce.job.classloader=true both should come from the job classloader.
        System.out.println(DateTime.class.getClassLoader());
        System.out.println(Splitter.class.getClassLoader());
    }
}
MyTool.java

import com.google.common.base.Splitter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.joda.time.DateTime;

public class MyTool extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        // Launcher-side check: Joda-Time is usable, and the print shows which
        // classloader served Guava's Splitter (it should be the RunJarClassLoader).
        System.out.println(DateTime.now());
        System.out.println(Splitter.class.getClassLoader());

        Configuration conf = getConf();
        // Ask MapReduce to isolate task classes the same way (mapreduce.job.classloader=true).
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);

        Job job = new Job(conf, "wordcount");
        job.setJarByClass(this.getClass());
        job.setMapperClass(MyMapper.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(NullOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));

        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }
}
RunJarClassLoader.java

import org.apache.hadoop.yarn.util.ApplicationClassLoader;

import java.net.URL;
import java.net.URLClassLoader;
import java.util.Collections;

/**
 * Parent-last classloader built from the launcher's classpath: classes are resolved
 * from the job's JARs first and only then delegated to the parent classloader.
 */
public class RunJarClassLoader extends ApplicationClassLoader {

    public static RunJarClassLoader fromContextClassLoader() {
        URLClassLoader ctxClassLoader = (URLClassLoader) Thread.currentThread().getContextClassLoader();
        return new RunJarClassLoader(ctxClassLoader.getURLs(), ctxClassLoader.getParent());
    }

    public RunJarClassLoader(URL[] urls, ClassLoader parent) {
        // An empty system-classes list means no class is forced to come from the parent first.
        super(urls, parent, Collections.<String>emptyList());
    }

    public void installAsContextClassLoader() {
        Thread.currentThread().setContextClassLoader(this);
    }

    @SuppressWarnings("unchecked")
    public <T> T create(String className) throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        Class<T> toolClass = (Class<T>) Class.forName(className, true, this);
        return toolClass.newInstance();
    }
}
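For illustration, here is a minimal, self-contained sketch of the parent-last effect the classes above rely on. It assumes a Guava copy is reachable both from the job's classpath and from Hadoop's launcher classpath; the class name ParentLastDemo is made up for this sketch and is not part of the gist.

import org.apache.hadoop.yarn.util.ApplicationClassLoader;

import java.net.URLClassLoader;
import java.util.Collections;

public class ParentLastDemo {

    public static void main(String[] args) throws Exception {
        // Wrap the launcher's classpath in a parent-last ApplicationClassLoader,
        // exactly as RunJarClassLoader does above.
        URLClassLoader ctx = (URLClassLoader) Thread.currentThread().getContextClassLoader();
        ApplicationClassLoader parentLast =
                new ApplicationClassLoader(ctx.getURLs(), ctx.getParent(), Collections.<String>emptyList());

        // Child-first: resolved from the job's own JARs if a Guava copy is on the classpath.
        Class<?> childCopy = Class.forName("com.google.common.base.Splitter", true, parentLast);
        // Regular delegation: the copy visible to the launcher, typically Hadoop's bundled Guava.
        Class<?> parentCopy = Class.forName("com.google.common.base.Splitter");

        System.out.println(childCopy.getClassLoader());  // the ApplicationClassLoader
        System.out.println(parentCopy.getClassLoader()); // the original launcher classloader
    }
}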