Created November 22, 2013 21:44
A map-only MapReduce program that uses HCatalog to write its output: it reads records from one HCatalog table with HCatInputFormat, adjusts the gpa column, and writes the records to a second table with HCatOutputFormat, reusing the input table's schema. It expects three arguments: a server URI (parsed but not used in the listing), the input table name, and the output table name.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.mapreduce.HCatInputFormat;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.InputJobInfo;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

// Map-only job: reads records from one HCatalog table, adjusts the gpa
// column, and writes the records to another HCatalog table.
// Imports assume the HCatalog 0.x package names (org.apache.hcatalog.*);
// in Hive 0.12+ these classes live under org.apache.hive.hcatalog.*.
public class WriteRC extends Configured implements Tool {

    public static class Map extends
            Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
        String name;
        Integer age;
        Double gpa;

        @Override
        protected void map(
                WritableComparable key,
                HCatRecord value,
                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>.Context context)
                throws IOException, InterruptedException {
            // Columns are read positionally: 0 = name, 1 = age, 2 = gpa.
            name = value.get(0) == null ? null : (String) value.get(0);
            age = value.get(1) == null ? null : (Integer) value.get(1);
            gpa = value.get(2) == null ? null : (Double) value.get(2);
            // Round gpa down and tag it with .1 so the transformation is visible in the output.
            if (gpa != null) gpa = Math.floor(gpa) + 0.1;

            // Record width should match the output schema; only the first three slots are set here.
            HCatRecord record = new DefaultHCatRecord(5);
            record.set(0, name);
            record.set(1, age);
            record.set(2, gpa);
            // HCatOutputFormat ignores the key, so null is fine.
            context.write(null, record);
        }
    }
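
    // ------------------------------------------------------------------
    // Not part of the original gist: a minimal sketch of the same mapper
    // using name-based field access instead of positional access. The
    // column names ("name", "age", "gpa") are assumptions inferred from
    // the variable names above, not taken from the gist.
    public static class MapByName extends
            Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
        private HCatSchema schema;

        @Override
        protected void setup(Context context) throws IOException,
                InterruptedException {
            // The input table schema set by HCatInputFormat.setInput() is
            // available to each task from the job context.
            schema = HCatInputFormat.getTableSchema(context);
        }

        @Override
        protected void map(WritableComparable key, HCatRecord value,
                Context context) throws IOException, InterruptedException {
            Double gpa = (Double) value.get("gpa", schema);
            if (gpa != null) gpa = Math.floor(gpa) + 0.1;

            // Width assumed to equal the three columns handled in this sketch.
            HCatRecord record = new DefaultHCatRecord(3);
            record.set(0, (String) value.get("name", schema));
            record.set(1, (Integer) value.get("age", schema));
            record.set(2, gpa);
            context.write(null, record);
        }
    }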
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        args = new GenericOptionsParser(conf, args).getRemainingArgs();
        String serverUri = args[0]; // accepted but not used in this listing
        String inputTableName = args[1];
        String outputTableName = args[2];
        String dbName = null; // null selects the default database

        // Forward the metastore principal (if any) for secure clusters.
        String principalID = System
                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
        if (principalID != null)
            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);

        Job job = new Job(conf, "WriteRC");

        // Initialize HCatInputFormat with the table to read.
        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
                inputTableName, null, null));
        job.setInputFormatClass(HCatInputFormat.class);
        job.setJarByClass(WriteRC.class);
        job.setMapperClass(Map.class);
        job.setOutputKeyClass(WritableComparable.class);
        job.setOutputValueClass(DefaultHCatRecord.class);
        job.setNumReduceTasks(0);

        // Initialize HCatOutputFormat, reusing the input table's schema for the output table.
        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
                outputTableName, null));
        HCatSchema s = HCatInputFormat.getTableSchema(job);
        System.err.println("INFO: output schema explicitly set for writing:"
                + s);
        HCatOutputFormat.setSchema(job, s);
        job.setOutputFormatClass(HCatOutputFormat.class);

        return (job.waitForCompletion(true) ? 0 : 1);
    }

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new WriteRC(), args);
        System.exit(exitCode);
    }
}
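
The job writes to an unpartitioned target: the third argument to OutputJobInfo.create() is null. For a partitioned output table, that argument takes a map of static partition key/value pairs instead. A minimal sketch, assuming a hypothetical partition column named ds (nothing in the gist defines one):

    // Sketch only: write into a specific partition of the output table.
    // The partition column "ds" and its value are illustrative assumptions.
    // java.util.Map is fully qualified to avoid clashing with the nested Map mapper class.
    java.util.Map<String, String> partitionValues =
            new java.util.HashMap<String, String>();
    partitionValues.put("ds", "20131122");
    HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
            outputTableName, partitionValues));

With this variant, the rest of the job setup (schema, output format class, map-only execution) stays exactly as in the listing above.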