@wchan2
Last active September 29, 2015 03:55
Orphaned Web Pages Finder using Hadoop
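The MapReduce job below flags web pages that no other page links to. The mapper splits each input line on spaces and colons, so the input is assumed to look like "page: link1 link2 ..." (a page id followed by the ids it links to); it emits a 0 for the source page itself and a 1 for every linked page. The reducer sums these markers per page id and writes out only the ids whose sum is 0, i.e. pages nothing links to. As a purely hypothetical example, input such as

1: 2 3
2: 3
4: 1

would produce a single output line, 4, since page 4 appears as a source but never as a link target.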
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.IOException;
import java.util.StringTokenizer;

public class OrphanPages extends Configured implements Tool {
    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new OrphanPages(), args);
        System.exit(res);
    }
    // <<< Don't Change

    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        Job job = Job.getInstance(conf, "orphanedpages");

        // Map output: page id -> inbound-link marker (0 or 1).
        // Final output: orphaned page id with no value.
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(NullWritable.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setMapperClass(LinkCountMap.class);
        job.setReducerClass(OrphanPageReduce.class);
        job.setJarByClass(OrphanPages.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> {
        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // Each input line has the form "page: link1 link2 ...", so splitting on
            // spaces and colons yields the page id followed by the ids it links to.
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line, " :");

            // Emit 0 for the source page itself so that every listed page reaches
            // the reducer, even when nothing links to it.
            IntWritable page = new IntWritable(Integer.parseInt(tokenizer.nextToken().trim()));
            context.write(page, new IntWritable(0));

            // Emit 1 for every page this page links to.
            while (tokenizer.hasMoreTokens()) {
                String linkedPage = tokenizer.nextToken().trim();
                context.write(new IntWritable(Integer.parseInt(linkedPage)), new IntWritable(1));
            }
        }
    }

    public static class OrphanPageReduce extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> {
        @Override
        public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            // Sum the markers for this page: each inbound link contributes 1,
            // the page's own record contributes 0.
            int count = 0;
            for (IntWritable value : values) {
                count += value.get();
            }
            // A sum of 0 means no other page links here, i.e. the page is orphaned.
            if (count == 0) {
                context.write(key, NullWritable.get());
            }
        }
    }
}
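To try the job, package the class into a jar and submit it with the standard hadoop jar command; the jar name and HDFS paths below are placeholders, not anything specified in the gist:

hadoop jar orphanpages.jar OrphanPages /user/hadoop/links /user/hadoop/orphans

The first argument is the directory of link-list files and the second is an output directory that must not already exist; the orphaned page ids end up in the part-r-* files under it.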