@elazarl
Created April 7, 2014 11:23
/**
 * Usage:
 * $ javac -cp `hbase classpath` KillRegions.java
 * $ # list all regions in the cluster, keep the first region of MYTABLE
 * $ java -cp `hbase classpath`:. KillRegions -l|grep MYTABLE|head -n1 >/tmp/regions.txt
 * $ # delete the first region of MYTABLE, listed above
 * $ java -cp `hbase classpath`:. KillRegions </tmp/regions.txt
 */
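/**
 * There is also an undocumented -random flag (see run() below), which prints each
 * region independently with the given probability. A hedged usage sketch, e.g. to
 * sample roughly a tenth of the cluster's regions:
 * $ java -cp `hbase classpath`:. KillRegions -random 0.1 >/tmp/regions.txt
 */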
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import com.google.common.io.InputSupplier;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.TTCCLayout;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.List;
import java.util.Random;
/**
 * KillRegions deletes all regions whose names are read from stdin,
 * one region name per line.
 */
public class KillRegions extends Configured implements Tool {
    private static final int DONE = 0;
    private static final int DONE_NOHBCK = 1;
    private static final int BAD = 2;
    public static void main(String[] args) throws Exception {
        // send log4j output to stderr, so stdout stays clean for region lists
        Logger rootLogger = Logger.getRootLogger();
        rootLogger.removeAllAppenders();
        rootLogger.addAppender(new ConsoleAppender(new TTCCLayout(), "System.err"));
        // derive the HDFS configuration from the HBase root directory
        Configuration conf = HBaseConfiguration.create();
        Path hbasedir = new Path(conf.get(HConstants.HBASE_DIR));
        URI defaultFs = hbasedir.getFileSystem(conf).getUri();
        conf.set("fs.defaultFS", defaultFs.toString()); // for hadoop 0.21+
        conf.set("fs.default.name", defaultFs.toString()); // for hadoop 0.20
        // kill regions
        int run = ToolRunner.run(new KillRegions(conf), args);
        if (run == DONE_NOHBCK || run == BAD) {
            System.exit(run == DONE_NOHBCK ? 0 : run);
        }
        // repair cluster metadata after the deletions
        System.out.println("running hbase hbck -repair");
        HBaseFsck.main(new String[] {"-repair"});
    }
    @Override
    public int run(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("l", false, "list all regions");
        options.addOption("loglevel", true, "set log level");
        options.addOption("random", true, "print each region with the given probability");
        CommandLine cmd = new BasicParser().parse(options, args);
        if (cmd.hasOption("loglevel")) {
            Logger.getRootLogger().setLevel(Level.toLevel(cmd.getOptionValue("loglevel")));
        }
        if (cmd.hasOption('l')) {
            for (String line : getRegions()) {
                System.out.println(line);
            }
            return DONE_NOHBCK;
        } else if (cmd.hasOption("random")) {
            double probabilityToPrint = Double.parseDouble(cmd.getOptionValue("random"));
            Random random = new Random();
            for (String region : getRegions()) {
                if (random.nextDouble() < probabilityToPrint) {
                    System.out.println(region);
                }
            }
            return DONE_NOHBCK;
        } else {
            deleteFromStdin();
        }
        return DONE;
    }
    private List<String> getRegions() throws IOException {
        List<String> rv = Lists.newArrayList();
        HTable meta = getMeta();
        ResultScanner scanner = meta.getScanner(new Scan());
        for (Result result : scanner) {
            rv.add(Bytes.toString(result.getRow()));
        }
        scanner.close();
        meta.close();
        return rv;
    }
    private void deleteFromStdin() throws IOException {
        List<Delete> deletes = Lists.newArrayList();
        List<String> regions = CharStreams.readLines(new InputSupplier<InputStreamReader>() {
            @Override
            public InputStreamReader getInput() throws IOException {
                return new InputStreamReader(System.in, Charsets.UTF_8);
            }
        });
        for (String region : regions) {
            deletes.add(new Delete(region.getBytes(Charsets.US_ASCII)));
        }
        System.out.println("Deleting regions from .META.:\n");
        HTableInterface table = getMeta();
        // HTable.delete(List) removes the successfully applied Deletes from the
        // list, so whatever remains afterwards is the set of failed deletions
        table.delete(deletes);
        if (deletes.isEmpty()) System.out.println("Deletion succeeded");
        else System.out.println("Failed to delete " + deletes.size() + " rows:");
        for (Delete delete : deletes) {
            System.out.println(Bytes.toString(delete.getRow()));
        }
        System.out.println("Deleting regions from HDFS");
        FileContext fc = FileContext.getFileContext(getConf());
        Path hbaseRoot = new Path(getConf().get("hbase.rootdir"));
        for (String region : regions) {
            // region row keys look like: table,startKey,timestamp.encodedName.
            String[] regionComps = region.split(",");
            String regionId = regionComps[2].split("\\.")[1];
            String tableName = regionComps[0];
            if (regionId.isEmpty()) throw new IllegalStateException("regionId must not be empty: " + region);
            // if the start key itself contains a comma, this parsing breaks
            Path regionPath = new Path(hbaseRoot, tableName + "/" + regionId);
            System.out.println("deleting " + regionPath);
            fc.delete(regionPath, true);
        }
    }
    private HTable getMeta() throws IOException {
        return new HTable(getConf(), ".META.");
    }

    public KillRegions(Configuration conf) {
        super(conf);
    }
}
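
// For reference, a minimal sketch (not part of the original gist) of the row-key
// parsing that deleteFromStdin() relies on. It assumes the classic 0.9x-era
// .META. row-key layout shown in the comment; the sample region name is made up.
class RegionNameParseDemo {
    public static void main(String[] args) {
        // .META. row key: table,startKey,timestamp.encodedName.
        String region = "MYTABLE,,1396868231234.5a3c1f2b9d8e7f6a5b4c3d2e1f0a9b8c.";
        String[] comps = region.split(",");
        String tableName = comps[0];                // "MYTABLE"
        String regionId = comps[2].split("\\.")[1]; // the encoded region name
        // this is the path suffix KillRegions deletes under hbase.rootdir
        System.out.println(tableName + "/" + regionId);
    }
}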