public
Created

A program to rename an HBase table (works for 0.92.x)

  • Download Gist
RenameTable.java
Java
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162
 
package hbase.experiments;
 
import java.io.FileNotFoundException;
import java.io.IOException;
 
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
 
public class RenameTable {
 
static final String programName = "RenameTable";
static final Log LOG = LogFactory.getLog(RenameTable.class);
 
static void printUsage() {
System.out.println("Usage: " + programName + " <OLD_NAME> <NEW_NAME>");
}
 
/* Passed 'dir' exists and is a directory else exception */
static void isDirExists(FileSystem fs, Path dir) throws IOException {
if (!fs.exists(dir)) {
throw new IOException("Does not exist " + dir.toString());
} else if (!fs.getFileStatus(dir).isDir()) {
throw new IOException("Not a directory " + dir.toString());
}
}
 
/* Returns true if the region belongs to passed table */
static boolean isTableRegion(byte [] tableName, HRegionInfo hri) {
return Bytes.equals(Bytes.toBytes(hri.getTableNameAsString()), tableName);
}
 
static HRegionInfo createHRI(byte [] tableName, HRegionInfo oldHRI) {
return new HRegionInfo(tableName, oldHRI.getStartKey(), oldHRI.getEndKey(), oldHRI.isSplit());
}
 
static HTableDescriptor getHTableDescriptor(byte [] tableName, FileSystem fs, Configuration conf)
throws FileNotFoundException,
IOException {
return FSTableDescriptors.getTableDescriptor(fs, new Path(conf.get(HConstants.HBASE_DIR)), tableName);
}
 
public static void main(String [] args) throws IOException {
 
if (args.length != 2) {
printUsage();
System.exit(1);
}
String oldTableName = args[0];
String newTableName = args[1];
 
/*
* Set hadoop filesystem configuration using the hbase.rootdir. Otherwise,
* we'll always use localhost though the hbase.rootdir might be pointing at
* hdfs location.
*/
Configuration conf = HBaseConfiguration.create();
conf.set("fs.default.name", conf.get(HConstants.HBASE_DIR));
FileSystem fs = FileSystem.get(conf);
 
/*
* If new table directory does not exit, create it. Keep going if already
* exists because maybe we are rerunning script because it failed first
* time. Otherwise we are overwriting a pre-existing table.
*/
 
Path rootdir = FSUtils.getRootDir(conf);
Path oldTableDir = fs.makeQualified(new Path(rootdir, new Path(oldTableName)));
isDirExists(fs, oldTableDir);
Path newTableDir = fs.makeQualified(new Path(rootdir, newTableName));
if (!fs.exists(newTableDir)) {
fs.mkdirs(newTableDir);
}
 
/* Get hold of oldHTableDescriptor and create a new one */
HTableDescriptor oldHTableDescriptor = getHTableDescriptor(Bytes.toBytes(oldTableName), fs,
conf);
HTableDescriptor newHTableDescriptor = new HTableDescriptor(Bytes.toBytes(newTableName));
for (HColumnDescriptor family : oldHTableDescriptor.getColumnFamilies()) {
newHTableDescriptor.addFamily(family);
}
 
/*
* Run through the meta table moving region mentions from old to new table
* name.
*/
HTable metaTable = new HTable(conf, HConstants.META_TABLE_NAME);
Scan scan = new Scan();
ResultScanner scanner = metaTable.getScanner(scan);
Result result;
while ((result = scanner.next()) != null) {
String rowId = Bytes.toString(result.getRow());
HRegionInfo oldHRI = Writables.getHRegionInfo(result.getValue(HConstants.CATALOG_FAMILY,
HConstants.REGIONINFO_QUALIFIER));
if (oldHRI == null) {
throw new IOException("HRegionInfo is null for " + rowId);
}
if (!isTableRegion(Bytes.toBytes(oldTableName), oldHRI)) {
continue;
}
System.out.println(oldHRI.toString());
Path oldRDir = new Path(oldTableDir, new Path(oldHRI.getEncodedName()));
if (!fs.exists(oldRDir)) {
LOG.warn(oldRDir.toString() + " does not exist -- region " + oldHRI.getRegionNameAsString());
}
 
/* Now make a new HRegionInfo to add to .META. for the new region. */
HRegionInfo newHRI = createHRI(Bytes.toBytes(newTableName), oldHRI);
System.out.println(newHRI);
Path newRDir = new Path(newTableDir, new Path(newHRI.getEncodedName()));
LOG.info("Renaming " + oldRDir.toString() + " as " + newRDir.toString());
fs.rename(oldRDir, newRDir);
 
/* Removing old region from meta */
LOG.info("Removing " + rowId + " from .META.");
Delete d = new Delete(result.getRow());
metaTable.delete(d);
 
/* Create 'new' region */
HRegion newR = new HRegion(newTableDir, null, fs, conf, newHRI, newHTableDescriptor, null);
LOG.info("Adding to meta: " + newR.toString());
byte [] newRbytes = Writables.getBytes(newR.getRegionInfo());
Put p = new Put(newR.getRegionName());
p.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER, newRbytes);
metaTable.put(p);
/*
* Finally update the .regioninfo under new region location so it has new
* name.
*/
Path regioninfofile = new Path(newR.getRegionDir(), HRegion.REGIONINFO_FILE);
fs.delete(regioninfofile, true);
FSDataOutputStream out = fs.create(regioninfofile);
newR.getRegionInfo().write(out);
out.close();
}
scanner.close();
FSTableDescriptors.createTableDescriptor(newHTableDescriptor,conf);
fs.delete(oldTableDir);
LOG.info("DONE");
}
 
}

Please sign in to comment on this gist.

Something went wrong with that request. Please try again.