Use of org.apache.hadoop.hbase.client.HBaseAdmin in project hbase by apache.
The class TestBackupDeleteRestore, method testBackupDeleteRestore.
/**
 * Verify that load data -> backup -> delete some data -> restore works as expected:
 * the deleted data gets restored.
 * @throws Exception if backup or restore fails
 */
@Test
public void testBackupDeleteRestore() throws Exception {
  LOG.info("test full restore on a single table");
  List<TableName> tables = Lists.newArrayList(table1);
  String backupId = fullTableBackup(tables);
  assertTrue(checkSucceeded(backupId));
  LOG.info("backup complete");
  int numRows = TEST_UTIL.countRows(table1);
  HBaseAdmin hba = TEST_UTIL.getHBaseAdmin();
  // delete one row, then flush so the deletion is persisted
  try (Table table = TEST_UTIL.getConnection().getTable(table1)) {
    Delete delete = new Delete(Bytes.toBytes("row0"));
    table.delete(delete);
    hba.flush(table1);
  }
  TableName[] tableset = new TableName[] { table1 };
  // tablemap would be new TableName[] { table1_restore } to restore into a different table;
  // null restores in place
  TableName[] tablemap = null;
  BackupAdmin client = getBackupAdmin();
  client.restore(
      BackupUtils.createRestoreRequest(BACKUP_ROOT_DIR, backupId, false, tableset, tablemap, true));
  int numRowsAfterRestore = TEST_UTIL.countRows(table1);
  assertEquals(numRows, numRowsAfterRestore);
  hba.close();
}
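The six positional arguments to createRestoreRequest are hard to read inline. Below is a restatement with each argument labeled; the labels are my reading of the HBase backup API (BackupUtils.createRestoreRequest returns a RestoreRequest), so treat them as assumptions rather than documentation.

// Same restore call with the arguments labeled.
// Labels are assumptions based on the HBase backup API, not test comments.
RestoreRequest request = BackupUtils.createRestoreRequest(
    BACKUP_ROOT_DIR, // root directory holding the backup images
    backupId,        // id returned by fullTableBackup(tables) above
    false,           // check-only flag: false means actually restore
    tableset,        // source tables recorded in the backup
    tablemap,        // target table names; null restores in place
    true);           // overwrite existing table data
client.restore(request);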
Use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.
The class HBaseQTestUtil, method cleanUp.
@Override
public void cleanUp(String tname) throws Exception {
  super.cleanUp(tname);
  // drop in case of leftovers from an unsuccessful run
  db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, HBASE_SRC_NAME);
  HBaseAdmin admin = null;
  try {
    admin = new HBaseAdmin(conn.getConfiguration());
    if (hbaseTableSnapshotExists(admin, HBASE_SRC_SNAPSHOT_NAME)) {
      admin.deleteSnapshot(HBASE_SRC_SNAPSHOT_NAME);
    }
  } finally {
    if (admin != null) {
      admin.close();
    }
  }
}
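The hbaseTableSnapshotExists helper is referenced but not shown in this excerpt. A minimal sketch of what such a helper could look like with the same HBaseAdmin API follows; the implementation is an assumption, not the Hive source, and the package of SnapshotDescription varies across HBase versions.

// Hypothetical helper: scan the cluster's snapshot list for a matching name.
private static boolean hbaseTableSnapshotExists(HBaseAdmin admin, String snapshotName)
    throws IOException {
  for (SnapshotDescription snapshot : admin.listSnapshots()) {
    if (snapshot.getName().equals(snapshotName)) {
      return true;
    }
  }
  return false;
}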
Use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.
The class SkeletonHBaseTest, method createTable.
protected void createTable(String tableName, String[] families) {
  try {
    HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
    HTableDescriptor tableDesc = new HTableDescriptor(tableName);
    for (String family : families) {
      HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
      tableDesc.addFamily(columnDescriptor);
    }
    admin.createTable(tableDesc);
  } catch (Exception e) {
    e.printStackTrace();
    throw new IllegalStateException(e);
  }
}
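Note that new HBaseAdmin(Configuration) was deprecated in HBase 1.0, and the snippet never closes the admin. Below is a sketch of the same table creation against the HBase 2.x builder API (Connection, Admin, TableDescriptorBuilder, ColumnFamilyDescriptorBuilder from org.apache.hadoop.hbase.client); this is an equivalent I would expect to work, not the SkeletonHBaseTest source.

// Sketch: the same table creation with the non-deprecated HBase 2.x API.
// try-with-resources also fixes the admin leak in the original snippet.
protected void createTable(String tableName, String[] families) {
  try (Connection connection = ConnectionFactory.createConnection(getHbaseConf());
       Admin admin = connection.getAdmin()) {
    TableDescriptorBuilder builder =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
    for (String family : families) {
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    admin.createTable(builder.build());
  } catch (Exception e) {
    throw new IllegalStateException(e);
  }
}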
Use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.
The class TestPigHBaseStorageHandler, method testPigPopulation.
@Test
public void testPigPopulation() throws Exception {
  Initialize();
  String tableName = newTableName("MyTable");
  String databaseName = newTableName("MyDatabase");
  // Table name will be lower case unless specified by the hbase.table.name property
  String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
  String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
  String POPTXT_FILE_NAME = db_dir + "testfile.txt";
  float f = -100.1f;
  String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + db_dir + "'";
  String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
  String tableQuery = "CREATE TABLE " + databaseName + "." + tableName
      + "(key int, testqualifier1 float, testqualifier2 string) STORED BY "
      + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'"
      + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')"
      + " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
  String selectQuery = "SELECT * from " + databaseName.toLowerCase() + "." + tableName.toLowerCase();
  CommandProcessorResponse responseOne = driver.run(deleteQuery);
  assertEquals(0, responseOne.getResponseCode());
  CommandProcessorResponse responseTwo = driver.run(dbQuery);
  assertEquals(0, responseTwo.getResponseCode());
  CommandProcessorResponse responseThree = driver.run(tableQuery);
  HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf());
  boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
  assertTrue(doesTableExist);
  createTestDataFile(POPTXT_FILE_NAME);
  PigServer server = new PigServer(ExecType.LOCAL, hcatConf.getAllProperties());
  server.registerQuery("A = load '" + POPTXT_FILE_NAME
      + "' using PigStorage() as (key:int, testqualifier1:float, testqualifier2:chararray);");
  server.registerQuery("B = filter A by (key > 2) AND (key < 8) ;");
  server.registerQuery("store B into '" + databaseName.toLowerCase() + "." + tableName.toLowerCase()
      + "' using org.apache.hive.hcatalog.pig.HCatStorer();");
  server.registerQuery("C = load '" + databaseName.toLowerCase() + "." + tableName.toLowerCase()
      + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
  // The schema read back through HCatLoader should match what was stored
  Schema dumpedBSchema = server.dumpSchema("C");
  List<FieldSchema> fields = dumpedBSchema.getFields();
  assertEquals(3, fields.size());
  assertEquals(DataType.INTEGER, fields.get(0).type);
  assertEquals("key", fields.get(0).alias.toLowerCase());
  assertEquals(DataType.FLOAT, fields.get(1).type);
  assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
  assertEquals(DataType.CHARARRAY, fields.get(2).type);
  assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
  // Scan the HBase table and check that the keys are valid and only 5 rows are present
  Configuration conf = new Configuration(getHbaseConf());
  HTable table = new HTable(conf, hbaseTableName);
  Scan scan = new Scan();
  scan.addFamily(Bytes.toBytes("testFamily"));
  byte[] familyNameBytes = Bytes.toBytes("testFamily");
  ResultScanner scanner = table.getScanner(scan);
  int index = 3;
  int count = 0;
  for (Result result : scanner) {
    // key is correct
    assertEquals(index, Bytes.toInt(result.getRow()));
    // first column exists
    assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier1")));
    // value is correct
    assertEquals((index + f), Bytes.toFloat(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier1"))), 0);
    // second column exists
    assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier2")));
    // value is correct
    assertEquals("textB-" + index, Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier2"))));
    index++;
    count++;
  }
  // 5 rows should be returned
  assertEquals(5, count);
  // Check that Hive returns the same results through the command line
  driver.run(selectQuery);
  ArrayList<String> result = new ArrayList<String>();
  driver.getResults(result);
  assertEquals(5, result.size());
  Iterator<String> itr = result.iterator();
  for (int i = 3; i <= 7; i++) {
    String[] tokens = itr.next().split("\\s+");
    assertEquals(i, Integer.parseInt(tokens[0]));
    assertEquals(i + f, Float.parseFloat(tokens[1]), 0);
    assertEquals("textB-" + i, tokens[2]);
  }
  // drop the table from the database
  CommandProcessorResponse responseFour = driver.run(deleteQuery);
  assertEquals(0, responseFour.getResponseCode());
}
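Both new HBaseAdmin(conf) and new HTable(conf, tableName) used above were deprecated in HBase 1.0 in favor of a shared Connection, and neither is closed in the test. A minimal sketch of the verification scan with the Connection-based API follows (reusing hbaseTableName from the test; an assumed modernization, not the Hive source):

// Sketch: the same scan via ConnectionFactory, with everything auto-closed.
try (Connection connection = ConnectionFactory.createConnection(getHbaseConf());
     Table table = connection.getTable(TableName.valueOf(hbaseTableName));
     ResultScanner scanner = table.getScanner(new Scan().addFamily(Bytes.toBytes("testFamily")))) {
  for (Result result : scanner) {
    // same per-row assertions as in the test above
  }
}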
Use of org.apache.hadoop.hbase.client.HBaseAdmin in project SpyGlass by ParallelAI.
The class HBaseRawTap, method createResource.
@Override
public boolean createResource(JobConf jobConf) throws IOException {
  HBaseAdmin hBaseAdmin = getHBaseAdmin(jobConf);
  if (hBaseAdmin.tableExists(tableName)) {
    return true;
  }
  LOG.info("creating hbase table: {}", tableName);
  HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
  String[] familyNames = ((HBaseRawScheme) getScheme()).getFamilyNames();
  for (String familyName : familyNames) {
    tableDescriptor.addFamily(new HColumnDescriptor(familyName));
  }
  hBaseAdmin.createTable(tableDescriptor);
  return true;
}
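The getHBaseAdmin(jobConf) helper is not shown in this excerpt. A plausible one-liner is sketched below; SpyGlass may cache the admin or configure it differently, so this is only an assumption:

// Hypothetical helper: build an HBaseAdmin from the job's configuration.
private HBaseAdmin getHBaseAdmin(JobConf jobConf) throws IOException {
  return new HBaseAdmin(HBaseConfiguration.create(jobConf));
}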