Search in sources :

Example 11 with HBaseAdmin

use of org.apache.hadoop.hbase.client.HBaseAdmin in project hbase by apache.

The class TestBackupDeleteRestore defines the method testBackupDeleteRestore:

/**
 * Verify that a full backup taken before a deletion restores the deleted data:
 * load data - backup - delete some data - restore - deleted data is back.
 * @throws Exception if the backup, flush, or restore step fails
 */
@Test
public void testBackupDeleteRestore() throws Exception {
    LOG.info("test full restore on a single table empty table");
    List<TableName> tables = Lists.newArrayList(table1);
    String backupId = fullTableBackup(tables);
    assertTrue(checkSucceeded(backupId));
    LOG.info("backup complete");
    int numRows = TEST_UTIL.countRows(table1);
    HBaseAdmin hba = TEST_UTIL.getHBaseAdmin();
    try {
        // delete row, then flush so the deletion is persisted before the restore
        try (Table table = TEST_UTIL.getConnection().getTable(table1)) {
            // explicit charset: no-arg String.getBytes() depends on the platform default
            Delete delete = new Delete("row0".getBytes(java.nio.charset.StandardCharsets.UTF_8));
            table.delete(delete);
            hba.flush(table1);
        }
        TableName[] tableset = new TableName[] { table1 };
        // null mapping = restore in place over the live table
        TableName[] tablemap = null;
        BackupAdmin client = getBackupAdmin();
        client.restore(BackupUtils.createRestoreRequest(BACKUP_ROOT_DIR, backupId, false, tableset, tablemap, true));
        int numRowsAfterRestore = TEST_UTIL.countRows(table1);
        // the deleted row must have been restored, so the counts match
        assertEquals(numRows, numRowsAfterRestore);
    } finally {
        // original leaked the admin when an earlier step threw; always close it
        hba.close();
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) TableName(org.apache.hadoop.hbase.TableName) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) Table(org.apache.hadoop.hbase.client.Table) Test(org.junit.Test)

Example 12 with HBaseAdmin

use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.

The class HBaseQTestUtil defines the method cleanUp:

/**
 * Drops leftover state from a previous (possibly unsuccessful) run: the Hive
 * source table and, if present, the HBase snapshot of the source table.
 *
 * @param tname test name forwarded to the superclass cleanup
 * @throws Exception if the drop, snapshot check, or snapshot delete fails
 */
@Override
public void cleanUp(String tname) throws Exception {
    super.cleanUp(tname);
    // drop in case leftover from unsuccessful run
    db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, HBASE_SRC_NAME);
    // try-with-resources replaces the manual null-check/close in finally
    try (HBaseAdmin admin = new HBaseAdmin(conn.getConfiguration())) {
        if (hbaseTableSnapshotExists(admin, HBASE_SRC_SNAPSHOT_NAME)) {
            admin.deleteSnapshot(HBASE_SRC_SNAPSHOT_NAME);
        }
    }
}
Also used : HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin)

Example 13 with HBaseAdmin

use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.

The class SkeletonHBaseTest defines the method createTable:

/**
 * Creates an HBase table with the given column families.
 * Any failure is wrapped in an {@link IllegalStateException} so callers do not
 * have to handle checked exceptions.
 *
 * @param tableName name of the table to create
 * @param families  column family names to add to the table descriptor
 * @throws IllegalStateException if the table cannot be created
 */
protected void createTable(String tableName, String[] families) {
    HBaseAdmin admin = null;
    try {
        admin = new HBaseAdmin(getHbaseConf());
        HTableDescriptor tableDesc = new HTableDescriptor(tableName);
        for (String family : families) {
            tableDesc.addFamily(new HColumnDescriptor(family));
        }
        admin.createTable(tableDesc);
    } catch (Exception e) {
        // cause is preserved in the wrapper; no need to printStackTrace() first
        throw new IllegalStateException(e);
    } finally {
        // the original leaked the admin connection; always release it
        if (admin != null) {
            try {
                admin.close();
            } catch (IOException ignored) {
                // best-effort close; creation outcome already determined above
            }
        }
    }
}
Also used : HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Example 14 with HBaseAdmin

use of org.apache.hadoop.hbase.client.HBaseAdmin in project hive by apache.

The class TestPigHBaseStorageHandler defines the method testPigPopulation:

/**
 * End-to-end test: create a Hive-on-HBase table, populate it through Pig with
 * HCatStorer, then verify the data three ways - HCatLoader schema, a raw HBase
 * scan, and a Hive SELECT through the driver.
 */
@Test
public void testPigPopulation() throws Exception {
    Initialize();
    String tableName = newTableName("MyTable");
    String databaseName = newTableName("MyDatabase");
    //Table name will be lower case unless specified by hbase.table.name property
    String hbaseTableName = (databaseName + "." + tableName).toLowerCase();
    String db_dir = HCatUtil.makePathASafeFileName(getTestDir() + "/hbasedb");
    String POPTXT_FILE_NAME = db_dir + "testfile.txt";
    float f = -100.1f;
    String dbQuery = "CREATE DATABASE IF NOT EXISTS " + databaseName + " LOCATION '" + db_dir + "'";
    String deleteQuery = "DROP TABLE " + databaseName + "." + tableName;
    String tableQuery = "CREATE TABLE " + databaseName + "." + tableName + "(key int, testqualifier1 float, testqualifier2 string) STORED BY " + "'org.apache.hadoop.hive.hbase.HBaseStorageHandler'" + " WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,testFamily:testQualifier1,testFamily:testQualifier2')" + " TBLPROPERTIES ('hbase.table.default.storage.type'='binary')";
    String selectQuery = "SELECT * from " + databaseName.toLowerCase() + "." + tableName.toLowerCase();
    CommandProcessorResponse responseOne = driver.run(deleteQuery);
    assertEquals(0, responseOne.getResponseCode());
    CommandProcessorResponse responseTwo = driver.run(dbQuery);
    assertEquals(0, responseTwo.getResponseCode());
    CommandProcessorResponse responseThree = driver.run(tableQuery);
    // the original never checked the CREATE TABLE outcome
    assertEquals(0, responseThree.getResponseCode());
    // try-with-resources: the original never closed the admin
    try (HBaseAdmin hAdmin = new HBaseAdmin(getHbaseConf())) {
        boolean doesTableExist = hAdmin.tableExists(hbaseTableName);
        assertTrue(doesTableExist);
    }
    createTestDataFile(POPTXT_FILE_NAME);
    PigServer server = new PigServer(ExecType.LOCAL, hcatConf.getAllProperties());
    server.registerQuery("A = load '" + POPTXT_FILE_NAME + "' using PigStorage() as (key:int, testqualifier1:float, testqualifier2:chararray);");
    server.registerQuery("B = filter A by (key > 2) AND (key < 8) ;");
    server.registerQuery("store B into '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using  org.apache.hive.hcatalog.pig.HCatStorer();");
    server.registerQuery("C = load '" + databaseName.toLowerCase() + "." + tableName.toLowerCase() + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    // Schema should be same
    Schema dumpedBSchema = server.dumpSchema("C");
    List<FieldSchema> fields = dumpedBSchema.getFields();
    assertEquals(3, fields.size());
    assertEquals(DataType.INTEGER, fields.get(0).type);
    assertEquals("key", fields.get(0).alias.toLowerCase());
    assertEquals(DataType.FLOAT, fields.get(1).type);
    assertEquals("testQualifier1".toLowerCase(), fields.get(1).alias.toLowerCase());
    assertEquals(DataType.CHARARRAY, fields.get(2).type);
    assertEquals("testQualifier2".toLowerCase(), fields.get(2).alias.toLowerCase());
    //Query the hbase table and check the key is valid and only 5  are present
    Configuration conf = new Configuration(getHbaseConf());
    byte[] familyNameBytes = Bytes.toBytes("testFamily");
    int index = 3;
    int count = 0;
    // try-with-resources: the original leaked both the table and the scanner
    try (HTable table = new HTable(conf, hbaseTableName)) {
        Scan scan = new Scan();
        scan.addFamily(familyNameBytes);
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result result : scanner) {
                //key is correct
                assertEquals(index, Bytes.toInt(result.getRow()));
                //first column exists
                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier1")));
                //value is correct
                assertEquals((index + f), Bytes.toFloat(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier1"))), 0);
                //second column exists
                assertTrue(result.containsColumn(familyNameBytes, Bytes.toBytes("testQualifier2")));
                //value is correct
                assertEquals(("textB-" + index).toString(), Bytes.toString(result.getValue(familyNameBytes, Bytes.toBytes("testQualifier2"))));
                index++;
                count++;
            }
        }
    }
    // 5 rows should be returned (expected value goes first in assertEquals)
    assertEquals(5, count);
    //Check if hive returns results correctly
    driver.run(selectQuery);
    ArrayList<String> result = new ArrayList<String>();
    driver.getResults(result);
    //Query using the hive command line
    assertEquals(5, result.size());
    Iterator<String> itr = result.iterator();
    for (int i = 3; i <= 7; i++) {
        String[] tokens = itr.next().split("\\s+");
        assertEquals(i, Integer.parseInt(tokens[0]));
        assertEquals(i + f, Float.parseFloat(tokens[1]), 0);
        assertEquals(("textB-" + i).toString(), tokens[2]);
    }
    //delete the table from the database
    CommandProcessorResponse responseFour = driver.run(deleteQuery);
    assertEquals(0, responseFour.getResponseCode());
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Configuration(org.apache.hadoop.conf.Configuration) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) Schema(org.apache.pig.impl.logicalLayer.schema.Schema) FieldSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema) FieldSchema(org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema) ArrayList(java.util.ArrayList) HTable(org.apache.hadoop.hbase.client.HTable) Result(org.apache.hadoop.hbase.client.Result) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) PigServer(org.apache.pig.PigServer) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)

Example 15 with HBaseAdmin

use of org.apache.hadoop.hbase.client.HBaseAdmin in project SpyGlass by ParallelAI.

The class HBaseRawTap defines the method createResource:

/**
 * Ensures the backing HBase table exists, creating it with the scheme's
 * column families when it does not.
 *
 * @param jobConf job configuration used to obtain the HBase admin
 * @return always {@code true}: the table either already existed or was created
 * @throws IOException if the existence check or table creation fails
 */
@Override
public boolean createResource(JobConf jobConf) throws IOException {
    HBaseAdmin admin = getHBaseAdmin(jobConf);
    // Nothing to do when the table is already present.
    if (!admin.tableExists(tableName)) {
        LOG.info("creating hbase table: {}", tableName);
        HTableDescriptor descriptor = new HTableDescriptor(tableName);
        // One column family per name declared by the raw scheme.
        for (String family : ((HBaseRawScheme) getScheme()).getFamilyNames()) {
            descriptor.addFamily(new HColumnDescriptor(family));
        }
        admin.createTable(descriptor);
    }
    return true;
}
Also used : HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Aggregations

HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin)180 Test (org.junit.Test)93 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)76 PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection)72 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)70 Connection (java.sql.Connection)66 Properties (java.util.Properties)54 TableName (org.apache.hadoop.hbase.TableName)36 IOException (java.io.IOException)33 ResultSet (java.sql.ResultSet)27 BaseTest (org.apache.phoenix.query.BaseTest)27 HTable (org.apache.hadoop.hbase.client.HTable)26 SQLException (java.sql.SQLException)22 TestUtil.closeConnection (org.apache.phoenix.util.TestUtil.closeConnection)22 Put (org.apache.hadoop.hbase.client.Put)16 Configuration (org.apache.hadoop.conf.Configuration)13 HTableInterface (org.apache.hadoop.hbase.client.HTableInterface)13 PreparedStatement (java.sql.PreparedStatement)12 PhoenixIOException (org.apache.phoenix.exception.PhoenixIOException)12 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)9