Use of org.apache.hadoop.hbase.client.HTableInterface in project Hive by Apache.
The class HBaseReadWrite, method putStorageDescriptor.
/**
 * Place the common parts of a storage descriptor into the cache and write the storage
 * descriptor out to HBase. This should only be called if you are sure that the storage
 * descriptor needs to be added. If you have changed a table or partition but not its storage
 * descriptor, do not call this method, as it will increment the reference count of the storage
 * descriptor.
 * @param storageDescriptor storage descriptor to store.
 * @return id of the entry in the cache, to be written in for the storage descriptor
 */
byte[] putStorageDescriptor(StorageDescriptor storageDescriptor) throws IOException {
  byte[] sd = HBaseUtils.serializeStorageDescriptor(storageDescriptor);
  byte[] key = HBaseUtils.hashStorageDescriptor(storageDescriptor, md);
  byte[] serializedRefCnt = read(SD_TABLE, key, CATALOG_CF, REF_COUNT_COL);
  HTableInterface htab = conn.getHBaseTable(SD_TABLE);
  if (serializedRefCnt == null) {
    // We are the first to put it in the DB
    Put p = new Put(key);
    p.add(CATALOG_CF, CATALOG_COL, sd);
    p.add(CATALOG_CF, REF_COUNT_COL, "1".getBytes(HBaseUtils.ENCODING));
    htab.put(p);
    sdCache.put(new ByteArrayWrapper(key), storageDescriptor);
  } else {
    // Just increment the reference count
    int refCnt = Integer.parseInt(new String(serializedRefCnt, HBaseUtils.ENCODING)) + 1;
    Put p = new Put(key);
    p.add(CATALOG_CF, REF_COUNT_COL, Integer.toString(refCnt).getBytes(HBaseUtils.ENCODING));
    htab.put(p);
  }
  conn.flush(htab);
  return key;
}
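Note that the read-then-put sequence above is not atomic: two concurrent writers hashing to the same storage descriptor can both read the same reference count, and one increment is lost. Below is a minimal sketch of an atomic alternative using HBase's stock counter API. It is an illustration, not Hive's code, and it assumes the reference count is stored as an 8-byte long (the format incrementColumnValue requires) rather than the decimal string used above; it reuses the htab, key, and sd locals from the method.

// Sketch only: atomic reference counting via HBase counters. Assumes the
// ref-count cell holds an 8-byte long, not a decimal string, so it is not
// drop-in compatible with the layout used by putStorageDescriptor above.
long newCount = htab.incrementColumnValue(key, CATALOG_CF, REF_COUNT_COL, 1L);
if (newCount == 1L) {
  // First reference to this descriptor hash: store the serialized bytes too.
  Put p = new Put(key);
  p.add(CATALOG_CF, CATALOG_COL, sd);
  htab.put(p);
  sdCache.put(new ByteArrayWrapper(key), storageDescriptor);
}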
Use of org.apache.hadoop.hbase.client.HTableInterface in project Hive by Apache.
The class HBaseReadWrite, method printPartition.
/**
 * Print out a partition.
 * @param partKey The key for the partition. This must include dbname.tablename._partkeys_
 *                where _partkeys_ is a dot-separated list of partition values in the proper
 *                order.
 * @return string containing the partition
 * @throws IOException
 * @throws TException
 */
String printPartition(String partKey) throws IOException, TException {
  // First figure out the table and fetch it
  String[] partKeyParts = partKey.split(HBaseUtils.KEY_SEPARATOR_STR);
  if (partKeyParts.length < 3)
    return noSuch(partKey, "partition");
  Table table = getTable(partKeyParts[0], partKeyParts[1]);
  if (table == null)
    return noSuch(partKey, "partition");
  byte[] key = HBaseUtils.buildPartitionKey(partKeyParts[0], partKeyParts[1],
      HBaseUtils.getPartitionKeyTypes(table.getPartitionKeys()),
      Arrays.asList(Arrays.copyOfRange(partKeyParts, 2, partKeyParts.length)));
  @SuppressWarnings("deprecation")
  HTableInterface htab = conn.getHBaseTable(PART_TABLE);
  Get g = new Get(key);
  g.addColumn(CATALOG_CF, CATALOG_COL);
  g.addFamily(STATS_CF);
  Result result = htab.get(g);
  if (result.isEmpty())
    return noSuch(partKey, "partition");
  return printOnePartition(result);
}
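HTableInterface is deprecated in the HBase 1.x client, hence the @SuppressWarnings above. For comparison, here is a hedged sketch of the same point read written against the replacement Connection/Table API. The table name "HBMS_PARTS" and the connection setup are assumptions for illustration; the key and column constants are the ones used above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;

// Sketch of the same Get using the non-deprecated HBase 1.x client API.
// "HBMS_PARTS" is an assumed table name, not taken from the code above.
Configuration conf = HBaseConfiguration.create();
try (Connection connection = ConnectionFactory.createConnection(conf);
     Table partTable = connection.getTable(TableName.valueOf("HBMS_PARTS"))) {
  Get g = new Get(key);
  g.addColumn(CATALOG_CF, CATALOG_COL);
  g.addFamily(STATS_CF);
  Result result = partTable.get(g);
  // result is consumed exactly as in printPartition above
}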
Use of org.apache.hadoop.hbase.client.HTableInterface in project Hive by Apache.
The class HBaseReadWrite, method printSequences.
/**
 * One method to print all entries in the sequence table. It's not expected to be large.
 * @return each sequence as one string
 * @throws IOException
 */
List<String> printSequences() throws IOException {
  HTableInterface htab = conn.getHBaseTable(SEQUENCES_TABLE);
  Iterator<Result> iter = scan(SEQUENCES_TABLE, CATALOG_CF, CATALOG_COL, null);
  List<String> sequences = new ArrayList<>();
  if (!iter.hasNext())
    return Arrays.asList("No sequences");
  while (iter.hasNext()) {
    Result result = iter.next();
    sequences.add(new StringBuilder(new String(result.getRow(), HBaseUtils.ENCODING))
        .append(": ")
        .append(new String(result.getValue(CATALOG_CF, CATALOG_COL), HBaseUtils.ENCODING))
        .toString());
  }
  return sequences;
}
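The scan(...) helper is private to HBaseReadWrite and not shown in this listing. A plausible reconstruction, based only on the standard HBase 1.x scanner API, follows; the exact signature is an assumption and the real method may take additional arguments (e.g. key ranges).

// Hypothetical reconstruction of the scan(...) helper used above.
private Iterator<Result> scan(String table, byte[] colFam, byte[] colName,
                              Filter filter) throws IOException {
  HTableInterface htab = conn.getHBaseTable(table);
  Scan s = new Scan();
  s.addColumn(colFam, colName);
  if (filter != null) s.setFilter(filter);
  ResultScanner scanner = htab.getScanner(s);
  return scanner.iterator();
}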
Use of org.apache.hadoop.hbase.client.HTableInterface in project Hive by Apache.
The class HBaseReadWrite, method store.
private void store(String table, byte[] key, byte[] colFam, byte[][] colName, byte[][] obj) throws IOException {
  HTableInterface htab = conn.getHBaseTable(table);
  Put p = new Put(key);
  for (int i = 0; i < colName.length; i++) {
    p.add(colFam, colName[i], obj[i]);
  }
  htab.put(p);
  conn.flush(htab);
}
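store takes parallel arrays: colName[i] is written with value obj[i], all under one column family and one row key, in a single Put. A hypothetical call, built only from the constants and helpers that appear in the snippets above:

// Hypothetical caller: persist a serialized storage descriptor and its
// initial reference count in one Put. Not code from Hive itself.
byte[] key = HBaseUtils.hashStorageDescriptor(storageDescriptor, md);
byte[] sd = HBaseUtils.serializeStorageDescriptor(storageDescriptor);
store(SD_TABLE, key, CATALOG_CF,
    new byte[][] { CATALOG_COL, REF_COUNT_COL },
    new byte[][] { sd, "1".getBytes(HBaseUtils.ENCODING) });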
Use of org.apache.hadoop.hbase.client.HTableInterface in project Hive by Apache.
The class HBaseReadWrite, method printSecurity.
/**
 * One method to print all rows in the security table. It's not expected to be large.
 * @return each row as one string
 * @throws IOException
 */
List<String> printSecurity() throws IOException {
  HTableInterface htab = conn.getHBaseTable(SECURITY_TABLE);
  Scan scan = new Scan();
  scan.addColumn(CATALOG_CF, MASTER_KEY_COL);
  scan.addColumn(CATALOG_CF, DELEGATION_TOKEN_COL);
  Iterator<Result> iter = htab.getScanner(scan).iterator();
  if (!iter.hasNext())
    return Arrays.asList("No security related entries");
  List<String> lines = new ArrayList<>();
  while (iter.hasNext()) {
    Result result = iter.next();
    byte[] val = result.getValue(CATALOG_CF, MASTER_KEY_COL);
    if (val != null) {
      int seqNo = Integer.parseInt(new String(result.getRow(), HBaseUtils.ENCODING));
      lines.add("Master key " + seqNo + ": " + HBaseUtils.deserializeMasterKey(val));
    } else {
      val = result.getValue(CATALOG_CF, DELEGATION_TOKEN_COL);
      if (val == null)
        throw new RuntimeException("Huh? No master key, no delegation token!");
      lines.add("Delegation token " + new String(result.getRow(), HBaseUtils.ENCODING) + ": " +
          HBaseUtils.deserializeDelegationToken(val));
    }
  }
  return lines;
}
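A trivial, hypothetical debugging use, e.g. from a metastore inspection tool (the hbaseReadWrite variable is assumed):

// Dump the security table to stdout; purely illustrative.
for (String line : hbaseReadWrite.printSecurity()) {
  System.out.println(line);
}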