
Example 51 with TableId

Use of io.cdap.cdap.data2.util.TableId in project cdap by caskdata.

The class HBase10CDHTableUtil, method listTablesInNamespace.

@Override
public List<TableId> listTablesInNamespace(HBaseAdmin admin, String namespaceId) throws IOException {
    List<TableId> tableIds = Lists.newArrayList();
    HTableDescriptor[] hTableDescriptors = admin.listTableDescriptorsByNamespace(HTableNameConverter.encodeHBaseEntity(namespaceId));
    for (HTableDescriptor hTableDescriptor : hTableDescriptors) {
        if (isCDAPTable(hTableDescriptor)) {
            tableIds.add(HTableNameConverter.from(hTableDescriptor));
        }
    }
    return tableIds;
}
Also used: TableId (io.cdap.cdap.data2.util.TableId), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)
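
A minimal usage sketch for the method above, assuming a reachable HBase cluster. The direct instantiation of HBase10CDHTableUtil and the "default" namespace are illustrative; in CDAP the version-specific util is normally resolved through a factory.

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

import io.cdap.cdap.data2.util.TableId;
import io.cdap.cdap.data2.util.hbase.HBase10CDHTableUtil;

public class ListTablesExample {
    public static void main(String[] args) throws IOException {
        Configuration hConf = HBaseConfiguration.create();
        // HBaseAdmin(Configuration) is the HBase 1.x constructor; newer client
        // versions would obtain an Admin through ConnectionFactory instead.
        try (HBaseAdmin admin = new HBaseAdmin(hConf)) {
            // Direct instantiation is for illustration only; CDAP normally
            // resolves the version-specific util via its table-util factory.
            HBase10CDHTableUtil tableUtil = new HBase10CDHTableUtil();
            List<TableId> cdapTables = tableUtil.listTablesInNamespace(admin, "default");
            for (TableId tableId : cdapTables) {
                System.out.println(tableId);
            }
        }
    }
}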

Example 52 with TableId

Use of io.cdap.cdap.data2.util.TableId in project cdap by caskdata.

The class LevelDBTableService, method getTableStats.

/**
 * Gets table stats.
 *
 * @return map of table name to table stats entries
 * @throws Exception if the service is not open or the stats cannot be collected
 */
public Map<TableId, TableStats> getTableStats() throws Exception {
    ensureOpen();
    File baseDir = new File(basePath);
    File[] subDirs = baseDir.listFiles();
    if (subDirs == null) {
        return ImmutableMap.of();
    }
    ImmutableMap.Builder<TableId, TableStats> builder = ImmutableMap.builder();
    for (File dir : subDirs) {
        String tableName = getTableName(dir.getName());
        // NOTE: we use recursion to traverse the file tree, since a LevelDB table's fs tree is only a couple of levels deep.
        long size = getSize(dir);
        builder.put(LevelDBNameConverter.from(tableName), new TableStats(size));
    }
    return builder.build();
}
Also used: TableId (io.cdap.cdap.data2.util.TableId), File (java.io.File), ImmutableMap (com.google.common.collect.ImmutableMap)
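
A sketch of consuming the returned stats, assuming an already opened LevelDBTableService (CDAP wires it up through dependency injection). The getDiskSizeBytes() accessor on TableStats is an assumption about that class, as is the package path in the import.

import java.util.Map;

// Package paths for the CDAP-internal classes follow the cdap source layout
// and may differ between versions.
import io.cdap.cdap.data2.dataset2.lib.table.leveldb.LevelDBTableService;
import io.cdap.cdap.data2.util.TableId;

public class TableStatsExample {
    static void printStats(LevelDBTableService service) throws Exception {
        Map<TableId, LevelDBTableService.TableStats> stats = service.getTableStats();
        for (Map.Entry<TableId, LevelDBTableService.TableStats> entry : stats.entrySet()) {
            // getDiskSizeBytes() is an assumed accessor exposing the size
            // collected by getSize(dir) above
            System.out.println(entry.getKey() + ": " + entry.getValue().getDiskSizeBytes() + " bytes");
        }
    }
}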

Example 53 with TableId

Use of io.cdap.cdap.data2.util.TableId in project cdap by caskdata.

The class HBaseMetricsTable, method putBytes.

@Override
public void putBytes(SortedMap<byte[], ? extends SortedMap<byte[], byte[]>> updates) {
    List<Put> puts = Lists.newArrayList();
    for (Map.Entry<byte[], ? extends SortedMap<byte[], byte[]>> row : updates.entrySet()) {
        byte[] distributedKey = createDistributedRowKey(row.getKey());
        PutBuilder put = tableUtil.buildPut(distributedKey);
        for (Map.Entry<byte[], byte[]> column : row.getValue().entrySet()) {
            put.add(columnFamily, column.getKey(), column.getValue());
        }
        puts.add(put.build());
    }
    try {
        mutator.mutate(puts);
        mutator.flush();
    } catch (IOException e) {
        throw new DataSetException("Put failed on table " + tableId, e);
    }
}
Also used: PutBuilder (io.cdap.cdap.data2.util.hbase.PutBuilder), DataSetException (io.cdap.cdap.api.dataset.DataSetException), IOException (java.io.IOException), Map (java.util.Map), NavigableMap (java.util.NavigableMap), SortedMap (java.util.SortedMap), Put (org.apache.hadoop.hbase.client.Put)
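
Because the map keys are byte arrays, which do not implement Comparable, callers must supply an explicit comparator at both map levels. A sketch using HBase's Bytes.BYTES_COMPARATOR; the row and column names are made up, and metricsTable stands in for a fully wired HBaseMetricsTable.

import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.hadoop.hbase.util.Bytes;

public class PutBytesExample {
    // HBaseMetricsTable import omitted: its package is CDAP-internal and the
    // instance requires table-util and mutator wiring not shown here.
    static void writeSample(HBaseMetricsTable metricsTable) {
        // both TreeMaps need Bytes.BYTES_COMPARATOR to satisfy the SortedMap
        // contract for byte[] keys
        SortedMap<byte[], SortedMap<byte[], byte[]>> updates = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        SortedMap<byte[], byte[]> columns = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        columns.put(Bytes.toBytes("count"), Bytes.toBytes(42L));
        updates.put(Bytes.toBytes("metric.row.1"), columns);
        metricsTable.putBytes(updates);
    }
}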

Example 54 with TableId

Use of io.cdap.cdap.data2.util.TableId in project cdap by caskdata.

The class HBaseMetricsTable, method delete.

@Override
public void delete(byte[] row, byte[][] columns) {
    byte[] distributedKey = createDistributedRowKey(row);
    DeleteBuilder delete = tableUtil.buildDelete(distributedKey);
    for (byte[] column : columns) {
        delete.deleteColumns(columnFamily, column);
    }
    try {
        table.delete(delete.build());
    } catch (IOException e) {
        throw new DataSetException("Delete failed on table " + tableId, e);
    }
}
Also used: DataSetException (io.cdap.cdap.api.dataset.DataSetException), IOException (java.io.IOException), DeleteBuilder (io.cdap.cdap.data2.util.hbase.DeleteBuilder)
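
The matching delete for the write sketched above, again with an illustrative row key and column names. Note that delete applies the salting (createDistributedRowKey) internally, so the caller passes the plain row key.

import org.apache.hadoop.hbase.util.Bytes;

public class DeleteColumnsExample {
    // metricsTable again stands in for a fully wired HBaseMetricsTable.
    static void deleteSample(HBaseMetricsTable metricsTable) {
        byte[][] columns = { Bytes.toBytes("count"), Bytes.toBytes("sum") };
        metricsTable.delete(Bytes.toBytes("metric.row.1"), columns);
    }
}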

Example 55 with TableId

Use of io.cdap.cdap.data2.util.TableId in project cdap by caskdata.

The class HBaseTableAdmin, method create.

@Override
public void create() throws IOException {
    String columnFamily = Bytes.toString(TableProperties.getColumnFamilyBytes(spec.getProperties()));
    ColumnFamilyDescriptorBuilder cfdBuilder = HBaseTableUtil.getColumnFamilyDescriptorBuilder(columnFamily, hConf);
    if (TableProperties.getReadlessIncrementSupport(spec.getProperties())) {
        cfdBuilder.setMaxVersions(Integer.MAX_VALUE);
    } else if (DatasetsUtil.isTransactional(spec.getProperties())) {
        // NOTE: we cannot limit the number of versions, since there is no hard limit on the number of transactions excluded from reads
        cfdBuilder.setMaxVersions(Integer.MAX_VALUE);
    } else {
        cfdBuilder.setMaxVersions(1);
    }
    cfdBuilder.setBloomType(ColumnFamilyDescriptor.BloomType.ROW);
    Long ttl = TableProperties.getTTL(spec.getProperties());
    if (ttl != null) {
        // convert ttl from seconds to milliseconds
        ttl = TimeUnit.SECONDS.toMillis(ttl);
        cfdBuilder.addProperty(TxConstants.PROPERTY_TTL, String.valueOf(ttl));
    }
    final TableDescriptorBuilder tdBuilder = HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf);
    // if the dataset is configured for read-less increments, then set the table property to support upgrades
    boolean supportsReadlessIncrements = TableProperties.getReadlessIncrementSupport(spec.getProperties());
    if (supportsReadlessIncrements) {
        tdBuilder.addProperty(Table.PROPERTY_READLESS_INCREMENT, "true");
    }
    // if the dataset is configured to be non-transactional, then set the table property to support upgrades
    if (!DatasetsUtil.isTransactional(spec.getProperties())) {
        tdBuilder.addProperty(Constants.Dataset.TABLE_TX_DISABLED, "true");
        if (supportsReadlessIncrements) {
            // by default, the read-less increment coprocessors assume the table is transactional
            cfdBuilder.addProperty("dataset.table.readless.increment.transactional", "false");
        }
    }
    tdBuilder.addColumnFamily(cfdBuilder.build());
    CoprocessorJar coprocessorJar = createCoprocessorJar();
    for (Class<? extends Coprocessor> coprocessor : coprocessorJar.getCoprocessors()) {
        tdBuilder.addCoprocessor(coprocessorManager.getCoprocessorDescriptor(coprocessor, coprocessorJar.getPriority(coprocessor)));
    }
    byte[][] splits = null;
    String splitsProperty = spec.getProperty(PROPERTY_SPLITS);
    if (splitsProperty != null) {
        splits = GSON.fromJson(splitsProperty, byte[][].class);
    }
    // Set a custom split policy if one is configured (e.g. to disable region splits)
    String splitsPolicy = spec.getProperty(SPLIT_POLICY);
    if (!Strings.isNullOrEmpty(splitsPolicy)) {
        tdBuilder.addProperty(HTableDescriptor.SPLIT_POLICY, splitsPolicy);
    }
    try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
        ddlExecutor.createTableIfNotExists(tdBuilder.build(), splits);
        try {
            Map<String, String> permissions = TableProperties.getTablePermissions(spec.getProperties());
            if (permissions != null && !permissions.isEmpty()) {
                tableUtil.grantPermissions(ddlExecutor, tableId, permissions);
            }
        } catch (IOException | RuntimeException e) {
            try {
                drop();
            } catch (Throwable t) {
                e.addSuppressed(t);
            }
            throw e;
        }
    }
}
Also used: HBaseDDLExecutor (io.cdap.cdap.spi.hbase.HBaseDDLExecutor), ColumnFamilyDescriptorBuilder (io.cdap.cdap.data2.util.hbase.ColumnFamilyDescriptorBuilder), HTableDescriptorBuilder (io.cdap.cdap.data2.util.hbase.HTableDescriptorBuilder), TableDescriptorBuilder (io.cdap.cdap.data2.util.hbase.TableDescriptorBuilder), IOException (java.io.IOException)
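
For context, a sketch of dataset properties that exercise the branches in create() above. The Table.PROPERTY_READLESS_INCREMENT key is referenced in the snippet itself; Table.PROPERTY_TTL, the concrete values, and the builder usage are assumptions based on the CDAP Table API.

import io.cdap.cdap.api.dataset.DatasetProperties;
import io.cdap.cdap.api.dataset.table.Table;

public class TablePropertiesExample {
    static DatasetProperties sampleProperties() {
        return DatasetProperties.builder()
            // TTL is given in seconds; create() converts it to milliseconds
            // before storing it under TxConstants.PROPERTY_TTL
            .add(Table.PROPERTY_TTL, "86400")
            // readless increments make create() keep unlimited cell versions
            // and set Table.PROPERTY_READLESS_INCREMENT on the table descriptor
            .add(Table.PROPERTY_READLESS_INCREMENT, "true")
            .build();
    }
}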

Aggregations

TableId (co.cask.cdap.data2.util.TableId): 102
Test (org.junit.Test): 95
TableId (io.cdap.cdap.data2.util.TableId): 78
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 60
Put (org.apache.hadoop.hbase.client.Put): 57
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 55
Cell (org.apache.hadoop.hbase.Cell): 42
Scan (org.apache.hadoop.hbase.client.Scan): 40
RegionScanner (org.apache.hadoop.hbase.regionserver.RegionScanner): 32
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 26
NamespaceId (io.cdap.cdap.proto.id.NamespaceId): 19
IOException (java.io.IOException): 17
HTableDescriptorBuilder (io.cdap.cdap.data2.util.hbase.HTableDescriptorBuilder): 16
Result (org.apache.hadoop.hbase.client.Result): 16
Delete (org.apache.hadoop.hbase.client.Delete): 15
Get (org.apache.hadoop.hbase.client.Get): 14
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 14
HBaseTableUtil (io.cdap.cdap.data2.util.hbase.HBaseTableUtil): 12
TableName (org.apache.hadoop.hbase.TableName): 12
HTable (org.apache.hadoop.hbase.client.HTable): 12