Search in sources:

Example 96 with TableName

Use of org.apache.hadoop.hbase.TableName in project hadoop by apache.

The class TestHBaseStorageFlowRunCompaction, method testWriteScanBatchLimit.

/**
 * Verifies that scans over the flow run table honor the scanner batch limit:
 * with a small batch every Result carries at most that many cells, while a
 * sufficiently large (or negative, i.e. unlimited) batch returns each row's
 * four cells in a single Result.
 *
 * @throws Exception if writing to or scanning the flow run table fails.
 */
@Test
public void testWriteScanBatchLimit() throws Exception {
    // Four INFO-family column/value pairs written identically to every row.
    String[] columns = { "nonNumericColumnName", "nonNumericColumnName2",
            "nonNumericColumnName3", "nonNumericColumnName4" };
    String[] values = { "nonNumericValue", "nonNumericValue2",
            "nonNumericValue3", "nonNumericValue4" };
    byte[][] columnBytes = new byte[columns.length][];
    byte[][] valueBytes = new byte[values.length][];
    for (int i = 0; i < columns.length; i++) {
        columnBytes[i] = Bytes.toBytes(columns[i]);
        valueBytes[i] = Bytes.toBytes(values[i]);
    }
    // All scans below start at the first of the three written rows.
    byte[] startRowBytes = Bytes.toBytes("nonNumericRowKey");
    Configuration hbaseConf = util.getConfiguration();
    TableName table = TableName.valueOf(hbaseConf.get(
            FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
    // try-with-resources closes the connection and table; previously both
    // were leaked across test methods.
    try (Connection conn = ConnectionFactory.createConnection(hbaseConf);
            Table flowRunTable = conn.getTable(table)) {
        // Write three rows, each carrying the same four INFO-family columns.
        flowRunTable.put(createFourColumnPut(startRowBytes, columnBytes, valueBytes));
        flowRunTable.put(createFourColumnPut(Bytes.toBytes("nonNumericRowKey2"),
                columnBytes, valueBytes));
        flowRunTable.put(createFourColumnPut(Bytes.toBytes("nonNumericRowKey3"),
                columnBytes, valueBytes));
        // Small batches: every Result must stay within the cell cap.
        verifyCellsPerResultCap(flowRunTable, startRowBytes, 2);
        verifyCellsPerResultCap(flowRunTable, startRowBytes, 3);
        // A batch larger than any row returns one whole row per Result,
        // so we get back exactly 3 rows in 3 Results.
        verifyOneResultPerRow(flowRunTable, startRowBytes, 1000, 3);
        // A negative batch has the same effect as a very large one.
        verifyOneResultPerRow(flowRunTable, startRowBytes, -2992, 3);
    }
}

/**
 * Creates a Put for {@code rowKey} carrying the given INFO-family
 * column/value pairs ({@code columnNames} and {@code columnValues} are
 * parallel arrays).
 */
private static Put createFourColumnPut(byte[] rowKey, byte[][] columnNames,
        byte[][] columnValues) {
    Put p = new Put(rowKey);
    for (int i = 0; i < columnNames.length; i++) {
        p.addColumn(FlowRunColumnFamily.INFO.getBytes(), columnNames[i], columnValues[i]);
    }
    return p;
}

/**
 * Scans from {@code startRow} with the given batch limit and asserts that
 * every returned Result holds at most {@code batchLimit} cells.
 */
private void verifyCellsPerResultCap(Table flowRunTable, byte[] startRow,
        int batchLimit) throws Exception {
    Scan s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    // Number of cells to fetch per scanner next() invocation.
    s.setBatch(batchLimit);
    try (ResultScanner scanner = flowRunTable.getScanner(s)) {
        for (Result result : scanner) {
            assertNotNull(result);
            assertTrue(!result.isEmpty());
            assertTrue(result.rawCells().length <= batchLimit);
            Map<byte[], byte[]> values =
                    result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
            assertTrue(values.size() <= batchLimit);
        }
    }
}

/**
 * Scans from {@code startRow} with a batch limit expected to behave as
 * "whole row at once" (very large or negative) and asserts each Result
 * carries all four cells of its row and that exactly
 * {@code expectedRowCount} Results come back.
 */
private void verifyOneResultPerRow(Table flowRunTable, byte[] startRow,
        int batchLimit, int expectedRowCount) throws Exception {
    Scan s = new Scan();
    s.addFamily(FlowRunColumnFamily.INFO.getBytes());
    s.setStartRow(startRow);
    s.setBatch(batchLimit);
    int rowCount = 0;
    try (ResultScanner scanner = flowRunTable.getScanner(s)) {
        for (Result result : scanner) {
            assertNotNull(result);
            assertTrue(!result.isEmpty());
            // All four cells of the row are expected in a single next() call.
            assertEquals(4, result.rawCells().length);
            Map<byte[], byte[]> values =
                    result.getFamilyMap(FlowRunColumnFamily.INFO.getBytes());
            assertEquals(4, values.size());
            rowCount++;
        }
    }
    // One Result per row when the batch is large enough (or negative).
    assertEquals(expectedRowCount, rowCount);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) Scan(org.apache.hadoop.hbase.client.Scan) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Example 97 with TableName

Use of org.apache.hadoop.hbase.TableName in project hadoop by apache.

The class BaseTable, method getTableMutator.

/**
 * Creates a type-safe buffered mutator for writing to this table.
 *
 * @param hbaseConf used to resolve the table name.
 * @param conn connection from which the underlying mutator is obtained.
 * @return a type-safe {@link BufferedMutator} wrapper for this table.
 * @throws IOException if the underlying mutator cannot be created.
 */
public TypedBufferedMutator<T> getTableMutator(Configuration hbaseConf, Connection conn) throws IOException {
    // Resolve this table's concrete name, obtain the plain mutator for it,
    // and hand callers only the type-safe delegator around it so services
    // hold on to the properly typed variable.
    BufferedMutator rawMutator = conn.getBufferedMutator(this.getTableName(hbaseConf));
    return new BufferedMutatorDelegator<T>(rawMutator);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) BufferedMutator(org.apache.hadoop.hbase.client.BufferedMutator)

Example 98 with TableName

Use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class ReplicationSerDeHelper, method convert.

/**
 * Converts a map of table name to replicated column families into an array
 * of {@link ReplicationProtos.TableCF} protobufs.
 *
 * @param tableCfs map from table name to the column families to replicate;
 *     an entry with a null or empty collection produces a TableCF with no
 *     families listed.
 * @return the protobuf representation, or null when {@code tableCfs} is null.
 */
public static ReplicationProtos.TableCF[] convert(Map<TableName, ? extends Collection<String>> tableCfs) {
    if (tableCfs == null) {
        return null;
    }
    // Presize with the map's size directly (entrySet().size() is the same
    // value, fetched through an unnecessary intermediate view).
    List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tableCfs.size());
    // A single builder is reused across entries; clear() resets it each time.
    ReplicationProtos.TableCF.Builder tableCFBuilder = ReplicationProtos.TableCF.newBuilder();
    for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
        tableCFBuilder.clear();
        tableCFBuilder.setTableName(ProtobufUtil.toProtoTableName(entry.getKey()));
        Collection<String> families = entry.getValue();
        // Reuse the fetched collection instead of calling entry.getValue()
        // again inside the loop; iterating an empty collection is a no-op,
        // so no separate isEmpty() guard is needed.
        if (families != null) {
            for (String family : families) {
                tableCFBuilder.addFamilies(ByteString.copyFromUtf8(family));
            }
        }
        tableCFList.add(tableCFBuilder.build());
    }
    return tableCFList.toArray(new ReplicationProtos.TableCF[tableCFList.size()]);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) ReplicationProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos) ArrayList(java.util.ArrayList) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) HashMap(java.util.HashMap) Map(java.util.Map)

Example 99 with TableName

Use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class QuotaRetriever, method next.

/**
 * Returns the next quota settings entry, refilling the local cache from the
 * underlying scanner when it has been drained.
 *
 * @return the next {@link QuotaSettings}, or null when the scanner is
 *     exhausted (or the current row produced no settings).
 * @throws IOException if reading the next row from the scanner fails.
 */
public QuotaSettings next() throws IOException {
    // Only hit the scanner when the previously parsed results are used up.
    if (cache.isEmpty()) {
        Result result = scanner.next();
        if (result == null)
            return null;
        // Parse the row; each visitor callback translates one quota scope
        // (user, user+table, user+namespace, table, namespace) into concrete
        // QuotaSettings and queues them in the cache.
        QuotaTableUtil.parseResult(result, new QuotaTableUtil.QuotasVisitor() {

            @Override
            public void visitUserQuotas(String userName, Quotas quotas) {
                cache.addAll(QuotaSettingsFactory.fromUserQuotas(userName, quotas));
            }

            @Override
            public void visitUserQuotas(String userName, TableName table, Quotas quotas) {
                cache.addAll(QuotaSettingsFactory.fromUserQuotas(userName, table, quotas));
            }

            @Override
            public void visitUserQuotas(String userName, String namespace, Quotas quotas) {
                cache.addAll(QuotaSettingsFactory.fromUserQuotas(userName, namespace, quotas));
            }

            @Override
            public void visitTableQuotas(TableName tableName, Quotas quotas) {
                cache.addAll(QuotaSettingsFactory.fromTableQuotas(tableName, quotas));
            }

            @Override
            public void visitNamespaceQuotas(String namespace, Quotas quotas) {
                cache.addAll(QuotaSettingsFactory.fromNamespaceQuotas(namespace, quotas));
            }
        });
    }
    // poll() returns null when parsing produced no settings for this row.
    return cache.poll();
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Quotas(org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas) Result(org.apache.hadoop.hbase.client.Result)

Example 100 with TableName

Use of org.apache.hadoop.hbase.TableName in project hbase by apache.

The class QuotaTableUtil, method makeGetForUserQuotas.

/**
 * Builds a Get against the given user's quota row, covering the global
 * settings column plus one settings column per requested table and per
 * requested namespace.
 *
 * @param user the user whose quota row is read.
 * @param tables tables whose per-user quota columns are added to the Get.
 * @param namespaces namespaces whose per-user quota columns are added.
 * @return the assembled Get.
 */
public static Get makeGetForUserQuotas(final String user, final Iterable<TableName> tables, final Iterable<String> namespaces) {
    // One row per user; qualifiers within it distinguish the quota scope.
    final Get quotaGet = new Get(getUserRowKey(user));
    // User-wide settings column.
    quotaGet.addColumn(QUOTA_FAMILY_INFO, QUOTA_QUALIFIER_SETTINGS);
    // Per-table settings columns for this user.
    for (final TableName tableName : tables) {
        quotaGet.addColumn(QUOTA_FAMILY_INFO, getSettingsQualifierForUserTable(tableName));
    }
    // Per-namespace settings columns for this user.
    for (final String namespace : namespaces) {
        quotaGet.addColumn(QUOTA_FAMILY_INFO, getSettingsQualifierForUserNamespace(namespace));
    }
    return quotaGet;
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Get(org.apache.hadoop.hbase.client.Get)

Aggregations

TableName (org.apache.hadoop.hbase.TableName)1029 Test (org.junit.Test)694 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)257 Table (org.apache.hadoop.hbase.client.Table)227 IOException (java.io.IOException)225 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)215 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)203 Result (org.apache.hadoop.hbase.client.Result)124 ArrayList (java.util.ArrayList)118 Put (org.apache.hadoop.hbase.client.Put)118 Path (org.apache.hadoop.fs.Path)113 Connection (org.apache.hadoop.hbase.client.Connection)103 Scan (org.apache.hadoop.hbase.client.Scan)97 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)88 ServerName (org.apache.hadoop.hbase.ServerName)85 Admin (org.apache.hadoop.hbase.client.Admin)85 Cell (org.apache.hadoop.hbase.Cell)77 HashMap (java.util.HashMap)75 Delete (org.apache.hadoop.hbase.client.Delete)65 InterruptedIOException (java.io.InterruptedIOException)63