Example 16 with Row

Use of org.apache.hadoop.hbase.client.Row in project hbase by apache.

From the class ReplicationSink, method batch.

/**
 * Do the changes and handle the pool
 * @param tableName table to insert into
 * @param allRows list of actions
 * @param batchRowSizeThreshold rowSize threshold for batch mutation
 */
private void batch(TableName tableName, Collection<List<Row>> allRows, int batchRowSizeThreshold) throws IOException {
    if (allRows.isEmpty()) {
        return;
    }
    AsyncTable<?> table = getConnection().getTable(tableName);
    List<Future<?>> futures = new ArrayList<>();
    for (List<Row> rows : allRows) {
        List<List<Row>> batchRows;
        if (rows.size() > batchRowSizeThreshold) {
            batchRows = Lists.partition(rows, batchRowSizeThreshold);
        } else {
            batchRows = Collections.singletonList(rows);
        }
        futures.addAll(batchRows.stream().map(table::batchAll).collect(Collectors.toList()));
    }
    for (Future<?> future : futures) {
        try {
            FutureUtils.get(future);
        } catch (RetriesExhaustedException e) {
            if (e.getCause() instanceof TableNotFoundException) {
                throw new TableNotFoundException("'" + tableName + "'");
            }
            throw e;
        }
    }
}
Also used: TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) RetriesExhaustedException(org.apache.hadoop.hbase.client.RetriesExhaustedException) ArrayList(java.util.ArrayList) Future(java.util.concurrent.Future) List(java.util.List) Row(org.apache.hadoop.hbase.client.Row)
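
The partitioning step is the interesting part of this method: oversized batches are split with Guava's Lists.partition before each chunk is handed to AsyncTable.batchAll. Below is a minimal, standalone sketch of just that splitting behavior; the plain com.google.common import is an assumption (recent HBase versions use the shaded org.apache.hbase.thirdparty variant).

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.google.common.collect.Lists;

public class PartitionSketch {
    public static void main(String[] args) {
        // 2500 pending rows with a threshold of 1000 become chunks of
        // 1000, 1000, and 500; each chunk maps to one batchAll(...) call.
        List<Integer> rows = IntStream.range(0, 2500).boxed().collect(Collectors.toList());
        for (List<Integer> chunk : Lists.partition(rows, 1000)) {
            System.out.println(chunk.size());
        }
    }
}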

Example 17 with Row

Use of org.apache.hadoop.hbase.client.Row in project hbase by apache.

From the class TestIncrementTimeRange, method checkHTableInterfaceMethods.

private void checkHTableInterfaceMethods() throws Exception {
    long time = EnvironmentEdgeManager.currentTime();
    mee.setValue(time);
    hTableInterface.put(new Put(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
    checkRowValue(ROW_A, Bytes.toBytes(1L));
    time = EnvironmentEdgeManager.currentTime();
    mee.setValue(time);
    TimeRange range10 = TimeRange.between(1, time + 10);
    hTableInterface.increment(new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 10L).setTimeRange(range10.getMin(), range10.getMax()));
    checkRowValue(ROW_A, Bytes.toBytes(11L));
    assertEquals(MyObserver.tr10.getMin(), range10.getMin());
    assertEquals(MyObserver.tr10.getMax(), range10.getMax());
    time = EnvironmentEdgeManager.currentTime();
    mee.setValue(time);
    TimeRange range2 = TimeRange.between(1, time + 20);
    List<Row> actions = Arrays.asList(new Row[] {
        new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L)
            .setTimeRange(range2.getMin(), range2.getMax()),
        new Increment(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, 2L)
            .setTimeRange(range2.getMin(), range2.getMax()) });
    Object[] results = new Object[actions.size()];
    hTableInterface.batch(actions, results);
    assertEquals(MyObserver.tr2.getMin(), range2.getMin());
    assertEquals(MyObserver.tr2.getMax(), range2.getMax());
    for (Object result : results) {
        assertTrue(result instanceof Result);
    }
    checkRowValue(ROW_A, Bytes.toBytes(15L));
    hTableInterface.close();
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange) Increment(org.apache.hadoop.hbase.client.Increment) Row(org.apache.hadoop.hbase.client.Row) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result)
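
The checkRowValue helper is not part of this excerpt. A plausible implementation, assuming it reads back the single TEST_FAMILY/qualifierCol1 cell and compares raw bytes, could look like this:

private void checkRowValue(byte[] row, byte[] expectedValue) throws IOException {
    // Read exactly the cell the test mutates and compare its bytes
    Get get = new Get(row).addColumn(TEST_FAMILY, qualifierCol1);
    Result result = hTableInterface.get(get);
    assertArrayEquals(expectedValue, result.getValue(TEST_FAMILY, qualifierCol1));
}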

Example 18 with Row

Use of org.apache.hadoop.hbase.client.Row in project hbase by apache.

From the class TestAppendTimeRange, method testHTableInterfaceMethods.

@Test
public void testHTableInterfaceMethods() throws Exception {
    try (Table table = util.createTable(TableName.valueOf(name.getMethodName()), TEST_FAMILY)) {
        table.put(new Put(ROW).addColumn(TEST_FAMILY, QUAL, VALUE));
        long time = EnvironmentEdgeManager.currentTime();
        mee.setValue(time);
        table.put(new Put(ROW).addColumn(TEST_FAMILY, QUAL, Bytes.toBytes("a")));
        checkRowValue(table, ROW, Bytes.toBytes("a"));
        time = EnvironmentEdgeManager.currentTime();
        mee.setValue(time);
        TimeRange range10 = TimeRange.between(1, time + 10);
        table.append(new Append(ROW).addColumn(TEST_FAMILY, QUAL, Bytes.toBytes("b")).setTimeRange(range10.getMin(), range10.getMax()));
        checkRowValue(table, ROW, Bytes.toBytes("ab"));
        assertEquals(MyObserver.tr10.getMin(), range10.getMin());
        assertEquals(MyObserver.tr10.getMax(), range10.getMax());
        time = EnvironmentEdgeManager.currentTime();
        mee.setValue(time);
        TimeRange range2 = TimeRange.between(1, time + 20);
        List<Row> actions = Arrays.asList(new Row[] {
            new Append(ROW).addColumn(TEST_FAMILY, QUAL, Bytes.toBytes("c"))
                .setTimeRange(range2.getMin(), range2.getMax()),
            new Append(ROW).addColumn(TEST_FAMILY, QUAL, Bytes.toBytes("c"))
                .setTimeRange(range2.getMin(), range2.getMax()) });
        Object[] results = new Object[actions.size()];
        table.batch(actions, results);
        assertEquals(MyObserver.tr2.getMin(), range2.getMin());
        assertEquals(MyObserver.tr2.getMax(), range2.getMax());
        for (Object result : results) {
            assertTrue(result instanceof Result);
        }
        checkRowValue(table, ROW, Bytes.toBytes("abcc"));
    }
}
Also used: TimeRange(org.apache.hadoop.hbase.io.TimeRange) Table(org.apache.hadoop.hbase.client.Table) Append(org.apache.hadoop.hbase.client.Append) Row(org.apache.hadoop.hbase.client.Row) Put(org.apache.hadoop.hbase.client.Put) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)
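
The MyObserver coprocessor asserted against in this test is also outside the excerpt. Below is a minimal sketch of how such an observer could capture the client-supplied TimeRange of an Append; the class layout and the choice of a single tr2 field are assumptions (the real observer presumably distinguishes tr10 from tr2 by call site or payload):

import java.io.IOException;
import java.util.Optional;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.io.TimeRange;

public class MyObserver implements RegionCoprocessor, RegionObserver {
    static volatile TimeRange tr2;

    @Override
    public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
    }

    @Override
    public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
            throws IOException {
        // Record the time range the client attached via setTimeRange(...)
        tr2 = append.getTimeRange();
        // Returning null lets the append run through the normal path
        return null;
    }
}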

Example 19 with Row

Use of org.apache.hadoop.hbase.client.Row in project bagheera by mozilla-metrics.

From the class FlushResult, method flushTable.

private FlushResult flushTable(HTableInterface table, List<Row> puts) throws IOException, InterruptedException {
    List<Row> currentAttempt = puts;
    Object[] batch = null;
    FlushResult result = null;
    int successfulPuts = 0;
    int successfulDeletes = 0;
    TimerContext htableTimerContext = htableTimer.time();
    try {
        for (int attempt = 0; attempt < retryCount; attempt++) {
            // TODO: wrap each attempt in a try/catch?
            batch = table.batch(currentAttempt);
            table.flushCommits();
            List<Row> fails = new ArrayList<Row>(currentAttempt.size());
            if (batch != null) {
                for (int i = 0; i < batch.length; i++) {
                    if (batch[i] == null) {
                        fails.add(currentAttempt.get(i));
                    } else {
                        // figure out what type it was
                        Row row = currentAttempt.get(i);
                        if (row instanceof Delete) {
                            successfulDeletes++;
                        } else if (row instanceof Put) {
                            successfulPuts++;
                        } else {
                            LOG.warn("We succeeded in flushing something that's neither a Delete nor a Put");
                        }
                    }
                }
                currentAttempt = fails;
                if (currentAttempt.isEmpty()) {
                    break;
                }
            } else {
                // something badly broke, retry the whole list.
                LOG.error("Result of table.batch() was null");
            }
        }
        int failedPuts = 0;
        int failedDeletes = 0;
        if (!currentAttempt.isEmpty()) {
            for (Row row : currentAttempt) {
                if (row instanceof Delete) {
                    failedDeletes++;
                } else if (row instanceof Put) {
                    failedPuts++;
                } else {
                    LOG.error("We failed to flush something that's neither a Delete nor a Put");
                }
            }
        }
        result = new FlushResult(failedPuts, failedDeletes, successfulPuts, successfulDeletes);
    } finally {
        htableTimerContext.stop();
    }
    return result;
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) TimerContext(com.yammer.metrics.core.TimerContext) ArrayList(java.util.ArrayList) Row(org.apache.hadoop.hbase.client.Row) Put(org.apache.hadoop.hbase.client.Put)
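
A hypothetical caller of flushTable, written against the same legacy HTableInterface API the method uses; the pool field, table name, and column layout are all placeholders:

// Mixed list of mutations: flushTable counts Puts and Deletes separately.
List<Row> ops = new ArrayList<Row>();
ops.add(new Put(Bytes.toBytes("row1"))
        .add(Bytes.toBytes("data"), Bytes.toBytes("json"), Bytes.toBytes("{}")));
ops.add(new Delete(Bytes.toBytes("row2")));

HTableInterface table = tablePool.getTable(tableName); // hypothetical pool field
try {
    // Entries left null by table.batch() mark failed operations; flushTable
    // collects those rows and retries them up to retryCount times.
    FlushResult result = flushTable(table, ops);
} finally {
    table.close();
}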

Example 20 with Row

Use of org.apache.hadoop.hbase.client.Row in project janusgraph by JanusGraph.

From the class HBaseStoreManager, method mutateMany.

@Override
public void mutateMany(Map<String, Map<StaticBuffer, KCVMutation>> mutations, StoreTransaction txh) throws BackendException {
    Long putTimestamp = null;
    Long delTimestamp = null;
    MaskedTimestamp commitTime = null;
    if (assignTimestamp) {
        commitTime = new MaskedTimestamp(txh);
        putTimestamp = commitTime.getAdditionTime(times);
        delTimestamp = commitTime.getDeletionTime(times);
    }
    // In case of an addition and deletion with identical timestamps, the
    // deletion tombstone wins.
    // https://hbase.apache.org/book/versions.html#d244e4250
    final Map<StaticBuffer, Pair<List<Put>, Delete>> commandsPerKey = convertToCommands(mutations, putTimestamp, delTimestamp);
    // actual batch operation
    final List<Row> batch = new ArrayList<>(commandsPerKey.size());
    // convert sorted commands into representation required for 'batch' operation
    for (Pair<List<Put>, Delete> commands : commandsPerKey.values()) {
        if (commands.getFirst() != null && !commands.getFirst().isEmpty())
            batch.addAll(commands.getFirst());
        if (commands.getSecond() != null)
            batch.add(commands.getSecond());
    }
    try {
        Table table = null;
        try {
            table = cnx.getTable(tableName);
            table.batch(batch, new Object[batch.size()]);
        } finally {
            IOUtils.closeQuietly(table);
        }
    } catch (IOException | InterruptedException e) {
        throw new TemporaryBackendException(e);
    }
    if (commitTime != null) {
        sleepAfterWrite(commitTime);
    }
}
Also used: Delete(org.apache.hadoop.hbase.client.Delete) Table(org.apache.hadoop.hbase.client.Table) ArrayList(java.util.ArrayList) IOException(java.io.IOException) Put(org.apache.hadoop.hbase.client.Put) TemporaryBackendException(org.janusgraph.diskstorage.TemporaryBackendException) StaticBuffer(org.janusgraph.diskstorage.StaticBuffer) List(java.util.List) LinkedList(java.util.LinkedList) Row(org.apache.hadoop.hbase.client.Row) Pair(org.apache.hadoop.hbase.util.Pair)
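
The tombstone comment in the code above is easy to verify in isolation. A minimal sketch against any open Table handle; the row, family, and qualifier names are placeholders:

byte[] row = Bytes.toBytes("k");
byte[] fam = Bytes.toBytes("f");
byte[] qual = Bytes.toBytes("q");
long ts = 1000L;

// Addition and deletion carry the identical timestamp.
table.put(new Put(row).addColumn(fam, qual, ts, Bytes.toBytes("v")));
table.delete(new Delete(row).addColumn(fam, qual, ts));

// The delete tombstone wins: the read comes back empty.
Result r = table.get(new Get(row).addColumn(fam, qual));
assert r.isEmpty();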

Aggregations

Row (org.apache.hadoop.hbase.client.Row): 20
Put (org.apache.hadoop.hbase.client.Put): 16
ArrayList (java.util.ArrayList): 14
Delete (org.apache.hadoop.hbase.client.Delete): 12
HTableInterface (org.apache.hadoop.hbase.client.HTableInterface): 7
IOException (java.io.IOException): 5
Append (org.apache.hadoop.hbase.client.Append): 4
Get (org.apache.hadoop.hbase.client.Get): 4
Increment (org.apache.hadoop.hbase.client.Increment): 4
Pair (org.apache.hadoop.hbase.util.Pair): 4
List (java.util.List): 3
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 3
Action (org.apache.hadoop.hbase.client.Action): 3
RegionCoprocessorServiceExec (org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec): 3
Result (org.apache.hadoop.hbase.client.Result): 3
RowMutations (org.apache.hadoop.hbase.client.RowMutations): 3
Table (org.apache.hadoop.hbase.client.Table): 3
RegionAction (org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction): 3
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 3
ConnectionQueryServices (org.apache.phoenix.query.ConnectionQueryServices): 3