Search in sources :

Example 11 with WALEntry

use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

Taken from the class TestReplicateToReplica, method replicate.

private void replicate(Pair<List<WAL.Entry>, CompletableFuture<Void>> pair) throws IOException {
    // Convert the queued WAL entries into a replay request targeted at the secondary replica.
    WAL.Entry[] walEntries = pair.getFirst().toArray(new WAL.Entry[0]);
    Pair<ReplicateWALEntryRequest, CellScanner> params =
        ReplicationProtobufUtil.buildReplicateWALEntryRequest(walEntries,
            secondary.getRegionInfo().getEncodedNameAsBytes(), null, null, null);
    CellScanner cellScanner = params.getSecond();
    // Replay each converted protobuf entry against the secondary region.
    for (WALEntry walEntry : params.getFirst().getEntryList()) {
        secondary.replayWALEntry(walEntry, cellScanner);
    }
    // Signal whoever queued this batch that replication has finished.
    pair.getSecond().complete(null);
}
Also used : WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) ReplicateWALEntryRequest(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest) WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) CellScanner(org.apache.hadoop.hbase.CellScanner)

Example 12 with WALEntry

use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

Taken from the class TestReplicationSink, method testMixedDeletes.

/**
 * Insert rows, then apply different delete types (DeleteColumn and DeleteFamily)
 * and verify the sink leaves no visible rows behind.
 * @throws Exception if replication or the verification scan fails
 */
@Test
public void testMixedDeletes() throws Exception {
    // First batch: three plain Puts.
    List<WALEntry> entries = new ArrayList<>(3);
    List<Cell> cells = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
    }
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    // Second batch: delete each of the three rows via a mix of delete types.
    entries = new ArrayList<>(3);
    cells = new ArrayList<>();
    entries.add(createEntry(TABLE_NAME1, 0, KeyValue.Type.DeleteColumn, cells));
    entries.add(createEntry(TABLE_NAME1, 1, KeyValue.Type.DeleteFamily, cells));
    entries.add(createEntry(TABLE_NAME1, 2, KeyValue.Type.DeleteColumn, cells));
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    Scan scan = new Scan();
    // try-with-resources: the scanner was previously leaked (never closed).
    try (ResultScanner scanRes = table1.getScanner(scan)) {
        // Every inserted row should now be deleted.
        assertEquals(0, scanRes.next(3).length);
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 13 with WALEntry

use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

Taken from the class TestReplicationSink, method createEntry.

/**
 * Builds a WALEntry for the given table/row and appends the matching KeyValue to
 * {@code cells} (the cell travels out-of-band via the CellScanner, not in the entry).
 * @param table target table; picks the family matching TABLE_NAME1 or the other one
 * @param row   row index, used as both row key and (for Put) value
 * @param type  one of Put, DeleteColumn, DeleteFamily
 * @param cells out-parameter list the created KeyValue is appended to
 * @return the built WALEntry
 */
private WALEntry createEntry(TableName table, int row, KeyValue.Type type, List<Cell> cells) {
    byte[] fam = table.equals(TABLE_NAME1) ? FAM_NAME1 : FAM_NAME2;
    byte[] rowBytes = Bytes.toBytes(row);
    // Sleep 1ms so consecutive entries for the same key get distinct timestamps.
    try {
        Thread.sleep(1);
    } catch (InterruptedException e) {
        LOG.info("Was interrupted while sleep, meh", e);
        // Restore the interrupt status so callers can still observe the interruption.
        Thread.currentThread().interrupt();
    }
    final long now = EnvironmentEdgeManager.currentTime();
    final KeyValue kv;
    if (type.getCode() == KeyValue.Type.Put.getCode()) {
        kv = new KeyValue(rowBytes, fam, fam, now, KeyValue.Type.Put, Bytes.toBytes(row));
    } else if (type.getCode() == KeyValue.Type.DeleteColumn.getCode()) {
        kv = new KeyValue(rowBytes, fam, fam, now, KeyValue.Type.DeleteColumn);
    } else if (type.getCode() == KeyValue.Type.DeleteFamily.getCode()) {
        kv = new KeyValue(rowBytes, fam, null, now, KeyValue.Type.DeleteFamily);
    } else {
        // Previously an unsupported type silently appended a null cell; fail fast instead.
        throw new IllegalArgumentException("Unsupported KeyValue type: " + type);
    }
    WALEntry.Builder builder = createWALEntryBuilder(table);
    cells.add(kv);
    return builder.build();
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry)

Example 14 with WALEntry

use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

Taken from the class TestReplicationSink, method testRethrowRetriesExhaustedException.

@Test
public void testRethrowRetriesExhaustedException() throws Exception {
    // Replicating to a table that does not exist must surface TableNotFoundException.
    TableName notExistTable = TableName.valueOf("notExistTable");
    List<WALEntry> entries = new ArrayList<>();
    List<Cell> cells = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        entries.add(createEntry(notExistTable, i, KeyValue.Type.Put, cells));
    }
    try {
        SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
        Assert.fail("Should re-throw TableNotFoundException.");
    } catch (TableNotFoundException e) {
        // Expected: the sink must rethrow rather than swallow the failure.
    }
    // Second scenario: the table exists but is disabled, so puts keep failing
    // until retries are exhausted.
    entries.clear();
    cells.clear();
    for (int i = 0; i < 10; i++) {
        entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
    }
    try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
        try (Admin admin = conn.getAdmin()) {
            admin.disableTable(TABLE_NAME1);
            try {
                SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
                Assert.fail("Should re-throw RetriesExhaustedWithDetailsException.");
            } catch (RetriesExhaustedException e) {
                // Expected once retries against the disabled table are used up.
            } finally {
                // Re-enable so later tests see the table in its normal state.
                admin.enableTable(TABLE_NAME1);
            }
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) RetriesExhaustedException(org.apache.hadoop.hbase.client.RetriesExhaustedException) ArrayList(java.util.ArrayList) Connection(org.apache.hadoop.hbase.client.Connection) WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) Admin(org.apache.hadoop.hbase.client.Admin) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 15 with WALEntry

use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

Taken from the class TestReplicationSink, method testMixedPutTables.

/**
 * Replicate one batch whose entries alternate between two tables, then verify
 * each table received exactly the rows targeted at it (even rows to table2,
 * odd rows to table1).
 * @throws Exception if replication or the verification scans fail
 */
@Test
public void testMixedPutTables() throws Exception {
    List<WALEntry> entries = new ArrayList<>(BATCH_SIZE / 2);
    List<Cell> cells = new ArrayList<>();
    // Even indices go to TABLE_NAME2, odd to TABLE_NAME1.
    for (int i = 0; i < BATCH_SIZE; i++) {
        entries.add(createEntry(i % 2 == 0 ? TABLE_NAME2 : TABLE_NAME1, i, KeyValue.Type.Put, cells));
    }
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    Scan scan = new Scan();
    // try-with-resources: both scanners were previously leaked (never closed).
    try (ResultScanner scanRes = table2.getScanner(scan)) {
        for (Result res : scanRes) {
            assertEquals(0, Bytes.toInt(res.getRow()) % 2);
        }
    }
    try (ResultScanner scanRes = table1.getScanner(scan)) {
        for (Result res : scanRes) {
            assertEquals(1, Bytes.toInt(res.getRow()) % 2);
        }
    }
}
Also used : ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) WALEntry(org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry) Cell(org.apache.hadoop.hbase.Cell) Result(org.apache.hadoop.hbase.client.Result) Test(org.junit.Test)

Aggregations

WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry)15 ArrayList (java.util.ArrayList)11 Cell (org.apache.hadoop.hbase.Cell)9 Test (org.junit.Test)8 Scan (org.apache.hadoop.hbase.client.Scan)7 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)6 IOException (java.io.IOException)5 CellScanner (org.apache.hadoop.hbase.CellScanner)5 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)4 UncheckedIOException (java.io.UncheckedIOException)3 HashMap (java.util.HashMap)3 List (java.util.List)3 Result (org.apache.hadoop.hbase.client.Result)3 HashSet (java.util.HashSet)2 Map (java.util.Map)2 TreeMap (java.util.TreeMap)2 UUID (java.util.UUID)2 Configuration (org.apache.hadoop.conf.Configuration)2 HBaseIOException (org.apache.hadoop.hbase.HBaseIOException)2 KeyValue (org.apache.hadoop.hbase.KeyValue)2