Example 6 with WALEntry

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

In class TestReplicationSink, method testReplicateEntriesForHFiles.

/**
   * Test replicateEntries with a bulk load entry for 25 HFiles
   */
@Test
public void testReplicateEntriesForHFiles() throws Exception {
    Path dir = TEST_UTIL.getDataTestDirOnTestFS("testReplicateEntries");
    Path familyDir = new Path(dir, Bytes.toString(FAM_NAME1));
    int numRows = 10;
    List<Path> p = new ArrayList<>(1);
    // 1. Generate 25 hfile ranges
    Random rng = new SecureRandom();
    Set<Integer> numbers = new HashSet<>();
    while (numbers.size() < 50) {
        numbers.add(rng.nextInt(1000));
    }
    List<Integer> numberList = new ArrayList<>(numbers);
    Collections.sort(numberList);
    Map<String, Long> storeFilesSize = new HashMap<>(1);
    // 2. Create 25 hfiles
    Configuration conf = TEST_UTIL.getConfiguration();
    FileSystem fs = dir.getFileSystem(conf);
    Iterator<Integer> numbersItr = numberList.iterator();
    for (int i = 0; i < 25; i++) {
        Path hfilePath = new Path(familyDir, "hfile_" + i);
        HFileTestUtil.createHFile(conf, fs, hfilePath, FAM_NAME1, FAM_NAME1, Bytes.toBytes(numbersItr.next()), Bytes.toBytes(numbersItr.next()), numRows);
        p.add(hfilePath);
        storeFilesSize.put(hfilePath.getName(), fs.getFileStatus(hfilePath).getLen());
    }
    // 3. Create a BulkLoadDescriptor and a WALEdit
    Map<byte[], List<Path>> storeFiles = new HashMap<>(1);
    storeFiles.put(FAM_NAME1, p);
    WALEdit edit = null;
    WALProtos.BulkLoadDescriptor loadDescriptor = null;
    try (Connection c = ConnectionFactory.createConnection(conf);
        RegionLocator l = c.getRegionLocator(TABLE_NAME1)) {
        HRegionInfo regionInfo = l.getAllRegionLocations().get(0).getRegionInfo();
        loadDescriptor = ProtobufUtil.toBulkLoadDescriptor(TABLE_NAME1, UnsafeByteOperations.unsafeWrap(regionInfo.getEncodedNameAsBytes()), storeFiles, storeFilesSize, 1);
        edit = WALEdit.createBulkLoadEvent(regionInfo, loadDescriptor);
    }
    List<WALEntry> entries = new ArrayList<>(1);
    // 4. Create a WALEntryBuilder
    WALEntry.Builder builder = createWALEntryBuilder(TABLE_NAME1);
    // 5. Copy the hfile to the path as it is in reality
    for (int i = 0; i < 25; i++) {
        String pathToHfileFromNS = new StringBuilder(100)
            .append(TABLE_NAME1.getNamespaceAsString()).append(Path.SEPARATOR)
            .append(Bytes.toString(TABLE_NAME1.getName())).append(Path.SEPARATOR)
            .append(Bytes.toString(loadDescriptor.getEncodedRegionName().toByteArray()))
            .append(Path.SEPARATOR).append(Bytes.toString(FAM_NAME1))
            .append(Path.SEPARATOR).append("hfile_" + i).toString();
        String dst = baseNamespaceDir + Path.SEPARATOR + pathToHfileFromNS;
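        // Note: the source file is always the first hfile (p.get(0));
        // only the destination name varies across the 25 copies.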
        FileUtil.copy(fs, p.get(0), fs, new Path(dst), false, conf);
    }
    entries.add(builder.build());
    try (ResultScanner scanner = table1.getScanner(new Scan())) {
        // 6. Assert no existing data in table
        assertEquals(0, scanner.next(numRows).length);
    }
    // 7. Replicate the bulk loaded entry
    SINK.replicateEntries(entries, CellUtil.createCellScanner(edit.getCells().iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    try (ResultScanner scanner = table1.getScanner(new Scan())) {
        // 8. Assert data is replicated
        assertEquals(numRows, scanner.next(numRows).length);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), Random (java.util.Random), SecureRandom (java.security.SecureRandom), WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit), FileSystem (org.apache.hadoop.fs.FileSystem), List (java.util.List), HashSet (java.util.HashSet), Path (org.apache.hadoop.fs.Path), RegionLocator (org.apache.hadoop.hbase.client.RegionLocator), ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), Connection (org.apache.hadoop.hbase.client.Connection), WALProtos (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos), Scan (org.apache.hadoop.hbase.client.Scan), WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry), Test (org.junit.Test)
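The StringBuilder in step 5 reproduces the directory layout the sink expects for bulk-loaded files: <baseNamespaceDir>/<namespace>/<table>/<encoded region name>/<family>/<hfile>. As a minimal sketch (assuming baseNamespaceDir is the string field used in the test above), the same destination could be built with Path composition instead of string concatenation:

// Equivalent destination for "hfile_0", built step by step with Path.
Path dstPath = new Path(baseNamespaceDir);
dstPath = new Path(dstPath, TABLE_NAME1.getNamespaceAsString());
dstPath = new Path(dstPath, Bytes.toString(TABLE_NAME1.getName()));
dstPath = new Path(dstPath, Bytes.toString(loadDescriptor.getEncodedRegionName().toByteArray()));
dstPath = new Path(dstPath, Bytes.toString(FAM_NAME1));
dstPath = new Path(dstPath, "hfile_0");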

Example 7 with WALEntry

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

In class TestReplicationSink, method testMixedPutTables.

/**
   * Insert into two different tables
   * @throws Exception
   */
@Test
public void testMixedPutTables() throws Exception {
    List<WALEntry> entries = new ArrayList<>(BATCH_SIZE / 2);
    List<Cell> cells = new ArrayList<>();
    for (int i = 0; i < BATCH_SIZE; i++) {
        entries.add(createEntry(i % 2 == 0 ? TABLE_NAME2 : TABLE_NAME1, i, KeyValue.Type.Put, cells));
    }
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    Scan scan = new Scan();
    try (ResultScanner scanRes = table2.getScanner(scan)) {
        for (Result res : scanRes) {
            assertTrue(Bytes.toInt(res.getRow()) % 2 == 0);
        }
    }
}
Also used: ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry), Cell (org.apache.hadoop.hbase.Cell), Result (org.apache.hadoop.hbase.client.Result), Test (org.junit.Test)
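The test only verifies that table2 received the even-numbered rows. A symmetric check for table1 (not part of the original test) would be a natural companion; a minimal sketch:

// Hypothetical companion assertion: the odd-numbered rows should have landed in table1.
try (ResultScanner scanRes1 = table1.getScanner(new Scan())) {
    for (Result res : scanRes1) {
        assertTrue(Bytes.toInt(res.getRow()) % 2 == 1);
    }
}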

Example 8 with WALEntry

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

In class TestReplicationSink, method createEntry.

private WALEntry createEntry(TableName table, int row, KeyValue.Type type, List<Cell> cells) {
    byte[] fam = table.equals(TABLE_NAME1) ? FAM_NAME1 : FAM_NAME2;
    byte[] rowBytes = Bytes.toBytes(row);
    // Just make sure we don't get the same timestamp for two consecutive rows with the same key
    try {
        Thread.sleep(1);
    } catch (InterruptedException e) {
        LOG.info("Was interrupted while sleep, meh", e);
    }
    final long now = System.currentTimeMillis();
    KeyValue kv = null;
    if (type.getCode() == KeyValue.Type.Put.getCode()) {
        kv = new KeyValue(rowBytes, fam, fam, now, KeyValue.Type.Put, Bytes.toBytes(row));
    } else if (type.getCode() == KeyValue.Type.DeleteColumn.getCode()) {
        kv = new KeyValue(rowBytes, fam, fam, now, KeyValue.Type.DeleteColumn);
    } else if (type.getCode() == KeyValue.Type.DeleteFamily.getCode()) {
        kv = new KeyValue(rowBytes, fam, null, now, KeyValue.Type.DeleteFamily);
    }
    WALEntry.Builder builder = createWALEntryBuilder(table);
    cells.add(kv);
    return builder.build();
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry)
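createEntry delegates to createWALEntryBuilder, which this page does not reproduce. A plausible sketch of such a helper, assembled from the WALEntry and WALProtos.WALKey protobuf builders (the concrete field values here are assumptions, not the test's code):

private WALEntry.Builder createWALEntryBuilder(TableName table) {
    WALEntry.Builder builder = WALEntry.newBuilder();
    // Each entry's cells travel out-of-band; this count ties entry and cells together.
    builder.setAssociatedCellCount(1);
    WALProtos.WALKey.Builder keyBuilder = WALProtos.WALKey.newBuilder();
    keyBuilder.setTableName(UnsafeByteOperations.unsafeWrap(table.getName()));
    keyBuilder.setWriteTime(System.currentTimeMillis());
    // WALKey requires a region name and sequence number; dummy values suffice for a sink-side test.
    keyBuilder.setEncodedRegionName(UnsafeByteOperations.unsafeWrap(HConstants.EMPTY_BYTE_ARRAY));
    keyBuilder.setLogSequenceNumber(-1);
    builder.setKey(keyBuilder.build());
    return builder;
}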

Example 9 with WALEntry

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

In class TestReplicationSink, method testMixedDeletes.

/**
   * Insert rows, then apply different types of deletes
   * @throws Exception
   */
@Test
public void testMixedDeletes() throws Exception {
    List<WALEntry> entries = new ArrayList<>(3);
    List<Cell> cells = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        entries.add(createEntry(TABLE_NAME1, i, KeyValue.Type.Put, cells));
    }
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    entries = new ArrayList<>(3);
    cells = new ArrayList<>();
    entries.add(createEntry(TABLE_NAME1, 0, KeyValue.Type.DeleteColumn, cells));
    entries.add(createEntry(TABLE_NAME1, 1, KeyValue.Type.DeleteFamily, cells));
    entries.add(createEntry(TABLE_NAME1, 2, KeyValue.Type.DeleteColumn, cells));
    SINK.replicateEntries(entries, CellUtil.createCellScanner(cells.iterator()), replicationClusterId, baseNamespaceDir, hfileArchiveDir);
    Scan scan = new Scan();
    try (ResultScanner scanRes = table1.getScanner(scan)) {
        assertEquals(0, scanRes.next(3).length);
    }
}
Also used: ResultScanner (org.apache.hadoop.hbase.client.ResultScanner), ArrayList (java.util.ArrayList), Scan (org.apache.hadoop.hbase.client.Scan), WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
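The three markers exercise different delete semantics: DeleteColumn removes all versions of a single qualifier, while DeleteFamily removes the entire column family. For illustration only (not part of the test), the client-side equivalents of the replicated markers would be:

// Client-side equivalents of the three replicated delete markers.
Delete d0 = new Delete(Bytes.toBytes(0)).addColumns(FAM_NAME1, FAM_NAME1); // all versions of one qualifier
Delete d1 = new Delete(Bytes.toBytes(1)).addFamily(FAM_NAME1);             // the whole family
Delete d2 = new Delete(Bytes.toBytes(2)).addColumns(FAM_NAME1, FAM_NAME1);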

Example 10 with WALEntry

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry in project hbase by apache.

In class RSRpcServices, method replicateWALEntry.

/**
   * Replicate WAL entries on the region server.
   *
   * @param controller the RPC controller
   * @param request the request
   * @throws ServiceException
   */
@Override
@QosPriority(priority = HConstants.REPLICATION_QOS)
public ReplicateWALEntryResponse replicateWALEntry(final RpcController controller, final ReplicateWALEntryRequest request) throws ServiceException {
    try {
        checkOpen();
        if (regionServer.replicationSinkHandler != null) {
            requestCount.increment();
            List<WALEntry> entries = request.getEntryList();
            CellScanner cellScanner = ((HBaseRpcController) controller).cellScanner();
            regionServer.getRegionServerCoprocessorHost().preReplicateLogEntries(entries, cellScanner);
            regionServer.replicationSinkHandler.replicateLogEntries(entries, cellScanner, request.getReplicationClusterId(), request.getSourceBaseNamespaceDirPath(), request.getSourceHFileArchiveDirPath());
            regionServer.getRegionServerCoprocessorHost().postReplicateLogEntries(entries, cellScanner);
            return ReplicateWALEntryResponse.newBuilder().build();
        } else {
            throw new ServiceException("Replication services are not initialized yet");
        }
    } catch (IOException ie) {
        throw new ServiceException(ie);
    }
}
Also used: HBaseRpcController (org.apache.hadoop.hbase.ipc.HBaseRpcController), ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException), WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry), InterruptedIOException (java.io.InterruptedIOException), IOException (java.io.IOException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), HBaseIOException (org.apache.hadoop.hbase.HBaseIOException), CellScanner (org.apache.hadoop.hbase.CellScanner), QosPriority (org.apache.hadoop.hbase.ipc.QosPriority)
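On the sending side, this method serves an AdminProtos.ReplicateWALEntryRequest. A minimal sketch of assembling such a request from a batch of entries (field names follow the generated protobuf builder API; the RPC transport that carries the cells is omitted):

// The WALEntry protobufs carry only metadata; the cells travel in the
// RPC's attached CellScanner, matched up via each entry's associatedCellCount.
ReplicateWALEntryRequest.Builder req = ReplicateWALEntryRequest.newBuilder();
req.addAllEntry(entries);
req.setReplicationClusterId(replicationClusterId);
req.setSourceBaseNamespaceDirPath(baseNamespaceDir);
req.setSourceHFileArchiveDirPath(hfileArchiveDir);
ReplicateWALEntryRequest request = req.build();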

Aggregations

WALEntry (org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry): 10
ArrayList (java.util.ArrayList): 8
Cell (org.apache.hadoop.hbase.Cell): 6
Test (org.junit.Test): 6
ResultScanner (org.apache.hadoop.hbase.client.ResultScanner): 5
Scan (org.apache.hadoop.hbase.client.Scan): 5
IOException (java.io.IOException): 3
InterruptedIOException (java.io.InterruptedIOException): 3
HashMap (java.util.HashMap): 2
List (java.util.List): 2
CellScanner (org.apache.hadoop.hbase.CellScanner): 2
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException): 2
HBaseIOException (org.apache.hadoop.hbase.HBaseIOException): 2
Result (org.apache.hadoop.hbase.client.Result): 2
HBaseRpcController (org.apache.hadoop.hbase.ipc.HBaseRpcController): 2
QosPriority (org.apache.hadoop.hbase.ipc.QosPriority): 2
WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit): 2
ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException): 2
SecureRandom (java.security.SecureRandom): 1
HashSet (java.util.HashSet): 1