
Example 6 with StoreDescriptor

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor in project hbase by apache.

From class HRegion, method replayWALBulkLoadEventMarker.

void replayWALBulkLoadEventMarker(WALProtos.BulkLoadDescriptor bulkLoadEvent) throws IOException {
    checkTargetRegion(bulkLoadEvent.getEncodedRegionName().toByteArray(), "BulkLoad marker from WAL ", bulkLoadEvent);
    if (ServerRegionReplicaUtil.isDefaultReplica(this.getRegionInfo())) {
        // nothing to do if this is the primary (default) replica
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(getRegionInfo().getEncodedName() + " : " + "Replaying bulkload event marker " + TextFormat.shortDebugString(bulkLoadEvent));
    }
    // check if multiple families involved
    boolean multipleFamilies = false;
    byte[] family = null;
    for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
        byte[] fam = storeDescriptor.getFamilyName().toByteArray();
        if (family == null) {
            family = fam;
        } else if (!Bytes.equals(family, fam)) {
            multipleFamilies = true;
            break;
        }
    }
    startBulkRegionOperation(multipleFamilies);
    try {
        // we will use writestate as a coarse-grain lock for all the replay events
        synchronized (writestate) {
            // replay events can arrive out of order; skip this bulk load if its sequence id is
            // not newer than the last replayed region open event's seqId
            if (bulkLoadEvent.getBulkloadSeqNum() >= 0 && this.lastReplayedOpenRegionSeqId >= bulkLoadEvent.getBulkloadSeqNum()) {
                LOG.warn(getRegionInfo().getEncodedName() + " : " + "Skipping replaying bulkload event :" + TextFormat.shortDebugString(bulkLoadEvent) + " because its sequence id is smaller than this region's lastReplayedOpenRegionSeqId" + " =" + lastReplayedOpenRegionSeqId);
                return;
            }
            for (StoreDescriptor storeDescriptor : bulkLoadEvent.getStoresList()) {
                // stores of primary may be different now
                family = storeDescriptor.getFamilyName().toByteArray();
                HStore store = getHStore(family);
                if (store == null) {
                    LOG.warn(getRegionInfo().getEncodedName() + " : " + "Received a bulk load marker from primary, but the family is not found. " + "Ignoring. StoreDescriptor:" + storeDescriptor);
                    continue;
                }
                List<String> storeFiles = storeDescriptor.getStoreFileList();
                for (String storeFile : storeFiles) {
                    StoreFileInfo storeFileInfo = null;
                    try {
                        storeFileInfo = fs.getStoreFileInfo(Bytes.toString(family), storeFile);
                        store.bulkLoadHFile(storeFileInfo);
                    } catch (FileNotFoundException ex) {
                        LOG.warn(getRegionInfo().getEncodedName() + " : " + ((storeFileInfo != null) ? storeFileInfo.toString() : (new Path(Bytes.toString(family), storeFile)).toString()) + " doesn't exist any more. Skip loading the file");
                    }
                }
            }
        }
        if (bulkLoadEvent.getBulkloadSeqNum() > 0) {
            mvcc.advanceTo(bulkLoadEvent.getBulkloadSeqNum());
        }
    } finally {
        closeBulkRegionOperation();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileNotFoundException(java.io.FileNotFoundException) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor)
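
For context, the marker that replayWALBulkLoadEventMarker replays is a BulkLoadDescriptor whose repeated stores field holds one StoreDescriptor per column family. The sketch below shows one plausible way to assemble such a marker with the generated protobuf builders; it is an illustration only, not HBase's own producer code, and it assumes ProtobufUtil.toProtoTableName is available in the same shaded package.

import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch: build a bulk load marker for a single family with the given hfiles.
// Store file names are kept relative to the family's store directory, which is
// what replayWALBulkLoadEventMarker later resolves via fs.getStoreFileInfo().
static BulkLoadDescriptor buildBulkLoadMarker(TableName table, byte[] encodedRegionName,
        byte[] family, List<String> relativeHFileNames, long bulkloadSeqNum) {
    StoreDescriptor.Builder store = StoreDescriptor.newBuilder()
        .setFamilyName(ByteString.copyFrom(family))
        // by convention the store home dir is the family name
        .setStoreHomeDir(Bytes.toString(family));
    for (String hfile : relativeHFileNames) {
        store.addStoreFile(hfile);
    }
    return BulkLoadDescriptor.newBuilder()
        .setTableName(ProtobufUtil.toProtoTableName(table))
        .setEncodedRegionName(ByteString.copyFrom(encodedRegionName))
        .addStores(store.build())
        .setBulkloadSeqNum(bulkloadSeqNum)
        .build();
}

A replica replaying such a marker takes the branch under writestate shown above and bulk loads each listed file into the matching HStore.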

Example 7 with StoreDescriptor

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor in project hbase by apache.

From class TestHRegion, method testOpenRegionWrittenToWAL.

@Test
public void testOpenRegionWrittenToWAL() throws Exception {
    final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
    final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor(fam1));
    htd.addFamily(new HColumnDescriptor(fam2));
    HRegionInfo hri = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
    // open the region w/o rss and wal and flush some files
    HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    assertNotNull(region);
    // create a file in fam1 for the region before opening in OpenRegionHandler
    region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
    region.flush(true);
    HBaseTestingUtility.closeRegionAndWAL(region);
    ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
    // capture append() calls
    WAL wal = mockWAL();
    when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
    try {
        region = HRegion.openHRegion(hri, htd, rss.getWAL(hri), TEST_UTIL.getConfiguration(), rss, null);
        verify(wal, times(1)).append((HRegionInfo) any(), (WALKey) any(), editCaptor.capture(), anyBoolean());
        WALEdit edit = editCaptor.getValue();
        assertNotNull(edit);
        assertNotNull(edit.getCells());
        assertEquals(1, edit.getCells().size());
        RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
        assertNotNull(desc);
        LOG.info("RegionEventDescriptor from WAL: " + desc);
        assertEquals(RegionEventDescriptor.EventType.REGION_OPEN, desc.getEventType());
        assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getName()));
        assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(), hri.getEncodedNameAsBytes()));
        assertTrue(desc.getLogSequenceNumber() > 0);
        assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
        assertEquals(2, desc.getStoresCount());
        StoreDescriptor store = desc.getStores(0);
        assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
        assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
        // 1 store file
        assertEquals(1, store.getStoreFileCount());
        // ensure path is relative
        assertFalse(store.getStoreFile(0).contains("/"));
        store = desc.getStores(1);
        assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
        assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
        // no store files
        assertEquals(0, store.getStoreFileCount());
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(region);
    }
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) WAL(org.apache.hadoop.hbase.wal.WAL) MetricsWAL(org.apache.hadoop.hbase.regionserver.wal.MetricsWAL) WALEdit(org.apache.hadoop.hbase.regionserver.wal.WALEdit) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ServerName(org.apache.hadoop.hbase.ServerName) RegionEventDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) Test(org.junit.Test)

Example 8 with StoreDescriptor

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor in project hbase by apache.

From class TestHRegion, method testCloseRegionWrittenToWAL.

@Test
public void testCloseRegionWrittenToWAL() throws Exception {
    final ServerName serverName = ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42);
    final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor(fam1));
    htd.addFamily(new HColumnDescriptor(fam2));
    final HRegionInfo hri = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
    ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
    // capture append() calls
    WAL wal = mockWAL();
    when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
    // open a region first so that it can be closed later
    region = HRegion.openHRegion(hri, htd, rss.getWAL(hri), TEST_UTIL.getConfiguration(), rss, null);
    // close the region
    region.close(false);
    // append() is called twice: once for region open, once for region close
    verify(wal, times(2)).append((HRegionInfo) any(), (WALKey) any(), editCaptor.capture(), anyBoolean());
    WALEdit edit = editCaptor.getAllValues().get(1);
    assertNotNull(edit);
    assertNotNull(edit.getCells());
    assertEquals(1, edit.getCells().size());
    RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
    assertNotNull(desc);
    LOG.info("RegionEventDescriptor from WAL: " + desc);
    assertEquals(RegionEventDescriptor.EventType.REGION_CLOSE, desc.getEventType());
    assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getName()));
    assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(), hri.getEncodedNameAsBytes()));
    assertTrue(desc.getLogSequenceNumber() > 0);
    assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
    assertEquals(2, desc.getStoresCount());
    StoreDescriptor store = desc.getStores(0);
    assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
    assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
    // no store files
    assertEquals(0, store.getStoreFileCount());
    store = desc.getStores(1);
    assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
    assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
    // no store files
    assertEquals(0, store.getStoreFileCount());
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) WAL(org.apache.hadoop.hbase.wal.WAL) MetricsWAL(org.apache.hadoop.hbase.regionserver.wal.MetricsWAL) WALEdit(org.apache.hadoop.hbase.regionserver.wal.WALEdit) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ServerName(org.apache.hadoop.hbase.ServerName) RegionEventDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) Test(org.junit.Test)
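
Examples 7 and 8 repeat the same per-store assertions for the open and close events. A small helper along the lines below (hypothetical, not part of TestHRegion) would collapse a RegionEventDescriptor's stores into a family-to-files map, making the expectation explicit that the REGION_OPEN event lists the flushed store files while the immediately following REGION_CLOSE event lists none.

import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;

// Hypothetical test helper: index the stores of a region event by family name.
static Map<String, List<String>> storeFilesByFamily(RegionEventDescriptor desc) {
    Map<String, List<String>> filesByFamily = new TreeMap<>();
    for (StoreDescriptor store : desc.getStoresList()) {
        filesByFamily.put(store.getFamilyName().toStringUtf8(), store.getStoreFileList());
    }
    return filesByFamily;
}

With such a helper, the open-event test reduces to asserting one file under fam1 and none under fam2, and the close-event test to asserting empty lists for both families.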

Example 9 with StoreDescriptor

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor in project hbase by apache.

From class TestHRegion, method testOpenRegionWrittenToWALForLogReplay.

@Test
public void testOpenRegionWrittenToWALForLogReplay() throws Exception {
    // similar to the above test but with distributed log replay
    final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
    final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor(fam1));
    htd.addFamily(new HColumnDescriptor(fam2));
    HRegionInfo hri = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
    // open the region w/o rss and wal and flush some files
    HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    assertNotNull(region);
    // create a file in fam1 for the region before opening in OpenRegionHandler
    region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
    region.flush(true);
    HBaseTestingUtility.closeRegionAndWAL(region);
    ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
    // capture append() calls
    WAL wal = mockWAL();
    when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
    // add the region to recovering regions
    HashMap<String, Region> recoveringRegions = Maps.newHashMap();
    recoveringRegions.put(region.getRegionInfo().getEncodedName(), null);
    when(rss.getRecoveringRegions()).thenReturn(recoveringRegions);
    try {
        Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
        conf.set(HConstants.REGION_IMPL, HRegionWithSeqId.class.getName());
        region = HRegion.openHRegion(hri, htd, rss.getWAL(hri), conf, rss, null);
        // verify that we have not appended region open event to WAL because this region is still
        // recovering
        verify(wal, times(0)).append((HRegionInfo) any(), (WALKey) any(), editCaptor.capture(), anyBoolean());
        // now put the region out of recovering state
        new FinishRegionRecoveringHandler(rss, region.getRegionInfo().getEncodedName(), "/foo").prepare().process();
        // now we should have put the entry
        verify(wal, times(1)).append((HRegionInfo) any(), (WALKey) any(), editCaptor.capture(), anyBoolean());
        WALEdit edit = editCaptor.getValue();
        assertNotNull(edit);
        assertNotNull(edit.getCells());
        assertEquals(1, edit.getCells().size());
        RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
        assertNotNull(desc);
        LOG.info("RegionEventDescriptor from WAL: " + desc);
        assertEquals(RegionEventDescriptor.EventType.REGION_OPEN, desc.getEventType());
        assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getName()));
        assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(), hri.getEncodedNameAsBytes()));
        assertTrue(desc.getLogSequenceNumber() > 0);
        assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
        assertEquals(2, desc.getStoresCount());
        StoreDescriptor store = desc.getStores(0);
        assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
        assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
        // 1 store file
        assertEquals(1, store.getStoreFileCount());
        // ensure path is relative
        assertFalse(store.getStoreFile(0).contains("/"));
        store = desc.getStores(1);
        assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
        assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
        // no store files
        assertEquals(0, store.getStoreFileCount());
    } finally {
        HBaseTestingUtility.closeRegionAndWAL(region);
    }
}
Also used : WAL(org.apache.hadoop.hbase.wal.WAL) MetricsWAL(org.apache.hadoop.hbase.regionserver.wal.MetricsWAL) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) ByteString(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) WALEdit(org.apache.hadoop.hbase.regionserver.wal.WALEdit) FinishRegionRecoveringHandler(org.apache.hadoop.hbase.regionserver.handler.FinishRegionRecoveringHandler) ServerName(org.apache.hadoop.hbase.ServerName) RegionEventDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) Test(org.junit.Test)

Example 10 with StoreDescriptor

Use of org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor in project hbase by apache.

From class ReplicationSink, method buildBulkLoadHFileMap.

private void buildBulkLoadHFileMap(final Map<String, List<Pair<byte[], List<String>>>> bulkLoadHFileMap, TableName table, Cell cell) throws IOException {
    BulkLoadDescriptor bld = WALEdit.getBulkLoadDescriptor(cell);
    List<StoreDescriptor> storesList = bld.getStoresList();
    int storesSize = storesList.size();
    for (int j = 0; j < storesSize; j++) {
        StoreDescriptor storeDescriptor = storesList.get(j);
        List<String> storeFileList = storeDescriptor.getStoreFileList();
        int storeFilesSize = storeFileList.size();
        hfilesReplicated += storeFilesSize;
        for (int k = 0; k < storeFilesSize; k++) {
            byte[] family = storeDescriptor.getFamilyName().toByteArray();
            // Build hfile relative path from its namespace
            String pathToHfileFromNS = getHFilePath(table, bld, storeFileList.get(k), family);
            String tableName = table.getNameWithNamespaceInclAsString();
            if (bulkLoadHFileMap.containsKey(tableName)) {
                List<Pair<byte[], List<String>>> familyHFilePathsList = bulkLoadHFileMap.get(tableName);
                boolean foundFamily = false;
                for (int i = 0; i < familyHFilePathsList.size(); i++) {
                    Pair<byte[], List<String>> familyHFilePathsPair = familyHFilePathsList.get(i);
                    if (Bytes.equals(familyHFilePathsPair.getFirst(), family)) {
                        // Found family already present, just add the path to the existing list
                        familyHFilePathsPair.getSecond().add(pathToHfileFromNS);
                        foundFamily = true;
                        break;
                    }
                }
                if (!foundFamily) {
                    // Family not found, add this family and its hfile paths pair to the list
                    addFamilyAndItsHFilePathToTableInMap(family, pathToHfileFromNS, familyHFilePathsList);
                }
            } else {
                // Add this table entry into the map
                addNewTableEntryInMap(bulkLoadHFileMap, family, pathToHfileFromNS, tableName);
            }
        }
    }
}
Also used : BulkLoadDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) ArrayList(java.util.ArrayList) List(java.util.List) StoreDescriptor(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) Pair(org.apache.hadoop.hbase.util.Pair)
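
buildBulkLoadHFileMap delegates to two helpers that the excerpt does not show. The following is a plausible reconstruction inferred from the call sites; the actual ReplicationSink implementation may differ in detail.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.util.Pair;

// Sketch: the table is already in the map but this family is new; append a
// (family, paths) pair to the table's list.
private void addFamilyAndItsHFilePathToTableInMap(byte[] family, String pathToHfileFromNS,
        List<Pair<byte[], List<String>>> familyHFilePathsList) {
    List<String> hfilePaths = new ArrayList<>();
    hfilePaths.add(pathToHfileFromNS);
    familyHFilePathsList.add(new Pair<>(family, hfilePaths));
}

// Sketch: first hfile seen for this table; create the table entry with a single
// (family, paths) pair.
private void addNewTableEntryInMap(Map<String, List<Pair<byte[], List<String>>>> bulkLoadHFileMap,
        byte[] family, String pathToHfileFromNS, String tableName) {
    List<String> hfilePaths = new ArrayList<>();
    hfilePaths.add(pathToHfileFromNS);
    List<Pair<byte[], List<String>>> familyHFilePathsList = new ArrayList<>();
    familyHFilePathsList.add(new Pair<>(family, hfilePaths));
    bulkLoadHFileMap.put(tableName, familyHFilePathsList);
}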

Aggregations

StoreDescriptor (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor) 10
BulkLoadDescriptor (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor) 5
WAL (org.apache.hadoop.hbase.wal.WAL) 4
Test (org.junit.Test) 4
IOException (java.io.IOException) 3
ArrayList (java.util.ArrayList) 3
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor) 3
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo) 3
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor) 3
ServerName (org.apache.hadoop.hbase.ServerName) 3
MetricsWAL (org.apache.hadoop.hbase.regionserver.wal.MetricsWAL) 3
WALEdit (org.apache.hadoop.hbase.regionserver.wal.WALEdit) 3
RegionEventDescriptor (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor) 3
Pair (org.apache.hadoop.hbase.util.Pair) 3
FileNotFoundException (java.io.FileNotFoundException) 2
Path (org.apache.hadoop.fs.Path) 2
Cell (org.apache.hadoop.hbase.Cell) 2
Put (org.apache.hadoop.hbase.client.Put) 2
List (java.util.List) 1
Random (java.util.Random) 1