
Example 1 with CellSet

Use of org.apache.hadoop.hbase.regionserver.CellSet in project hbase by apache.

From the class BoundedRecoveredHFilesOutputSink, the method append:

@Override
void append(RegionEntryBuffer buffer) throws IOException {
    Map<String, CellSet> familyCells = new HashMap<>();
    Map<String, Long> familySeqIds = new HashMap<>();
    boolean isMetaTable = buffer.tableName.equals(META_TABLE_NAME);
    // First iterate all Cells to find which column families are present and to stamp Cell with
    // sequence id.
    for (WAL.Entry entry : buffer.entryBuffer) {
        long seqId = entry.getKey().getSequenceId();
        List<Cell> cells = entry.getEdit().getCells();
        for (Cell cell : cells) {
            if (CellUtil.matchingFamily(cell, WALEdit.METAFAMILY)) {
                continue;
            }
            PrivateCellUtil.setSequenceId(cell, seqId);
            String familyName = Bytes.toString(CellUtil.cloneFamily(cell));
            // comparator needs to be specified for meta
            familyCells
                .computeIfAbsent(familyName,
                    key -> new CellSet(
                        isMetaTable ? MetaCellComparator.META_COMPARATOR : CellComparatorImpl.COMPARATOR))
                .add(cell);
            familySeqIds.compute(familyName, (k, v) -> v == null ? seqId : Math.max(v, seqId));
        }
    }
    // Create a new hfile writer for each column family, write edits then close writer.
    String regionName = Bytes.toString(buffer.encodedRegionName);
    for (Map.Entry<String, CellSet> cellsEntry : familyCells.entrySet()) {
        String familyName = cellsEntry.getKey();
        StoreFileWriter writer = createRecoveredHFileWriter(buffer.tableName, regionName,
            familySeqIds.get(familyName), familyName, isMetaTable);
        LOG.trace("Created {}", writer.getPath());
        openingWritersNum.incrementAndGet();
        try {
            for (Cell cell : cellsEntry.getValue()) {
                writer.append(cell);
            }
            // Append the max seqid to the hfile; used during recovery.
            writer.appendMetadata(familySeqIds.get(familyName), false);
            regionEditsWrittenMap.compute(Bytes.toString(buffer.encodedRegionName),
                (k, v) -> v == null ? buffer.entryBuffer.size() : v + buffer.entryBuffer.size());
            splits.add(writer.getPath());
            openingWritersNum.decrementAndGet();
        } finally {
            writer.close();
            LOG.trace("Closed {}, edits={}", writer.getPath(), familyCells.size());
        }
    }
}
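
Why a CellSet rather than a plain list? CellSet is a sorted set of Cells, ordered by the comparator passed to its constructor, so append can iterate each family's set and hand cells to the HFile writer already in the order the writer requires. Below is a minimal standalone sketch of that ordering behavior; the class name and sample rows are invented for illustration, and since CellSet is an internal regionserver class, it assumes the hbase-server artifact is on the classpath.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.CellSet;
import org.apache.hadoop.hbase.util.Bytes;

public class CellSetOrderingDemo {
    public static void main(String[] args) {
        // Same construction as in append() above, minus the meta-table branch.
        CellSet cells = new CellSet(CellComparatorImpl.COMPARATOR);
        // Insert out of row order; the set keeps cells sorted by the comparator.
        cells.add(new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v2")));
        cells.add(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
            Bytes.toBytes("q"), Bytes.toBytes("v1")));
        for (Cell cell : cells) {
            // Iterates in comparator order: row1 first, then row2.
            System.out.println(Bytes.toString(CellUtil.cloneRow(cell)));
        }
    }
}

CellSet also carries set semantics: adding a cell that compares equal to an existing one overwrites it, which deduplicates edits with the same key.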
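The familySeqIds map above is a compact Map.compute() idiom for tracking a per-key maximum: the first edit for a family seeds the entry, and every later edit takes the max. That maximum is then written into each HFile's metadata via writer.appendMetadata. A standalone sketch of just that idiom, with invented family names and sequence ids:

import java.util.HashMap;
import java.util.Map;

public class MaxSeqIdDemo {
    public static void main(String[] args) {
        Map<String, Long> familySeqIds = new HashMap<>();
        String[] families = { "cf1", "cf2", "cf1", "cf1" };
        long[] seqIds = { 5L, 3L, 9L, 7L };
        for (int i = 0; i < families.length; i++) {
            final long seqId = seqIds[i];
            // Same idiom as append(): null means first edit for this family,
            // otherwise keep the larger of the stored and incoming sequence ids.
            familySeqIds.compute(families[i], (k, v) -> v == null ? seqId : Math.max(v, seqId));
        }
        // Prints cf1=9 and cf2=3 (HashMap iteration order is unspecified).
        familySeqIds.forEach((family, maxSeqId) -> System.out.println(family + "=" + maxSeqId));
    }
}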
Also used: Entry(org.apache.hadoop.hbase.wal.WAL.Entry) LoggerFactory(org.slf4j.LoggerFactory) HashMap(java.util.HashMap) InterruptedIOException(java.io.InterruptedIOException) CellSet(org.apache.hadoop.hbase.regionserver.CellSet) ConcurrentMap(java.util.concurrent.ConcurrentMap) Future(java.util.concurrent.Future) RegionEntryBuffer(org.apache.hadoop.hbase.wal.EntryBuffers.RegionEntryBuffer) CellComparatorImpl(org.apache.hadoop.hbase.CellComparatorImpl) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) StoreUtils(org.apache.hadoop.hbase.regionserver.StoreUtils) Map(java.util.Map) Path(org.apache.hadoop.fs.Path) MetaCellComparator(org.apache.hadoop.hbase.MetaCellComparator) Cell(org.apache.hadoop.hbase.Cell) Bytes(org.apache.hadoop.hbase.util.Bytes) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) CellUtil(org.apache.hadoop.hbase.CellUtil) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) List(java.util.List) InterfaceAudience(org.apache.yetus.audience.InterfaceAudience) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) PrivateCellUtil(org.apache.hadoop.hbase.PrivateCellUtil) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) META_TABLE_NAME(org.apache.hadoop.hbase.TableName.META_TABLE_NAME)

Aggregations

IOException (java.io.IOException)1 InterruptedIOException (java.io.InterruptedIOException)1 HashMap (java.util.HashMap)1 List (java.util.List)1 Map (java.util.Map)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 ConcurrentMap (java.util.concurrent.ConcurrentMap)1 ExecutionException (java.util.concurrent.ExecutionException)1 Future (java.util.concurrent.Future)1 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)1 Path (org.apache.hadoop.fs.Path)1 Cell (org.apache.hadoop.hbase.Cell)1 CellComparatorImpl (org.apache.hadoop.hbase.CellComparatorImpl)1 CellUtil (org.apache.hadoop.hbase.CellUtil)1 MetaCellComparator (org.apache.hadoop.hbase.MetaCellComparator)1 PrivateCellUtil (org.apache.hadoop.hbase.PrivateCellUtil)1 TableName (org.apache.hadoop.hbase.TableName)1 META_TABLE_NAME (org.apache.hadoop.hbase.TableName.META_TABLE_NAME)1 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)1 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)1 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)1 CellSet (org.apache.hadoop.hbase.regionserver.CellSet)1 StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter)1 StoreUtils (org.apache.hadoop.hbase.regionserver.StoreUtils)1 Bytes (org.apache.hadoop.hbase.util.Bytes)1 RegionEntryBuffer (org.apache.hadoop.hbase.wal.EntryBuffers.RegionEntryBuffer)1 Entry (org.apache.hadoop.hbase.wal.WAL.Entry)1 InterfaceAudience (org.apache.yetus.audience.InterfaceAudience)1 Logger (org.slf4j.Logger)1 LoggerFactory (org.slf4j.LoggerFactory)1