
Example 1 with StoreFileWritersCapture

Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.

From the class TestDateTieredCompactor, method testEmptyOutputFile:

@Test
public void testEmptyOutputFile() throws Exception {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    CompactionRequestImpl request = createDummyRequest();
    DateTieredCompactor dtc = createCompactor(writers, new KeyValue[0], new ArrayList<>(request.getFiles()));
    List<Path> paths = dtc.compact(request, Arrays.asList(Long.MIN_VALUE, Long.MAX_VALUE), new HashMap<Long, String>(), NoLimitThroughputController.INSTANCE, null);
    assertEquals(1, paths.size());
    List<StoreFileWritersCapture.Writer> dummyWriters = writers.getWriters();
    assertEquals(1, dummyWriters.size());
    StoreFileWritersCapture.Writer dummyWriter = dummyWriters.get(0);
    assertTrue(dummyWriter.kvs.isEmpty());
    assertTrue(dummyWriter.hasMetadata);
}
Also used : Path(org.apache.hadoop.fs.Path) OptionalLong(java.util.OptionalLong) StoreFileWritersCapture(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture) Test(org.junit.Test)
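
The test above relies on a createDummyRequest() helper defined in the surrounding test code. A minimal sketch of such a helper is shown below; it is illustrative only (the real helper in the HBase tests does more setup on the mocked store file), but it shows the essential shape: a CompactionRequestImpl wrapping a single mocked HStoreFile.

private CompactionRequestImpl createDummyRequest() throws Exception {
    // Illustrative sketch, not the actual HBase test helper: one mocked store
    // file is enough to give the compactor something to "compact".
    HStoreFile file = mock(HStoreFile.class);
    when(file.getMaxSequenceId()).thenReturn(1L);
    return new CompactionRequestImpl(Arrays.asList(file));
}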

Example 2 with StoreFileWritersCapture

Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.

From the class TestStripeCompactor, method verifyBoundaryCompaction:

private void verifyBoundaryCompaction(KeyValue[] input, byte[][] boundaries, KeyValue[][] output, byte[] majorFrom, byte[] majorTo, boolean allFiles) throws Exception {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    StripeCompactor sc = createCompactor(writers, input);
    List<Path> paths = sc.compact(createDummyRequest(), Arrays.asList(boundaries), majorFrom, majorTo, NoLimitThroughputController.INSTANCE, null);
    writers.verifyKvs(output, allFiles, true);
    if (allFiles) {
        assertEquals(output.length, paths.size());
        writers.verifyBoundaries(boundaries);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWritersCapture(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture)
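
To make the boundary/output relationship concrete, a hypothetical invocation of this helper could look like the snippet below (rows, family and qualifier are made up, and HConstants.EMPTY_START_ROW / HConstants.EMPTY_END_ROW stand in for the open outer boundaries). Cells are routed into stripes by row, so with a single interior boundary at "bbb" the row "aaa" is expected in the first output file and "bbb"/"ccc" in the second.

byte[] fam = Bytes.toBytes("f");
byte[] qual = Bytes.toBytes("q");
KeyValue kvA = new KeyValue(Bytes.toBytes("aaa"), fam, qual);
KeyValue kvB = new KeyValue(Bytes.toBytes("bbb"), fam, qual);
KeyValue kvC = new KeyValue(Bytes.toBytes("ccc"), fam, qual);
verifyBoundaryCompaction(
    new KeyValue[] { kvA, kvB, kvC },
    // stripe boundaries: [open, "bbb") and ["bbb", open)
    new byte[][] { HConstants.EMPTY_START_ROW, Bytes.toBytes("bbb"), HConstants.EMPTY_END_ROW },
    // expected contents of the two captured writers
    new KeyValue[][] { { kvA }, { kvB, kvC } },
    null, null, true);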

Example 3 with StoreFileWritersCapture

Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.

From the class TestStripeCompactor, method verifySizeCompaction:

private void verifySizeCompaction(KeyValue[] input, int targetCount, long targetSize, byte[] left, byte[] right, KeyValue[][] output) throws Exception {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    StripeCompactor sc = createCompactor(writers, input);
    List<Path> paths = sc.compact(createDummyRequest(), targetCount, targetSize, left, right, null, null, NoLimitThroughputController.INSTANCE, null);
    assertEquals(output.length, paths.size());
    writers.verifyKvs(output, true, true);
    List<byte[]> boundaries = new ArrayList<>(output.length + 2);
    boundaries.add(left);
    for (int i = 1; i < output.length; ++i) {
        boundaries.add(CellUtil.cloneRow(output[i][0]));
    }
    boundaries.add(right);
    writers.verifyBoundaries(boundaries.toArray(new byte[][] {}));
}
Also used : Path(org.apache.hadoop.fs.Path) ArrayList(java.util.ArrayList) StoreFileWritersCapture(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture)
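
In other words, the expected boundary list is derived from the expected output itself: the outer left and right markers, with the first row of every output stripe after the first slotted in between. For a hypothetical expected output of {{kvA, kvA}, {kvB}, {kvC, kvD}} with rows "aaa", "bbb", "ccc" and "ddd", the loop above produces {left, "bbb", "ccc", right}, which is exactly what verifyBoundaries is then asked to check against the captured writers.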

Example 4 with StoreFileWritersCapture

Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.

From the class TestStripeCompactionPolicy, method createCompactor:

private StripeCompactor createCompactor() throws Exception {
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("foo"));
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    HStore store = mock(HStore.class);
    RegionInfo info = mock(RegionInfo.class);
    when(info.getRegionNameAsString()).thenReturn("testRegion");
    when(store.getColumnFamilyDescriptor()).thenReturn(familyDescriptor);
    when(store.getRegionInfo()).thenReturn(info);
    StoreEngine storeEngine = mock(StoreEngine.class);
    when(storeEngine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
    when(store.getStoreEngine()).thenReturn(storeEngine);
    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
    final Scanner scanner = new Scanner();
    return new StripeCompactor(conf, store) {

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs, byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
            return scanner;
        }

        @Override
        protected InternalScanner createScanner(HStore store, ScanInfo scanInfo, List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint, long earliestPutTs) throws IOException {
            return scanner;
        }
    };
}
Also used : StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) InternalScanner(org.apache.hadoop.hbase.regionserver.InternalScanner) ScanType(org.apache.hadoop.hbase.regionserver.ScanType) StoreEngine(org.apache.hadoop.hbase.regionserver.StoreEngine) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) ScanInfo(org.apache.hadoop.hbase.regionserver.ScanInfo) List(java.util.List) ArrayList(java.util.ArrayList) ImmutableList(org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList) CreateStoreFileWriterParams(org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) HStore(org.apache.hadoop.hbase.regionserver.HStore) StoreFileWritersCapture(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture)
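
The when(storeEngine.createWriter(...)).thenAnswer(writers) line is what wires the capture in: StoreFileWritersCapture acts as a Mockito Answer, so every writer the compactor requests from the store engine is manufactured and recorded by the capture (it also serves as the writer factory passed to mw.init(...) in Example 5). The class below is a deliberately simplified sketch of that pattern, not the actual TestCompactor.StoreFileWritersCapture; names and recorded fields are illustrative, and the real class additionally tracks the boundary metadata checked by verifyBoundaries().

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

// Simplified, illustrative capture: hands out mocked StoreFileWriters and
// remembers what the code under test did with each of them.
public class WritersCaptureSketch implements Answer<StoreFileWriter> {

    public static class CapturedWriter {
        public final List<Cell> kvs = new ArrayList<>();
        public boolean hasMetadata;
    }

    private final List<CapturedWriter> writers = new ArrayList<>();

    @Override
    public StoreFileWriter answer(InvocationOnMock invocation) throws Throwable {
        CapturedWriter captured = new CapturedWriter();
        writers.add(captured);
        StoreFileWriter writer = mock(StoreFileWriter.class);
        // Record every cell appended to this writer.
        doAnswer(inv -> {
            captured.kvs.add(inv.getArgument(0));
            return null;
        }).when(writer).append(any(Cell.class));
        // Record that compaction metadata was written.
        doAnswer(inv -> {
            captured.hasMetadata = true;
            return null;
        }).when(writer).appendMetadata(anyLong(), anyBoolean());
        return writer;
    }

    public List<CapturedWriter> getWriters() {
        return writers;
    }
}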

Example 5 with StoreFileWritersCapture

Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.

From the class TestStripeCompactionPolicy, method verifyFlush:

/**
 * Verify arbitrary flush.
 */
protected void verifyFlush(StripeCompactionPolicy policy, StripeInformationProvider si, KeyValue[] input, KeyValue[][] expected, byte[][] boundaries) throws IOException {
    StoreFileWritersCapture writers = new StoreFileWritersCapture();
    StripeStoreFlusher.StripeFlushRequest req = policy.selectFlush(CellComparatorImpl.COMPARATOR, si, input.length);
    StripeMultiFileWriter mw = req.createWriter();
    mw.init(null, writers);
    for (KeyValue kv : input) {
        mw.append(kv);
    }
    boolean hasMetadata = boundaries != null;
    mw.commitWriters(0, false);
    writers.verifyKvs(expected, true, hasMetadata);
    if (hasMetadata) {
        writers.verifyBoundaries(boundaries);
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) StripeStoreFlusher(org.apache.hadoop.hbase.regionserver.StripeStoreFlusher) StripeMultiFileWriter(org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter) StoreFileWritersCapture(org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture)
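
Note that the boundaries argument doubles as a switch: when it is null, hasMetadata is false, so the helper only checks the captured cells and skips verifyBoundaries entirely. That models a flush whose output carries no stripe metadata, for example when the policy flushes everything into a single L0 file.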

Aggregations (classes used across the matching examples, with usage counts):

StoreFileWritersCapture (org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture): 6
Path (org.apache.hadoop.fs.Path): 4
ArrayList (java.util.ArrayList): 2
OptionalLong (java.util.OptionalLong): 2
List (java.util.List): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 1
KeyValue (org.apache.hadoop.hbase.KeyValue): 1
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 1
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 1
CreateStoreFileWriterParams (org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams): 1
HStore (org.apache.hadoop.hbase.regionserver.HStore): 1
HStoreFile (org.apache.hadoop.hbase.regionserver.HStoreFile): 1
InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner): 1
ScanInfo (org.apache.hadoop.hbase.regionserver.ScanInfo): 1
ScanType (org.apache.hadoop.hbase.regionserver.ScanType): 1
StoreEngine (org.apache.hadoop.hbase.regionserver.StoreEngine): 1
StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner): 1
StripeMultiFileWriter (org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter): 1
StripeStoreFlusher (org.apache.hadoop.hbase.regionserver.StripeStoreFlusher): 1