Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.
From class TestDateTieredCompactor, method testEmptyOutputFile.
@Test
public void testEmptyOutputFile() throws Exception {
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  CompactionRequestImpl request = createDummyRequest();
  DateTieredCompactor dtc =
    createCompactor(writers, new KeyValue[0], new ArrayList<>(request.getFiles()));
  List<Path> paths = dtc.compact(request, Arrays.asList(Long.MIN_VALUE, Long.MAX_VALUE),
    new HashMap<Long, String>(), NoLimitThroughputController.INSTANCE, null);
  assertEquals(1, paths.size());
  List<StoreFileWritersCapture.Writer> dummyWriters = writers.getWriters();
  assertEquals(1, dummyWriters.size());
  StoreFileWritersCapture.Writer dummyWriter = dummyWriters.get(0);
  // Even with no input cells, the compaction should produce exactly one
  // output file that is empty but still carries metadata.
  assertTrue(dummyWriter.kvs.isEmpty());
  assertTrue(dummyWriter.hasMetadata);
}
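StoreFileWritersCapture doubles as a Mockito Answer: whenever the code under test asks the mocked store engine for a new writer, the capture hands back a stub and records it for later assertions (the createCompactor method further below wires it in via thenAnswer(writers)). Below is a minimal sketch of that pattern, using a hypothetical WritersCaptureSketch class with Object-typed writers; the real capture in TestCompactor records considerably more state (cells, metadata, boundaries).

import static org.mockito.Mockito.mock;

import java.util.ArrayList;
import java.util.List;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class WritersCaptureSketch implements Answer<Object> {
  private final List<Object> writers = new ArrayList<>();

  @Override
  public Object answer(InvocationOnMock invocation) {
    // Hand back a mock writer and remember it so the test can later
    // inspect everything the code under test created.
    Object writer = mock(invocation.getMethod().getReturnType());
    writers.add(writer);
    return writer;
  }

  public List<Object> getWriters() {
    return writers;
  }
}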
Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.
From class TestStripeCompactor, method verifyBoundaryCompaction.
private void verifyBoundaryCompaction(KeyValue[] input, byte[][] boundaries, KeyValue[][] output,
    byte[] majorFrom, byte[] majorTo, boolean allFiles) throws Exception {
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  StripeCompactor sc = createCompactor(writers, input);
  List<Path> paths = sc.compact(createDummyRequest(), Arrays.asList(boundaries), majorFrom,
    majorTo, NoLimitThroughputController.INSTANCE, null);
  writers.verifyKvs(output, allFiles, true);
  if (allFiles) {
    assertEquals(output.length, paths.size());
    writers.verifyBoundaries(boundaries);
  }
}
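A hypothetical invocation, assuming a helper kv(String row) that builds a KeyValue for a given row and using HConstants.EMPTY_BYTE_ARRAY as the open-ended stripe key; rows before "bbb" should land in the first stripe and the rest in the second:

// Hypothetical rows and boundaries, for illustration only.
KeyValue[] input = { kv("aaa"), kv("bbb"), kv("ccc") };
byte[][] boundaries =
  { HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes("bbb"), HConstants.EMPTY_BYTE_ARRAY };
KeyValue[][] output = { { kv("aaa") }, { kv("bbb"), kv("ccc") } };
// allFiles = true additionally asserts one output path per stripe
// and the stripe boundaries.
verifyBoundaryCompaction(input, boundaries, output, null, null, true);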
Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.
From class TestStripeCompactor, method verifySizeCompaction.
private void verifySizeCompaction(KeyValue[] input, int targetCount, long targetSize, byte[] left,
    byte[] right, KeyValue[][] output) throws Exception {
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  StripeCompactor sc = createCompactor(writers, input);
  List<Path> paths = sc.compact(createDummyRequest(), targetCount, targetSize, left, right,
    null, null, NoLimitThroughputController.INSTANCE, null);
  assertEquals(output.length, paths.size());
  writers.verifyKvs(output, true, true);
  // Reconstruct the expected boundaries: the left edge, then the first row
  // of every stripe after the first, then the right edge.
  List<byte[]> boundaries = new ArrayList<>(output.length + 2);
  boundaries.add(left);
  for (int i = 1; i < output.length; ++i) {
    boundaries.add(CellUtil.cloneRow(output[i][0]));
  }
  boundaries.add(right);
  writers.verifyBoundaries(boundaries.toArray(new byte[][] {}));
}
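For instance, given three output stripes whose second and third stripes start at rows "ggg" and "ppp" (hypothetical values), the loop above reconstructs the boundary list as:

// Hypothetical three-stripe result: left edge, first rows of stripes 2
// and 3, then the right edge.
byte[] left = Bytes.toBytes("aaa");
byte[] right = Bytes.toBytes("zzz");
byte[][] expectedBoundaries = { left, Bytes.toBytes("ggg"), Bytes.toBytes("ppp"), right };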
Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.
From class TestStripeCompactionPolicy, method createCompactor.
private StripeCompactor createCompactor() throws Exception {
  ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("foo"));
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  HStore store = mock(HStore.class);
  RegionInfo info = mock(RegionInfo.class);
  when(info.getRegionNameAsString()).thenReturn("testRegion");
  when(store.getColumnFamilyDescriptor()).thenReturn(familyDescriptor);
  when(store.getRegionInfo()).thenReturn(info);
  // Route writer creation through the capture so the test can inspect every
  // StoreFileWriter the compactor requests.
  StoreEngine storeEngine = mock(StoreEngine.class);
  when(storeEngine.createWriter(any(CreateStoreFileWriterParams.class))).thenAnswer(writers);
  when(store.getStoreEngine()).thenReturn(storeEngine);
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean("hbase.regionserver.compaction.private.readers", usePrivateReaders);
  final Scanner scanner = new Scanner();
  // Override both scanner-creation paths so the compaction reads from the
  // canned Scanner instead of real store files.
  return new StripeCompactor(conf, store) {
    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, long smallestReadPoint, long earliestPutTs,
        byte[] dropDeletesFromRow, byte[] dropDeletesToRow) throws IOException {
      return scanner;
    }

    @Override
    protected InternalScanner createScanner(HStore store, ScanInfo scanInfo,
        List<StoreFileScanner> scanners, ScanType scanType, long smallestReadPoint,
        long earliestPutTs) throws IOException {
      return scanner;
    }
  };
}
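An illustrative way to exercise the resulting compactor, reusing the size-based compact signature shown in verifySizeCompaction above (the parameter values here are made up; the real policy tests drive this through StripeCompactionPolicy rather than calling compact() directly):

// Illustrative only: one target stripe, unbounded size, no delete-drop range.
StripeCompactor sc = createCompactor();
List<Path> paths = sc.compact(createDummyRequest(), 1, Long.MAX_VALUE, null, null, null, null,
  NoLimitThroughputController.INSTANCE, null);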
Use of org.apache.hadoop.hbase.regionserver.compactions.TestCompactor.StoreFileWritersCapture in project hbase by apache.
From class TestStripeCompactionPolicy, method verifyFlush.
/**
 * Verify an arbitrary flush.
 */
protected void verifyFlush(StripeCompactionPolicy policy, StripeInformationProvider si,
    KeyValue[] input, KeyValue[][] expected, byte[][] boundaries) throws IOException {
  StoreFileWritersCapture writers = new StoreFileWritersCapture();
  StripeStoreFlusher.StripeFlushRequest req =
    policy.selectFlush(CellComparatorImpl.COMPARATOR, si, input.length);
  StripeMultiFileWriter mw = req.createWriter();
  mw.init(null, writers);
  for (KeyValue kv : input) {
    mw.append(kv);
  }
  // Boundary metadata is only expected when explicit boundaries were supplied.
  boolean hasMetadata = boundaries != null;
  mw.commitWriters(0, false);
  writers.verifyKvs(expected, true, hasMetadata);
  if (hasMetadata) {
    writers.verifyBoundaries(boundaries);
  }
}
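A hypothetical call, assuming policy and si fixtures and the same kv(String row) helper as above; passing explicit boundaries makes the method assert boundary metadata, while passing null would skip that check:

// Hypothetical flush check: two stripes split at "bbb".
KeyValue[] input = { kv("aaa"), kv("bbb"), kv("ccc") };
KeyValue[][] expected = { { kv("aaa") }, { kv("bbb"), kv("ccc") } };
byte[][] boundaries =
  { HConstants.EMPTY_BYTE_ARRAY, Bytes.toBytes("bbb"), HConstants.EMPTY_BYTE_ARRAY };
verifyFlush(policy, si, input, expected, boundaries);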