Example 16 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From class TestCachedMobFile, method testCompare.

@Test
public void testCompare() throws Exception {
    String caseName = getName();
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    Path outputDir1 = new Path(testDir, FAMILY1);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer1 = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(outputDir1).withFileContext(meta).build();
    MobTestUtil.writeStoreFile(writer1, caseName);
    CachedMobFile cachedMobFile1 = CachedMobFile.create(fs, writer1.getPath(), conf, cacheConf);
    Path outputDir2 = new Path(testDir, FAMILY2);
    StoreFileWriter writer2 = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(outputDir2).withFileContext(meta).build();
    MobTestUtil.writeStoreFile(writer2, caseName);
    CachedMobFile cachedMobFile2 = CachedMobFile.create(fs, writer2.getPath(), conf, cacheConf);
    cachedMobFile1.access(1);
    cachedMobFile2.access(2);
    Assert.assertEquals(1, cachedMobFile1.compareTo(cachedMobFile2));
    Assert.assertEquals(-1, cachedMobFile2.compareTo(cachedMobFile1));
    Assert.assertEquals(0, cachedMobFile1.compareTo(cachedMobFile1));
}
Also used : Path (org.apache.hadoop.fs.Path), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), FileSystem (org.apache.hadoop.fs.FileSystem), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
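
The assertions above pin down the Comparable contract for CachedMobFile: the file with the higher access count sorts first. A minimal sketch of how that ordering could be consumed, assuming CachedMobFile implements Comparable<CachedMobFile> as the compareTo calls suggest (the helper class below is hypothetical, not part of HBase):

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hbase.mob.CachedMobFile;

// Hypothetical helper, not part of HBase: sorts cached MOB files by the
// ordering the test verifies, leaving the least-accessed files at the tail.
final class MobFileEvictionOrder {
    private MobFileEvictionOrder() {
    }

    static void sortForEviction(List<CachedMobFile> files) {
        // Ascending sort: a higher access count compares smaller, so
        // files.get(files.size() - 1) is the least-accessed eviction candidate.
        Collections.sort(files);
    }
}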

Example 17 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From class DataBlockEncodingTool, method checkStatistics.

/**
   * Check statistics for the given HFile across the different data block encoders.
   * @param scanner scanner over the file that will be compressed
   * @param kvLimit maximum number of KeyValues to process
   * @throws IOException if the scanner is invalid
   */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit) throws IOException {
    scanner.seek(KeyValue.LOWESTKEY);
    KeyValue currentKV;
    byte[] previousKey = null;
    byte[] currentKey;
    DataBlockEncoding[] encodings = DataBlockEncoding.values();
    ByteArrayOutputStream uncompressedOutputStream = new ByteArrayOutputStream();
    int j = 0;
    boolean useTag = false;
    while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
        // Iterates through key/value pairs
        j++;
        currentKey = currentKV.getKey();
        if (previousKey != null) {
            for (int i = 0; i < previousKey.length && i < currentKey.length && previousKey[i] == currentKey[i]; ++i) {
                totalKeyRedundancyLength++;
            }
        }
        uncompressedOutputStream.write(currentKV.getBuffer(), currentKV.getOffset(), currentKV.getLength());
        previousKey = currentKey;
        int kLen = currentKV.getKeyLength();
        int vLen = currentKV.getValueLength();
        int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset());
        int restLen = currentKV.getLength() - kLen - vLen;
        totalKeyLength += kLen;
        totalValueLength += vLen;
        totalPrefixLength += restLen;
        totalCFLength += cfLen;
        // Record tag usage while currentKV is guaranteed non-null; after the loop
        // it is null whenever the scanner was exhausted before hitting kvLimit.
        useTag = currentKV.getTagsLength() > 0;
    }
    rawKVs = uncompressedOutputStream.toByteArray();
    for (DataBlockEncoding encoding : encodings) {
        if (encoding == DataBlockEncoding.NONE) {
            continue;
        }
        DataBlockEncoder d = encoding.getEncoder();
        HFileContext meta = new HFileContextBuilder().withCompression(Compression.Algorithm.NONE).withIncludesMvcc(includesMemstoreTS).withIncludesTags(useTag).build();
        codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta));
    }
}
Also used : DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding), KeyValue (org.apache.hadoop.hbase.KeyValue), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), DataBlockEncoder (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder), EncodedDataBlock (org.apache.hadoop.hbase.io.encoding.EncodedDataBlock), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
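
The inner loop over previousKey and currentKey is what feeds totalKeyRedundancyLength: it counts the leading bytes two consecutive keys share, which is exactly the redundancy a prefix-based encoding can elide. Extracted as a standalone helper, the computation looks like this (a minimal sketch with illustrative names, not part of DataBlockEncodingTool):

// Length of the common prefix of two keys, i.e. the bytes a prefix encoding
// could avoid storing for the second key.
static int sharedPrefixLength(byte[] previousKey, byte[] currentKey) {
    int limit = Math.min(previousKey.length, currentKey.length);
    int i = 0;
    while (i < limit && previousKey[i] == currentKey[i]) {
        i++;
    }
    return i;
}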

Example 18 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From class TestFSErrorsExposed, method testStoreFileScannerThrowsErrors.

/**
   * Injects errors into the pread calls of an on-disk file, and makes
   * sure those bubble up to the StoreFileScanner
   */
@Test
public void testStoreFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem) util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    HFileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(util.getConfiguration(), cacheConf, hfs).withOutputDir(hfilePath).withFileContext(meta).build();
    TestStoreFile.writeStoreFile(writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf, BloomType.NONE);
    // 0 is passed as readpoint because this test operates on StoreFile directly
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
        Collections.singletonList(sf), false, true, false, false, 0);
    KeyValueScanner scanner = scanners.get(0);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seek(KeyValue.LOWESTKEY);
    // Do at least one successful read
    assertNotNull(scanner.next());
    faultyfs.startFaults();
    try {
        int scanned = 0;
        while (scanner.next() != null) {
            scanned++;
        }
        fail("Scanner didn't throw after faults injected; scanned " + scanned + " KVs");
    } catch (IOException ioe) {
        LOG.info("Got expected exception", ioe);
        assertTrue(ioe.getMessage().contains("Could not iterate"));
    }
    scanner.close();
}
Also used : Path (org.apache.hadoop.fs.Path), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), IOException (java.io.IOException), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), HFileSystem (org.apache.hadoop.hbase.fs.HFileSystem), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Test (org.junit.Test)
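
FaultyFileSystem and FaultyInputStream are fixtures defined elsewhere in TestFSErrorsExposed; the essential idea is a wrapper stream that reads normally until faults are switched on and then throws on every read. A plain java.io illustration of that idea (a sketch only, not the actual fixture, which wraps Hadoop's positioned-read streams):

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Illustrative stand-in for FaultyInputStream: behaves normally until
// startFaults() is called, then fails every subsequent read.
class FaultyStream extends FilterInputStream {
    private volatile boolean faultsEnabled = false;

    FaultyStream(InputStream in) {
        super(in);
    }

    void startFaults() {
        faultsEnabled = true;
    }

    @Override
    public int read() throws IOException {
        if (faultsEnabled) {
            throw new IOException("Fault injected");
        }
        return super.read();
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        if (faultsEnabled) {
            throw new IOException("Fault injected");
        }
        return super.read(b, off, len);
    }
}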

Example 19 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From class TestHRegionReplayEvents, method createHFileForFamilies.

private String createHFileForFamilies(Path testPath, byte[] family, byte[] valueBytes) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
    // TODO We need a way to do this without creating files
    Path testFile = new Path(testPath, UUID.randomUUID().toString());
    FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile);
    try {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContext());
        HFile.Writer writer = hFileFactory.create();
        try {
            writer.append(new KeyValue(CellUtil.createCell(valueBytes, family, valueBytes, 0L, KeyValue.Type.Put.getCode(), valueBytes)));
        } finally {
            writer.close();
        }
    } finally {
        out.close();
    }
    return testFile.toString();
}
Also used : Path (org.apache.hadoop.fs.Path), KeyValue (org.apache.hadoop.hbase.KeyValue), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), HFile (org.apache.hadoop.hbase.io.hfile.HFile), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
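
CellUtil.createCell here produces a KeyValue-backed cell, with valueBytes doing triple duty as row, qualifier, and value. For comparison, a sketch of the equivalent direct construction through the classic KeyValue constructor (assuming the (row, family, qualifier, timestamp, type, value) overload available in this HBase version):

// Equivalent to the CellUtil.createCell call above: valueBytes is reused as
// row, qualifier, and value, with an explicit timestamp of 0L.
KeyValue kv = new KeyValue(
    valueBytes,            // row
    family,                // column family
    valueBytes,            // qualifier
    0L,                    // timestamp
    KeyValue.Type.Put,     // cell type
    valueBytes);           // value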

Example 20 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From class TestMobStoreCompaction, method createHFile.

/**
   * Create an HFile containing a single cell for the given row index, using
   * dummyData as the value.
   */
private void createHFile(Path path, int rowIdx, byte[] dummyData) throws IOException {
    HFileContext meta = new HFileContextBuilder().build();
    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path).withFileContext(meta).create();
    long now = System.currentTimeMillis();
    try {
        KeyValue kv = new KeyValue(Bytes.add(STARTROW, Bytes.toBytes(rowIdx)), COLUMN_FAMILY, Bytes.toBytes("colX"), now, dummyData);
        writer.append(kv);
    } finally {
        writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
        writer.close();
    }
}
Also used : HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
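
The row key above is built with Bytes.add(STARTROW, Bytes.toBytes(rowIdx)): the start row concatenated with the four-byte big-endian encoding of the row index. A small sketch of what those utilities produce, with illustrative values:

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.util.Bytes;

// Bytes.toBytes(int) yields the 4-byte big-endian encoding of an int, and
// Bytes.add concatenates two arrays into a fresh one.
byte[] rowIdxBytes = Bytes.toBytes(42);                     // {0, 0, 0, 42}
byte[] row = Bytes.add(Bytes.toBytes("row-"), rowIdxBytes); // "row-" + index bytes

// The same encoding via plain java.nio, for comparison:
byte[] viaNio = ByteBuffer.allocate(4).putInt(42).array();
assert Bytes.equals(rowIdxBytes, viaNio);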

Aggregations

HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext) 55
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) 51
Path (org.apache.hadoop.fs.Path) 34
Test (org.junit.Test) 31
KeyValue (org.apache.hadoop.hbase.KeyValue) 24
FileSystem (org.apache.hadoop.fs.FileSystem) 21
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig) 21
Cell (org.apache.hadoop.hbase.Cell) 12
HFile (org.apache.hadoop.hbase.io.hfile.HFile) 11
ByteBuffer (java.nio.ByteBuffer) 10
Configuration (org.apache.hadoop.conf.Configuration) 9
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter) 7
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner) 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 5
DataOutputStream (java.io.DataOutputStream) 5
SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff) 5
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration) 4
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo) 4
PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec) 4
EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker) 4
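
Taken together, the examples follow one pattern: build an HFileContext through HFileContextBuilder, then hand it to an HFile writer factory or StoreFileWriter.Builder. A consolidated sketch combining the builder options that appear across the examples above (the values are illustrative, not recommendations):

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

// Every builder call below appears in at least one example above.
HFileContext context = new HFileContextBuilder()
    .withBlockSize(8 * 1024)                      // block size, as in TestCachedMobFile
    .withCompression(Compression.Algorithm.NONE)  // as in DataBlockEncodingTool
    .withIncludesMvcc(false)                      // whether blocks carry MVCC timestamps
    .withIncludesTags(false)                      // whether blocks carry cell tags
    .build();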