Example 36 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestCachedMobFile, method testReadKeyValue.

@Test
public void testReadKeyValue() throws Exception {
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(testDir).withFileContext(meta).build();
    String caseName = getName();
    MobTestUtil.writeStoreFile(writer, caseName);
    CachedMobFile cachedMobFile = CachedMobFile.create(fs, writer.getPath(), conf, cacheConf);
    byte[] family = Bytes.toBytes(caseName);
    byte[] qualify = Bytes.toBytes(caseName);
    // Seek with the start key ("aa"); expect the matching first cell back.
    byte[] startKey = Bytes.toBytes("aa");
    KeyValue expectedKey = new KeyValue(startKey, family, qualify, Long.MAX_VALUE, Type.Put, startKey);
    KeyValue seekKey = expectedKey.createKeyOnly(false);
    Cell cell = cachedMobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with the end key ("zz"); expect the matching last cell back.
    byte[] endKey = Bytes.toBytes("zz");
    expectedKey = new KeyValue(endKey, family, qualify, Long.MAX_VALUE, Type.Put, endKey);
    seekKey = expectedKey.createKeyOnly(false);
    cell = cachedMobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a random two-character key; expect the matching cell back.
    byte[] randomKey = Bytes.toBytes(MobTestUtil.generateRandomString(2));
    expectedKey = new KeyValue(randomKey, family, qualify, Long.MAX_VALUE, Type.Put, randomKey);
    seekKey = expectedKey.createKeyOnly(false);
    cell = cachedMobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a key below the start key ("a1" sorts before "aa");
    // readCell returns the first cell in the file.
    byte[] lowerKey = Bytes.toBytes("a1");
    expectedKey = new KeyValue(startKey, family, qualify, Long.MAX_VALUE, Type.Put, startKey);
    seekKey = new KeyValue(lowerKey, family, qualify, Long.MAX_VALUE, Type.Put, lowerKey);
    cell = cachedMobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a key above the end key ("z{" sorts after "zz");
    // readCell returns null.
    byte[] upperKey = Bytes.toBytes("z{");
    seekKey = new KeyValue(upperKey, family, qualify, Long.MAX_VALUE, Type.Put, upperKey);
    cell = cachedMobFile.readCell(seekKey, false);
    Assert.assertNull(cell);
}
Also used: Path (org.apache.hadoop.fs.Path), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), KeyValue (org.apache.hadoop.hbase.KeyValue), FileSystem (org.apache.hadoop.fs.FileSystem), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), Cell (org.apache.hadoop.hbase.Cell), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
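
The MOB tests in these examples all share the same two-step setup: build an HFileContext with HFileContextBuilder, then hand it to a StoreFileWriter.Builder. As a standalone reference, here is a minimal sketch of that pattern; the output directory and the extra compression option are illustrative assumptions, not part of the test above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;

public class HFileContextSketch {

    public static StoreFileWriter newWriter() throws IOException {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        // 8 KB data blocks, as in the tests above; the compression setting
        // is an illustrative extra (NONE is also the default).
        HFileContext meta = new HFileContextBuilder()
            .withBlockSize(8 * 1024)
            .withCompression(Compression.Algorithm.NONE)
            .build();
        // Hypothetical output directory.
        return new StoreFileWriter.Builder(conf, new CacheConfig(conf), fs)
            .withOutputDir(new Path("/tmp/hfilecontext-sketch"))
            .withFileContext(meta)
            .build();
    }
}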

Example 37 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestMobFile, method testReadKeyValue.

@Test
public void testReadKeyValue() throws Exception {
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(testDir).withFileContext(meta).build();
    String caseName = getName();
    MobTestUtil.writeStoreFile(writer, caseName);
    MobFile mobFile = new MobFile(new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE));
    byte[] family = Bytes.toBytes(caseName);
    byte[] qualify = Bytes.toBytes(caseName);
    // Seek with the start key ("aa"); expect the matching first cell back.
    byte[] startKey = Bytes.toBytes("aa");
    KeyValue expectedKey = new KeyValue(startKey, family, qualify, Long.MAX_VALUE, Type.Put, startKey);
    KeyValue seekKey = expectedKey.createKeyOnly(false);
    Cell cell = mobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with the end key ("zz"); expect the matching last cell back.
    byte[] endKey = Bytes.toBytes("zz");
    expectedKey = new KeyValue(endKey, family, qualify, Long.MAX_VALUE, Type.Put, endKey);
    seekKey = expectedKey.createKeyOnly(false);
    cell = mobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a random two-character key; expect the matching cell back.
    byte[] randomKey = Bytes.toBytes(MobTestUtil.generateRandomString(2));
    expectedKey = new KeyValue(randomKey, family, qualify, Long.MAX_VALUE, Type.Put, randomKey);
    seekKey = expectedKey.createKeyOnly(false);
    cell = mobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a key below the start key ("a1" sorts before "aa");
    // readCell returns the first cell in the file.
    byte[] lowerKey = Bytes.toBytes("a1");
    expectedKey = new KeyValue(startKey, family, qualify, Long.MAX_VALUE, Type.Put, startKey);
    seekKey = new KeyValue(lowerKey, family, qualify, Long.MAX_VALUE, Type.Put, lowerKey);
    cell = mobFile.readCell(seekKey, false);
    MobTestUtil.assertCellEquals(expectedKey, cell);
    // Seek with a key above the end key ("z{" sorts after "zz");
    // readCell returns null.
    byte[] upperKey = Bytes.toBytes("z{");
    seekKey = new KeyValue(upperKey, family, qualify, Long.MAX_VALUE, Type.Put, upperKey);
    cell = mobFile.readCell(seekKey, false);
    assertNull(cell);
}
Also used: Path (org.apache.hadoop.fs.Path), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), KeyValue (org.apache.hadoop.hbase.KeyValue), FileSystem (org.apache.hadoop.fs.FileSystem), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), Cell (org.apache.hadoop.hbase.Cell), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)

Example 38 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestMobFile, method testGetScanner.

@Test
public void testGetScanner() throws Exception {
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withOutputDir(testDir).withFileContext(meta).build();
    MobTestUtil.writeStoreFile(writer, getName());
    MobFile mobFile = new MobFile(new StoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE));
    assertNotNull(mobFile.getScanner());
    assertTrue(mobFile.getScanner() instanceof StoreFileScanner);
}
Also used: Path (org.apache.hadoop.fs.Path), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), FileSystem (org.apache.hadoop.fs.FileSystem), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner), Test (org.junit.Test)
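
The scanner returned here follows HBase's KeyValueScanner contract: seek to a starting key, then call next() until it returns null. Below is a hedged sketch of draining the scanner from this test, written against that contract; the empty start row, the defensive cast, and the surrounding imports are assumptions for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;

// Sketch only: mobFile is the MobFile built in testGetScanner above.
StoreFileScanner scanner = (StoreFileScanner) mobFile.getScanner();
try {
    // Position at the first possible key, i.e. the start of the file.
    if (scanner.seek(KeyValueUtil.createFirstOnRow(HConstants.EMPTY_BYTE_ARRAY))) {
        for (Cell cell = scanner.next(); cell != null; cell = scanner.next()) {
            // ... process cell ...
        }
    }
} finally {
    scanner.close();
}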

Example 39 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class CreateRandomStoreFile, method run.

/**
   * Runs the tool.
   *
   * @param args command-line arguments
   * @return true in case of success
   * @throws IOException if an I/O error occurs
   */
public boolean run(String[] args) throws IOException {
    options.addOption(OUTPUT_DIR_OPTION, "output_dir", true, "Output directory");
    options.addOption(NUM_KV_OPTION, "num_kv", true, "Number of key/value pairs");
    options.addOption(KEY_SIZE_OPTION, "key_size", true, "Average key size");
    options.addOption(VALUE_SIZE_OPTION, "value_size", true, "Average value size");
    options.addOption(HFILE_VERSION_OPTION, "hfile_version", true, "HFile version to create");
    options.addOption(COMPRESSION_OPTION, "compression", true, "Compression type, one of " + Arrays.toString(Compression.Algorithm.values()));
    options.addOption(BLOOM_FILTER_OPTION, "bloom_filter", true, "Bloom filter type, one of " + Arrays.toString(BloomType.values()));
    options.addOption(BLOCK_SIZE_OPTION, "block_size", true, "HFile block size");
    options.addOption(BLOOM_BLOCK_SIZE_OPTION, "bloom_block_size", true, "Compound Bloom filters block size");
    options.addOption(INDEX_BLOCK_SIZE_OPTION, "index_block_size", true, "Index block size");
    if (args.length == 0) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(CreateRandomStoreFile.class.getSimpleName(), options, true);
        return false;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmdLine;
    try {
        cmdLine = parser.parse(options, args);
    } catch (ParseException ex) {
        LOG.error(ex);
        return false;
    }
    if (!cmdLine.hasOption(OUTPUT_DIR_OPTION)) {
        LOG.error("Output directory is not specified");
        return false;
    }
    if (!cmdLine.hasOption(NUM_KV_OPTION)) {
        LOG.error("The number of keys/values not specified");
        return false;
    }
    if (!cmdLine.hasOption(KEY_SIZE_OPTION)) {
        LOG.error("Key size is not specified");
        return false;
    }
    if (!cmdLine.hasOption(VALUE_SIZE_OPTION)) {
        LOG.error("Value size not specified");
        return false;
    }
    Configuration conf = HBaseConfiguration.create();
    Path outputDir = new Path(cmdLine.getOptionValue(OUTPUT_DIR_OPTION));
    long numKV = Long.parseLong(cmdLine.getOptionValue(NUM_KV_OPTION));
    configureKeyValue(numKV, Integer.parseInt(cmdLine.getOptionValue(KEY_SIZE_OPTION)), Integer.parseInt(cmdLine.getOptionValue(VALUE_SIZE_OPTION)));
    FileSystem fs = FileSystem.get(conf);
    Compression.Algorithm compr = Compression.Algorithm.NONE;
    if (cmdLine.hasOption(COMPRESSION_OPTION)) {
        compr = Compression.Algorithm.valueOf(cmdLine.getOptionValue(COMPRESSION_OPTION));
    }
    BloomType bloomType = BloomType.NONE;
    if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
        bloomType = BloomType.valueOf(cmdLine.getOptionValue(BLOOM_FILTER_OPTION));
    }
    int blockSize = HConstants.DEFAULT_BLOCKSIZE;
    if (cmdLine.hasOption(BLOCK_SIZE_OPTION)) {
        blockSize = Integer.parseInt(cmdLine.getOptionValue(BLOCK_SIZE_OPTION));
    }
    if (cmdLine.hasOption(BLOOM_BLOCK_SIZE_OPTION)) {
        conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE, Integer.parseInt(cmdLine.getOptionValue(BLOOM_BLOCK_SIZE_OPTION)));
    }
    if (cmdLine.hasOption(INDEX_BLOCK_SIZE_OPTION)) {
        conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, Integer.parseInt(cmdLine.getOptionValue(INDEX_BLOCK_SIZE_OPTION)));
    }
    HFileContext meta = new HFileContextBuilder().withCompression(compr).withBlockSize(blockSize).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, new CacheConfig(conf), fs).withOutputDir(outputDir).withBloomType(bloomType).withMaxKeyCount(numKV).withFileContext(meta).build();
    rand = new Random();
    LOG.info("Writing " + numKV + " key/value pairs");
    for (long i = 0; i < numKV; ++i) {
        sfw.append(generateKeyValue(i));
    }
    int numMetaBlocks = rand.nextInt(10) + 1;
    LOG.info("Writing " + numMetaBlocks + " meta blocks");
    for (int metaI = 0; metaI < numMetaBlocks; ++metaI) {
        sfw.getHFileWriter().appendMetaBlock(generateString(), new BytesWritable(generateValue()));
    }
    sfw.close();
    Path storeFilePath = sfw.getPath();
    long fileSize = fs.getFileStatus(storeFilePath).getLen();
    LOG.info("Created " + storeFilePath + ", " + fileSize + " bytes");
    return true;
}
Also used: Path (org.apache.hadoop.fs.Path), Compression (org.apache.hadoop.hbase.io.compress.Compression), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), Configuration (org.apache.hadoop.conf.Configuration), PosixParser (org.apache.commons.cli.PosixParser), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), BytesWritable (org.apache.hadoop.io.BytesWritable), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), HelpFormatter (org.apache.commons.cli.HelpFormatter), CommandLine (org.apache.commons.cli.CommandLine), Random (java.util.Random), FileSystem (org.apache.hadoop.fs.FileSystem), CommandLineParser (org.apache.commons.cli.CommandLineParser), ParseException (org.apache.commons.cli.ParseException), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)
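
A hedged invocation sketch follows. The code above only shows the long option names ("output_dir", "num_kv", and so on); the short letters behind OUTPUT_DIR_OPTION and friends are defined elsewhere in the class, so the long forms are used here. The argument values, and the assumption that the class exposes a public no-arg constructor, are illustrative.

// Hypothetical driver; all values are illustrative.
String[] args = {
    "--output_dir", "/tmp/random-store-file",
    "--num_kv", "100000",
    "--key_size", "20",
    "--value_size", "400"
};
boolean ok = new CreateRandomStoreFile().run(args);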

Example 40 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestBulkLoad, method createHFileForFamilies.

private String createHFileForFamilies(byte[] family) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(conf);
    // TODO We need a way to do this without creating files
    File hFileLocation = testFolder.newFile();
    FSDataOutputStream out = new FSDataOutputStream(new FileOutputStream(hFileLocation), null);
    try {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContext());
        HFile.Writer writer = hFileFactory.create();
        try {
            writer.append(new KeyValue(CellUtil.createCell(randomBytes, family, randomBytes, 0L, KeyValue.Type.Put.getCode(), randomBytes)));
        } finally {
            writer.close();
        }
    } finally {
        out.close();
    }
    return hFileLocation.getAbsoluteFile().getAbsolutePath();
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), FileOutputStream (java.io.FileOutputStream), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), HFile (org.apache.hadoop.hbase.io.hfile.HFile), File (java.io.File), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
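
The TODO above concerns the awkward hand-wrapping of a local FileOutputStream. As a hedged alternative, HFile.WriterFactory can also be pointed at a filesystem Path via withPath, which avoids the stream wrapping (though it still creates a file). The configuration, path, and cell bytes below are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch only: write a single cell to a hypothetical path.
Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
    .withPath(fs, new Path("/tmp/bulkload-sketch.hfile"))
    .withFileContext(new HFileContext())
    .create();
try {
    byte[] b = Bytes.toBytes("example");
    writer.append(new KeyValue(b, b, b, 0L, KeyValue.Type.Put, b));
} finally {
    writer.close();
}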

Aggregations

Usage counts across the collected examples:

HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 55
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 51
Path (org.apache.hadoop.fs.Path): 34
Test (org.junit.Test): 31
KeyValue (org.apache.hadoop.hbase.KeyValue): 24
FileSystem (org.apache.hadoop.fs.FileSystem): 21
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 21
Cell (org.apache.hadoop.hbase.Cell): 12
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 11
ByteBuffer (java.nio.ByteBuffer): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 7
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
DataOutputStream (java.io.DataOutputStream): 5
SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff): 5
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 4
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 4
PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec): 4
EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker): 4