
Example 11 with HBaseTestingUtility

Use of org.apache.hadoop.hbase.HBaseTestingUtility in project hadoop by apache.

Class TestHBaseStorageFlowRunCompaction, method setupBeforeClass:

@BeforeClass
public static void setupBeforeClass() throws Exception {
    util = new HBaseTestingUtility();
    Configuration conf = util.getConfiguration();
    // the timeline service tables rely on cell tags, which require HFile format v3
    conf.setInt("hfile.format.version", 3);
    util.startMiniCluster();
    createSchema();
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility), BeforeClass (org.junit.BeforeClass)
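
The matching teardown is not shown on this page; a minimal sketch of what a companion @AfterClass (org.junit.AfterClass) method might look like, assuming the same static util field, is:

@AfterClass
public static void tearDownAfterClass() throws Exception {
    // sketch only: shut down the mini cluster started in setupBeforeClass()
    if (util != null) {
        util.shutdownMiniCluster();
    }
}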

Example 12 with HBaseTestingUtility

Use of org.apache.hadoop.hbase.HBaseTestingUtility in project hbase by apache.

Class TestClientOperationInterrupt, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = HBaseConfiguration.create();
    // install the interrupt-testing coprocessor on every user region
    conf.setStrings(CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY, TestCoprocessor.class.getName());
    util = new HBaseTestingUtility(conf);
    util.startMiniCluster();
    Admin admin = util.getAdmin();
    // drop any leftover table from a previous run before recreating it
    if (admin.tableExists(tableName)) {
        if (admin.isTableEnabled(tableName)) {
            admin.disableTable(tableName);
        }
        admin.deleteTable(tableName);
    }
    Table ht = util.createTable(tableName, new byte[][] { dummy, test });
    // seed one row so the test has something to read
    Put p = new Put(row1);
    p.addColumn(dummy, dummy, dummy);
    ht.put(p);
}
Also used: HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility), BeforeClass (org.junit.BeforeClass)
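
The setup only writes a single row. As a hedged illustration (not part of TestClientOperationInterrupt), reading it back through the same Table handle could look like the snippet below, using org.apache.hadoop.hbase.client.Get, org.apache.hadoop.hbase.client.Result and org.apache.hadoop.hbase.util.Bytes:

// sketch only: verify the seeded row is visible
Get get = new Get(row1);
get.addColumn(dummy, dummy);
Result result = ht.get(get);
assertTrue(Bytes.equals(dummy, result.getValue(dummy, dummy)));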

Example 13 with HBaseTestingUtility

Use of org.apache.hadoop.hbase.HBaseTestingUtility in project hbase by apache.

Class TestCacheConfig, method testFileBucketCacheConfig:

@Test
public void testFileBucketCacheConfig() throws IOException {
    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);
    try {
        // point the bucket cache at a file under the per-test data directory
        Path p = new Path(htu.getDataTestDir(), "bc.txt");
        FileSystem fs = FileSystem.get(this.conf);
        fs.create(p).close();
        this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);
        doBucketCacheConfigTest();
    } finally {
        htu.cleanupTestDir();
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility), FileSystem (org.apache.hadoop.fs.FileSystem), Test (org.junit.Test)
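
The getDataTestDir()/cleanupTestDir() pattern used above is useful on its own. A minimal sketch of it, isolated from the bucket-cache configuration and with a hypothetical test method name, could look like this:

@Test
public void testScratchFileSketch() throws IOException {
    // sketch only: create a scratch file under the per-test data directory
    HBaseTestingUtility htu = new HBaseTestingUtility();
    FileSystem fs = FileSystem.get(htu.getConfiguration());
    try {
        Path scratch = new Path(htu.getDataTestDir("scratch"), "example.txt");
        fs.create(scratch).close();
        assertTrue(fs.exists(scratch));
    } finally {
        // remove everything written under the test data directory
        htu.cleanupTestDir();
    }
}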

Example 14 with HBaseTestingUtility

Use of org.apache.hadoop.hbase.HBaseTestingUtility in project hbase by apache.

Class TestFileLink, method testHDFSLinkReadDuringDelete:

/**
   * Test that the link is still readable even when the current file gets deleted.
   *
   * NOTE: This test is valid only on HDFS.
   * When a file is deleted from a local file-system, it is simply 'unlinked'.
   * The inode, which contains the file's data, is not deleted until all
   * processes have finished with it.
   * In HDFS, once a read goes beyond the cached block locations, a query to
   * the namenode is performed using the filename, and the deleted file no
   * longer exists (FileNotFoundException).
   */
@Test
public void testHDFSLinkReadDuringDelete() throws Exception {
    HBaseTestingUtility testUtil = new HBaseTestingUtility();
    Configuration conf = testUtil.getConfiguration();
    conf.setInt("dfs.blocksize", 1024 * 1024);
    conf.setInt("dfs.client.read.prefetch.size", 2 * 1024 * 1024);
    testUtil.startMiniDFSCluster(1);
    MiniDFSCluster cluster = testUtil.getDFSCluster();
    FileSystem fs = cluster.getFileSystem();
    assertEquals("hdfs", fs.getUri().getScheme());
    try {
        List<Path> files = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            Path path = new Path(String.format("test-data-%d", i));
            writeSomeData(fs, path, 1 << 20, (byte) i);
            files.add(path);
        }
        FileLink link = new FileLink(files);
        FSDataInputStream in = link.open(fs);
        try {
            byte[] data = new byte[8192];
            int n;
            // Switch to file 1
            n = in.read(data);
            dataVerify(data, n, (byte) 0);
            fs.delete(files.get(0), true);
            skipBuffer(in, (byte) 0);
            // Switch to file 2
            n = in.read(data);
            dataVerify(data, n, (byte) 1);
            fs.delete(files.get(1), true);
            skipBuffer(in, (byte) 1);
            // Switch to file 3
            n = in.read(data);
            dataVerify(data, n, (byte) 2);
            fs.delete(files.get(2), true);
            skipBuffer(in, (byte) 2);
            // No more files available
            try {
                n = in.read(data);
                assert (n <= 0);
            } catch (FileNotFoundException e) {
                assertTrue(true);
            }
        } finally {
            in.close();
        }
    } finally {
        testUtil.shutdownMiniCluster();
    }
}
Also used: Path (org.apache.hadoop.fs.Path), MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster), Configuration (org.apache.hadoop.conf.Configuration), HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayList (java.util.ArrayList), FileNotFoundException (java.io.FileNotFoundException), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Test (org.junit.Test)
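
writeSomeData and dataVerify are private helpers of TestFileLink whose bodies are not shown on this page. Purely as hedged reconstructions inferred from the call sites above (the real helpers may differ), and assuming org.apache.hadoop.fs.FSDataOutputStream and JUnit's assertEquals, they could be written as:

// hypothetical reconstructions, inferred from the call sites only
private static void writeSomeData(FileSystem fs, Path path, long size, byte v) throws IOException {
    // fill the file with a single marker byte so reads can be attributed to it
    byte[] data = new byte[4096];
    java.util.Arrays.fill(data, v);
    try (FSDataOutputStream out = fs.create(path)) {
        long written = 0;
        while (written < size) {
            int len = (int) Math.min(data.length, size - written);
            out.write(data, 0, len);
            written += len;
        }
    }
}

private static void dataVerify(byte[] data, int n, byte expected) {
    // every byte read should carry the marker value of the file it came from
    for (int i = 0; i < n; i++) {
        assertEquals(expected, data[i]);
    }
}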

Example 15 with HBaseTestingUtility

Use of org.apache.hadoop.hbase.HBaseTestingUtility in project hbase by apache.

Class TestFileLink, method testHDFSLinkReadDuringRename:

/**
   * Test, on HDFS, that the FileLink is still readable
   * even when the current file gets renamed.
   */
@Test
public void testHDFSLinkReadDuringRename() throws Exception {
    HBaseTestingUtility testUtil = new HBaseTestingUtility();
    Configuration conf = testUtil.getConfiguration();
    conf.setInt("dfs.blocksize", 1024 * 1024);
    conf.setInt("dfs.client.read.prefetch.size", 2 * 1024 * 1024);
    testUtil.startMiniDFSCluster(1);
    MiniDFSCluster cluster = testUtil.getDFSCluster();
    FileSystem fs = cluster.getFileSystem();
    assertEquals("hdfs", fs.getUri().getScheme());
    try {
        testLinkReadDuringRename(fs, testUtil.getDefaultRootDirPath());
    } finally {
        testUtil.shutdownMiniCluster();
    }
}
Also used: MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster), Configuration (org.apache.hadoop.conf.Configuration), HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility), FileSystem (org.apache.hadoop.fs.FileSystem), Test (org.junit.Test)
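
The test above delegates to a shared testLinkReadDuringRename(fs, rootDir) helper that is not reproduced on this page. A hedged sketch of the scenario it exercises, reusing the FileLink(List<Path>) constructor and the writeSomeData/dataVerify helpers referenced in the previous example, might look like the following (fs and rootDir are assumed to come from the mini DFS cluster, e.g. cluster.getFileSystem() and testUtil.getDefaultRootDirPath()):

// sketch only: not the actual TestFileLink helper
Path original = new Path(rootDir, "original-file");
Path archived = new Path(rootDir, "archived-file");
writeSomeData(fs, original, 1 << 20, (byte) 2);

// the link lists every location where the data may live
FileLink link = new FileLink(java.util.Arrays.asList(original, archived));
try (FSDataInputStream in = link.open(fs)) {
    byte[] data = new byte[8192];
    int n = in.read(data);
    dataVerify(data, n, (byte) 2);

    // rename the file while the stream is open; the link is expected to keep
    // the data readable by falling back to the alternative location
    assertTrue(fs.rename(original, archived));
    n = in.read(data);
    dataVerify(data, n, (byte) 2);
}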

Aggregations

HBaseTestingUtility (org.apache.hadoop.hbase.HBaseTestingUtility): 136 uses
Configuration (org.apache.hadoop.conf.Configuration): 50 uses
BeforeClass (org.junit.BeforeClass): 49 uses
Test (org.junit.Test): 42 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 35 uses
Path (org.apache.hadoop.fs.Path): 29 uses
Admin (org.apache.hadoop.hbase.client.Admin): 24 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 22 uses
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 20 uses
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 18 uses
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 16 uses
Before (org.junit.Before): 14 uses
MiniHBaseCluster (org.apache.hadoop.hbase.MiniHBaseCluster): 11 uses
ZooKeeperWatcher (org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher): 11 uses
MiniZooKeeperCluster (org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster): 10 uses
Table (org.apache.hadoop.hbase.client.Table): 8 uses
HFileSystem (org.apache.hadoop.hbase.fs.HFileSystem): 8 uses
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 8 uses
FileStatus (org.apache.hadoop.fs.FileStatus): 7 uses
Result (org.apache.hadoop.hbase.client.Result): 7 uses