Example use of org.apache.hadoop.hbase.HBaseTestingUtility in the Apache HBase project, from the class TestReplicationTrackerZKImpl, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Stand up a mini ZooKeeper cluster for the replication-tracker tests.
utility = new HBaseTestingUtility();
utility.startMiniZKCluster();
conf = utility.getConfiguration();
// Pre-create the region-server parent znode so trackers have a node to watch.
ZooKeeperWatcher watcher = HBaseTestingUtility.getZooKeeperWatcher(utility);
ZKUtil.createWithParents(watcher, watcher.znodePaths.rsZNode);
}
Example use of org.apache.hadoop.hbase.HBaseTestingUtility in the Apache HBase project, from the class TestBlockReorder, method setUp.
@Before
public void setUp() throws Exception {
htu = new HBaseTestingUtility();
// Shrink the block size so a small test file spans multiple HDFS blocks,
// and replicate each block to all three datanodes.
Configuration c = htu.getConfiguration();
c.setInt("dfs.blocksize", 1024);
c.setInt("dfs.replication", 3);
// Three datanodes, each in its own rack with a distinct hostname.
htu.startMiniDFSCluster(3, new String[] { "/r1", "/r2", "/r3" },
    new String[] { host1, host2, host3 });
conf = htu.getConfiguration();
cluster = htu.getDFSCluster();
dfs = (DistributedFileSystem) FileSystem.get(conf);
}
Example use of org.apache.hadoop.hbase.HBaseTestingUtility in the Apache HBase project, from the class TestFSUtils, method testSetWALRootDir.
@Test
public void testSetWALRootDir() throws Exception {
// Setting the WAL root dir must round-trip through the configuration key.
HBaseTestingUtility htu = new HBaseTestingUtility();
Configuration conf = htu.getConfiguration();
Path walDir = new Path("file:///hbase/root");
FSUtils.setWALRootDir(conf, walDir);
assertEquals(walDir.toString(), conf.get(HFileSystem.HBASE_WAL_DIR));
}
Example use of org.apache.hadoop.hbase.HBaseTestingUtility in the Apache HBase project, from the class TestFSUtils, method testDFSHedgedReadMetrics.
/**
 * Ugly test that ensures we can get at the hedged read counters in dfsclient.
 * Does a bit of preading with hedged reads enabled using code taken from hdfs TestPread.
 * @throws Exception
 */
@Test
public void testDFSHedgedReadMetrics() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
// Enable hedged reads and set it so the threshold is really low.
// Most of this test is taken from HDFS, from TestPread.
Configuration conf = htu.getConfiguration();
conf.setInt(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THREADPOOL_SIZE, 5);
conf.setLong(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THRESHOLD_MILLIS, 0);
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);
conf.setLong(DFSConfigKeys.DFS_CLIENT_READ_PREFETCH_SIZE_KEY, 4096);
// Set short retry timeouts so this test runs faster
conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 0);
conf.setBoolean("dfs.datanode.transferTo.allowed", false);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
// Nested try/finally so the cluster is shut down even when the early
// assertions, getFileSystem(), or fileSys.close() throw. The original
// single finally {fileSys.close(); cluster.shutdown();} leaked the
// cluster whenever close() threw, and leaked both when the code before
// the try block failed.
try {
  // Get the metrics. Should be empty.
  DFSHedgedReadMetrics metrics = FSUtils.getDFSHedgedReadMetrics(conf);
  assertEquals(0, metrics.getHedgedReadOps());
  FileSystem fileSys = cluster.getFileSystem();
  try {
    Path p = new Path("preadtest.dat");
    // We need > 1 blocks to test out the hedged reads.
    DFSTestUtil.createFile(fileSys, p, 12 * blockSize, 12 * blockSize, blockSize, (short) 3, seed);
    pReadFile(fileSys, p);
    cleanupFile(fileSys, p);
    assertTrue(metrics.getHedgedReadOps() > 0);
  } finally {
    fileSys.close();
  }
} finally {
  cluster.shutdown();
}
}
Example use of org.apache.hadoop.hbase.HBaseTestingUtility in the Apache HBase project, from the class TestFSUtils, method testGetWALRootDir.
@Test
public void testGetWALRootDir() throws IOException {
HBaseTestingUtility htu = new HBaseTestingUtility();
Configuration conf = htu.getConfiguration();
Path rootDir = new Path("file:///hbase/root");
Path walDir = new Path("file:///hbase/logroot");
// Until a WAL root dir is set explicitly, it defaults to the HBase root dir.
FSUtils.setRootDir(conf, rootDir);
assertEquals(rootDir, FSUtils.getRootDir(conf));
assertEquals(rootDir, FSUtils.getWALRootDir(conf));
// Once set, the WAL root dir overrides that default.
FSUtils.setWALRootDir(conf, walDir);
assertEquals(walDir, FSUtils.getWALRootDir(conf));
}
Aggregations