Use of org.apache.hadoop.hbase.HBaseTestingUtil in the Apache HBase project.
From the class TestServerLoadDurability, method setUp.
@Before
public void setUp() throws Exception {
  // Boot a two-region-server mini cluster and open a client connection to it.
  utility = new HBaseTestingUtil(conf);
  utility.startMiniCluster(2);
  conn = ConnectionFactory.createConnection(utility.getConfiguration());
  admin = conn.getAdmin();
  // Derive the table name from the running test method, dropping the last
  // three characters (presumably a per-variant suffix — TODO confirm against
  // the test method naming in this class).
  String currentMethod = testName.getMethodName();
  int trimmedLength = currentMethod.length() - 3;
  tableName = TableName.valueOf(currentMethod.substring(0, trimmedLength));
}
Use of org.apache.hadoop.hbase.HBaseTestingUtil in the Apache HBase project.
From the class TestVerifyReplicationCrossDiffHdfs, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
// Bring up two HBase mini clusters that share a single mini ZooKeeper
// ensemble, distinguished only by their znode parents (/1 vs /2), then
// wire a replication peer between them and start a mini MapReduce cluster
// for the verify-replication job. The statement order below matters:
// the ZK cluster must exist before either HBase cluster starts.
conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
util1 = new HBaseTestingUtil(conf1);
// Start ZK once via util1; util2 reuses the same ensemble below.
util1.startMiniZKCluster();
MiniZooKeeperCluster miniZK = util1.getZkCluster();
// Re-read conf1 from util1 so it reflects the started ZK cluster's settings.
conf1 = util1.getConfiguration();
conf2 = HBaseConfiguration.create(conf1);
conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
util2 = new HBaseTestingUtil(conf2);
// Share the already-running ZK ensemble instead of starting a second one.
util2.setZkCluster(miniZK);
util1.startMiniCluster();
util2.startMiniCluster();
// Identical test tables on both sides so replication has somewhere to land.
createTestingTable(util1.getAdmin());
createTestingTable(util2.getAdmin());
addTestingPeer();
LOG.info("Start to load some data to source cluster.");
loadSomeData();
LOG.info("Start mini MapReduce cluster.");
// The MR cluster also shares the single ZK ensemble.
mapReduceUtil.setZkCluster(miniZK);
mapReduceUtil.startMiniMapReduceCluster();
}
Use of org.apache.hadoop.hbase.HBaseTestingUtil in the Apache HBase project.
From the class TestTableMapReduceUtil, method testInitCredentialsForCluster2.
@Test
@SuppressWarnings("unchecked")
public void testInitCredentialsForCluster2() throws Exception {
  // Two independent secure mini clusters: the job is built against the first
  // ("local") cluster, while credentials are requested for the second
  // ("remote") one.
  HBaseTestingUtil localUtil = new HBaseTestingUtil();
  HBaseTestingUtil remoteUtil = new HBaseTestingUtil();
  File keytab = new File(localUtil.getDataTestDir("keytab").toUri().getPath());
  MiniKdc kdc = localUtil.setupMiniKdc(keytab);
  try {
    String username = UserGroupInformation.getLoginUser().getShortUserName();
    String userPrincipal = username + "/localhost";
    kdc.createPrincipal(keytab, userPrincipal, HTTP_PRINCIPAL);
    String fullPrincipal = userPrincipal + '@' + kdc.getRealm();
    loginUserFromKeytab(fullPrincipal, keytab.getAbsolutePath());
    try (Closeable localCluster = startSecureMiniCluster(localUtil, kdc, userPrincipal);
        Closeable remoteCluster = startSecureMiniCluster(remoteUtil, kdc, userPrincipal)) {
      Job job = Job.getInstance(localUtil.getConfiguration());
      TableMapReduceUtil.initCredentialsForCluster(job, remoteUtil.getConfiguration());
      Credentials credentials = job.getCredentials();
      Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
      // Exactly one delegation token should have been obtained, keyed by the
      // remote cluster's id, and it should belong to the logged-in principal.
      assertEquals(1, tokens.size());
      String clusterId = ZKClusterId.readClusterIdZNode(remoteUtil.getZooKeeperWatcher());
      Token<AuthenticationTokenIdentifier> tokenForCluster =
          (Token<AuthenticationTokenIdentifier>) credentials.getToken(new Text(clusterId));
      assertEquals(fullPrincipal, tokenForCluster.decodeIdentifier().getUsername());
    }
  } finally {
    // Always tear down the KDC, even if cluster startup or assertions fail.
    kdc.stop();
  }
}
Use of org.apache.hadoop.hbase.HBaseTestingUtil in the Apache HBase project.
From the class TestBlockReorder, method setUp.
@Before
public void setUp() throws Exception {
  htu = new HBaseTestingUtil();
  // Small block size plus triple replication so a single file spans several
  // replicated blocks across the three datanodes started below.
  Configuration testConf = htu.getConfiguration();
  testConf.setInt("dfs.blocksize", 1024);
  testConf.setInt("dfs.replication", 3);
  // One datanode per rack, each pinned to a distinct hostname.
  String[] racks = { "/r1", "/r2", "/r3" };
  String[] hosts = { host1, host2, host3 };
  htu.startMiniDFSCluster(3, racks, hosts);
  conf = htu.getConfiguration();
  cluster = htu.getDFSCluster();
  dfs = (DistributedFileSystem) FileSystem.get(conf);
}
Use of org.apache.hadoop.hbase.HBaseTestingUtil in the Apache HBase project.
From the class TestBlockReorderMultiBlocks, method setUp.
@Before
public void setUp() throws Exception {
  htu = new HBaseTestingUtil();
  // Use a tiny block size with replication factor 3 so test files are split
  // into multiple blocks, each replicated to all three datanodes.
  Configuration dfsConf = htu.getConfiguration();
  dfsConf.setInt("dfs.blocksize", 1024);
  dfsConf.setInt("dfs.replication", 3);
  // Three datanodes, one per rack, with fixed hostnames.
  String[] rackLocations = { "/r1", "/r2", "/r3" };
  String[] hostNames = { host1, host2, host3 };
  htu.startMiniDFSCluster(3, rackLocations, hostNames);
  conf = htu.getConfiguration();
  cluster = htu.getDFSCluster();
  dfs = (DistributedFileSystem) FileSystem.get(conf);
}
Aggregations