Usage example of org.smartdata.hdfs.scheduler.CacheScheduler from the Intel-bigdata/SSM project: the testCacheUncacheFile method of the TestCacheScheduler class.
@Test(timeout = 100000)
public void testCacheUncacheFile() throws Exception {
  waitTillSSMExitSafeMode();

  // Create a small test file in HDFS to run the cache/uncache actions against.
  String filePath = "/testFile";
  FSDataOutputStream out = dfs.create(new Path(filePath));
  out.writeChars("test content");
  out.close();

  CmdletManager cmdletManager = ssm.getCmdletManager();

  // Submit the cache action and poll until it completes. The overall wait is
  // bounded by the @Test timeout; sleep between polls to avoid busy-spinning.
  long cid = cmdletManager.submitCmdlet("cache -file " + filePath);
  while (!CmdletState.DONE.equals(cmdletManager.getCmdletInfo(cid).getState())) {
    Thread.sleep(100);
  }

  // Verify that SSM created its cache pool. The previous version returned from
  // the test as soon as the pool was found (skipping every assertion below) and
  // called fail() on the first entry that was not the SSM pool; instead, scan
  // all pools and fail only if none of them is the SSM pool.
  boolean ssmPoolFound = false;
  RemoteIterator<CachePoolEntry> poolEntries = dfsClient.listCachePools();
  while (poolEntries.hasNext()) {
    CachePoolEntry poolEntry = poolEntries.next();
    if (poolEntry.getInfo().getPoolName().equals(CacheScheduler.SSM_POOL)) {
      ssmPoolFound = true;
      break;
    }
  }
  if (!ssmPoolFound) {
    fail("A cache pool should be created by SSM: " + CacheScheduler.SSM_POOL);
  }

  // Currently, there is only one scheduler for the cache action.
  ActionScheduler actionScheduler = cmdletManager.getSchedulers("cache").get(0);
  assertTrue(actionScheduler instanceof CacheScheduler);
  Set<String> fileLock = ((CacheScheduler) actionScheduler).getFileLock();
  // No file should remain locked after the cache action has finished.
  assertTrue(fileLock.isEmpty());

  // Submit the uncache action and poll until it completes.
  long cid1 = cmdletManager.submitCmdlet("uncache -file " + filePath);
  while (!CmdletState.DONE.equals(cmdletManager.getCmdletInfo(cid1).getState())) {
    Thread.sleep(100);
  }
  // No file should remain locked after the uncache action has finished.
  assertTrue(fileLock.isEmpty());
}
Aggregations