Use of org.smartdata.server.engine.CmdletManager in project SSM by Intel-bigdata:
the method testUnsupportedMethod of the class TestCompressDecompress.
@Test
public void testUnsupportedMethod() throws Exception {
  // Concat, truncate and append are not supported on compressed files.
  // Create a raw file and compress it first.
  Path path = new Path("/test/compress_files/");
  dfs.mkdirs(path);
  int rawLength = 1024 * 1024 * 8;
  String fileName = "/test/compress_files/file_0";
  DFSTestUtil.createFile(dfs, new Path(fileName), rawLength, (short) 1, 1);
  int bufSize = 1024 * 1024;
  waitTillSSMExitSafeMode();
  CmdletManager cmdletManager = ssm.getCmdletManager();
  // Compress the file and wait until the action finishes.
  long cmdId = cmdletManager.submitCmdlet("compress -file " + fileName + " -bufSize " + bufSize + " -codec " + codec);
  waitTillActionDone(cmdId);
  // try-with-resources: SmartDFSClient is Closeable; the original leaked it.
  try (SmartDFSClient smartDFSClient = new SmartDFSClient(smartContext.getConf())) {
    // concat on a compressed file must be rejected. The original test passed
    // silently when no exception was thrown; fail explicitly in that case.
    try {
      smartDFSClient.concat(fileName + "target", new String[] { fileName });
      Assert.fail("Expected concat on a compressed file to throw IOException");
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("Compressed"));
    }
    /*try {
      smartDFSClient.truncate(fileName, 100L);
    } catch (IOException e) {
      Assert.assertTrue(e.getMessage().contains("Compressed"));
    }*/
  }
}
Use of org.smartdata.server.engine.CmdletManager in project SSM by Intel-bigdata:
the method waitTillActionDone of the class TestCompressDecompress.
/**
 * Polls the cmdlet manager once per second until the cmdlet reaches DONE,
 * fails the test if it reaches FAILED, and throws after ~20 polls.
 *
 * @param cmdId id of the cmdlet to wait for
 * @throws Exception on timeout, or if interrupted while sleeping
 */
private void waitTillActionDone(long cmdId) throws Exception {
  int polls = 0;
  while (true) {
    Thread.sleep(1000);
    // Count EVERY poll toward the ~20s timeout, including polls where the
    // cmdlet info is not yet visible. The original only incremented the
    // counter after the null check, so a cmdlet that never registered
    // would make this loop spin forever.
    if (++polls > 20) {
      throw new Exception("Time out in waiting for cmdlet: " + cmdId);
    }
    CmdletManager cmdletManager = ssm.getCmdletManager();
    CmdletInfo info = cmdletManager.getCmdletInfo(cmdId);
    if (info == null) {
      continue;
    }
    CmdletState state = info.getState();
    if (state == CmdletState.DONE) {
      return;
    } else if (state == CmdletState.FAILED) {
      // Reasonably assume that there is only one action wrapped by a given
      // cmdlet; reuse the info we already fetched instead of re-querying.
      long aid = info.getAids().get(0);
      Assert.fail("Action failed. " + cmdletManager.getActionInfo(aid).getLog());
    } else {
      System.out.println(state);
    }
  }
}
Use of org.smartdata.server.engine.CmdletManager in project SSM by Intel-bigdata:
the method testDecompress of the class TestCompressDecompress.
@Test
public void testDecompress() throws Exception {
  int arraySize = 1024 * 1024 * 8;
  String filePath = "/ssm/compression/file4";
  prepareFile(filePath, arraySize);
  dfsClient.setStoragePolicy(filePath, "COLD");
  HdfsFileStatus fileStatusBefore = dfsClient.getFileInfo(filePath);
  CmdletManager cmdletManager = ssm.getCmdletManager();
  // Expect that a common (uncompressed) file cannot be decompressed.
  List<ActionScheduler> schedulers = cmdletManager.getSchedulers("decompress");
  // assertEquals reports expected/actual on failure, unlike assertTrue(size() == 1).
  Assert.assertEquals(1, schedulers.size());
  ActionScheduler scheduler = schedulers.get(0);
  Assert.assertTrue(scheduler instanceof CompressionScheduler);
  Assert.assertFalse(((CompressionScheduler) scheduler).supportDecompression(filePath));
  // Compress the given file and verify its file state flips to compressed.
  long cmdId = cmdletManager.submitCmdlet("compress -file " + filePath + " -codec " + codec);
  waitTillActionDone(cmdId);
  FileState fileState = HadoopUtil.getFileState(dfsClient, filePath);
  Assert.assertTrue(fileState instanceof CompressionFileState);
  // The storage policy should not be changed by compression.
  assertStoragePolicyUnchanged(fileStatusBefore, filePath);
  // Try to decompress the now-compressed file; the compressed state must clear.
  cmdId = cmdletManager.submitCmdlet("decompress -file " + filePath);
  waitTillActionDone(cmdId);
  fileState = HadoopUtil.getFileState(dfsClient, filePath);
  Assert.assertFalse(fileState instanceof CompressionFileState);
  // The storage policy should not be changed by decompression either.
  assertStoragePolicyUnchanged(fileStatusBefore, filePath);
}

/**
 * Asserts the file's current storage policy equals the one captured in
 * {@code before}. Policy id 0 means "unspecified", in which case there is
 * nothing meaningful to compare (mirrors the original inline checks).
 */
private void assertStoragePolicyUnchanged(HdfsFileStatus before, String filePath) throws Exception {
  if (before.getStoragePolicy() != 0) {
    Assert.assertEquals(before.getStoragePolicy(), dfsClient.getFileInfo(filePath).getStoragePolicy());
  }
}
Use of org.smartdata.server.engine.CmdletManager in project SSM by Intel-bigdata:
the method testRename of the class TestCompressDecompress.
@Test
public void testRename() throws Exception {
  // Create a raw file, compress it, then rename it and verify the
  // uncompressed length is still reported for the renamed file.
  Path path = new Path("/test/compress_files/");
  dfs.mkdirs(path);
  int rawLength = 1024 * 1024 * 8;
  String fileName = "/test/compress_files/file_0";
  DFSTestUtil.createFile(dfs, new Path(fileName), rawLength, (short) 1, 1);
  int bufSize = 1024 * 1024;
  waitTillSSMExitSafeMode();
  CmdletManager cmdletManager = ssm.getCmdletManager();
  // Compress the file and wait for the action to complete.
  long cmdId = cmdletManager.submitCmdlet("compress -file " + fileName + " -bufSize " + bufSize + " -codec " + codec);
  waitTillActionDone(cmdId);
  // try-with-resources: SmartDFSClient is Closeable; the original leaked it.
  try (SmartDFSClient smartDFSClient = new SmartDFSClient(smartContext.getConf())) {
    smartDFSClient.rename("/test/compress_files/file_0", "/test/compress_files/file_4");
    Assert.assertTrue(smartDFSClient.exists("/test/compress_files/file_4"));
    HdfsFileStatus fileStatus = smartDFSClient.getFileInfo("/test/compress_files/file_4");
    // SmartDFSClient should report the original (uncompressed) length.
    Assert.assertEquals(rawLength, fileStatus.getLen());
  }
}
Use of org.smartdata.server.engine.CmdletManager in project SSM by Intel-bigdata:
the method testCompressedFileRandomRead of the class TestCompressDecompress.
// @Test(timeout = 90000)
// public void testCompressEmptyFile() throws Exception {
// waitTillSSMExitSafeMode();
//
// // initDB();
// String fileName = "/ssm/compression/file2";
// prepareFile(fileName, 0);
// MetaStore metaStore = ssm.getMetaStore();
//
// int bufSize = 1024 * 1024;
// CmdletManager cmdletManager = ssm.getCmdletManager();
// long cmdId = cmdletManager.submitCmdlet("compress -file " + fileName
// + " -bufSize " + bufSize + " -compressImpl " + compressionImpl);
//
// waitTillActionDone(cmdId);
// FileState fileState = metaStore.getFileState(fileName);
// while (!fileState.getFileType().equals(FileState.FileType.COMPRESSION)) {
// Thread.sleep(200);
// fileState = metaStore.getFileState(fileName);
// }
//
// // metastore test
// // Assert.assertEquals(FileState.FileType.COMPRESSION, fileState.getFileType());
// Assert.assertEquals(FileState.FileStage.DONE, fileState.getFileStage());
// Assert.assertTrue(fileState instanceof CompressionFileState);
// CompressionFileState compressionFileState = (CompressionFileState) fileState;
// Assert.assertEquals(fileName, compressionFileState.getPath());
// Assert.assertEquals(bufSize, compressionFileState.getBufferSize());
// Assert.assertEquals(compressionImpl, compressionFileState.getCompressionImpl());
// Assert.assertEquals(0, compressionFileState.getOriginalLength());
// Assert.assertEquals(0, compressionFileState.getCompressedLength());
//
// // File length test
// Assert.assertEquals(0, dfsClient.getFileInfo(fileName).getLen());
// }
@Test
public void testCompressedFileRandomRead() throws Exception {
  // if (!loadedNative()) {
  // return;
  // }
  waitTillSSMExitSafeMode();
  // initDB();
  int arraySize = 1024 * 1024 * 8;
  String fileName = "/ssm/compression/file3";
  byte[] bytes = prepareFile(fileName, arraySize);
  int bufSize = 1024 * 1024;
  CmdletManager cmdletManager = ssm.getCmdletManager();
  long cmdId = cmdletManager.submitCmdlet("compress -file " + fileName + " -bufSize " + bufSize + " -codec " + codec);
  waitTillActionDone(cmdId);
  // Random reads through the smart client must decode to the original bytes.
  Random rnd = new Random(System.currentTimeMillis());
  int randomReadSize = 500;
  byte[] randomReadBuffer = new byte[randomReadSize];
  // try-with-resources: the original never closed the input stream.
  try (DFSInputStream dfsInputStream = smartDFSClient.open(fileName)) {
    for (int i = 0; i < 5; i++) {
      int pos = rnd.nextInt(arraySize - randomReadSize);
      byte[] subBytes = Arrays.copyOfRange(bytes, pos, pos + randomReadSize);
      dfsInputStream.seek(pos);
      Assert.assertEquals(pos, dfsInputStream.getPos());
      int off = 0;
      while (off < randomReadSize) {
        int len = dfsInputStream.read(randomReadBuffer, off, randomReadSize - off);
        // read() returns -1 at EOF; the original added it to `off`, which
        // would walk the offset backwards and loop forever on a short file.
        if (len < 0) {
          Assert.fail("Unexpected EOF at position " + (pos + off));
        }
        off += len;
      }
      Assert.assertArrayEquals(subBytes, randomReadBuffer);
      Assert.assertEquals(pos + randomReadSize, dfsInputStream.getPos());
    }
  }
}
Aggregations