Use of org.smartdata.hdfs.client.SmartDFSClient in project SSM by Intel-bigdata.
Class TestSmallFileRead, method testRead.
@Test
public void testRead() throws Exception {
  waitTillSSMExitSafeMode();
  SmartDFSClient smartDFSClient = new SmartDFSClient(smartContext.getConf());

  // Sequential read: a compacted small file must be exposed as exactly one block.
  // try-with-resources ensures the stream is closed even if an assertion fails.
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    Assert.assertEquals(1, is.getAllBlocks().size());
    Assert.assertEquals(fileLength, is.getFileLength());
    Assert.assertEquals(0, is.getPos());
    // Single-byte read should return the first byte of the file.
    int byteRead = is.read();
    Assert.assertEquals(ret, byteRead);
    // One byte already consumed, so a bulk read returns the remaining bytes.
    byte[] bytes = new byte[50];
    Assert.assertEquals(fileLength - 1, is.read(bytes));
  }

  // ByteBuffer read from a fresh stream returns the whole file.
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    ByteBuffer buffer = ByteBuffer.allocate(50);
    Assert.assertEquals(fileLength, is.read(buffer));
  }

  // Positional read starting at offset 2 returns the remaining fileLength - 2
  // bytes (assumes fileLength fits within the 50-byte buffer at offset 1).
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    byte[] bytes = new byte[50];
    Assert.assertEquals(fileLength - 2, is.read(2, bytes, 1, 50));
  }
}
Use of org.smartdata.hdfs.client.SmartDFSClient in project SSM by Intel-bigdata.
Class TestSmartDFSClientReadECData, method testReadECDataCreatedBySSM.
@Test
public void testReadECDataCreatedBySSM() throws IOException {
  cluster.getFileSystem().mkdirs(new Path(TEST_DIR));
  String srcPath = "/ec/a.txt";
  createTestFile(srcPath, 300000);
  SmartConf smartConf = smartContext.getConf();
  // The below single configuration is in order to make sure a SmartDFSClient can be created
  // successfully, and the actual value for this property doesn't matter.
  smartConf.set(SmartConfKeys.SMART_SERVER_RPC_ADDRESS_KEY,
      SmartConfKeys.SMART_SERVER_RPC_ADDRESS_DEFAULT);
  SmartDFSClient smartDFSClient = new SmartDFSClient(smartConf);

  // Convert the replicated file to EC via the SSM action under test.
  ErasureCodingAction ecAction = new ErasureCodingAction();
  ecAction.setContext(smartContext);
  String ecTmpPath = "/ssm/ec_tmp/tmp_file";
  Map<String, String> args = new HashMap<>();
  args.put(HdfsAction.FILE_PATH, srcPath);
  args.put(ErasureCodingBase.EC_TMP, ecTmpPath);
  args.put(ErasureCodingAction.EC_POLICY_NAME, ecPolicy.getName());
  ecAction.init(args);
  ecAction.run();
  assertTrue(ecAction.getExpectedAfterRun());
  // Compare with equals, not ==: still passes for the cached policy singleton,
  // and gives a useful diagnostic if a distinct-but-equal instance is returned.
  Assert.assertEquals(ecPolicy, dfsClient.getErasureCodingPolicy(srcPath));

  // In unit test, a DFSInputStream can still be used to read EC data. But in real environment,
  // DFSStripedInputStream is required, otherwise, block not found exception will occur.
  // try-with-resources closes the stream even if an assertion or read fails.
  try (DFSInputStream dfsInputStream = smartDFSClient.open(srcPath)) {
    Assert.assertTrue(dfsInputStream instanceof DFSStripedInputStream);
    int bufferSize = 64 * 1024;
    byte[] buffer = new byte[bufferSize];
    // Drain the stream to verify the EC data is fully readable.
    while (dfsInputStream.read(buffer, 0, bufferSize) != -1) {
    }
  }
}
Use of org.smartdata.hdfs.client.SmartDFSClient in project SSM by Intel-bigdata.
Class TestSmartDFSClientReadECData, method testReadECDataCreatedByHDFS.
@Test
public void testReadECDataCreatedByHDFS() throws IOException {
  cluster.getFileSystem().mkdirs(new Path(TEST_DIR));
  // Set an EC policy for this test dir, so the file created under it will
  // be stored by this EC policy.
  dfsClient.setErasureCodingPolicy(TEST_DIR, ecPolicy.getName());
  String srcPath = "/ec/a.txt";
  createTestFile(srcPath, 300000);
  // Compare with equals, not ==: still passes for the cached policy singleton,
  // and gives a useful diagnostic if a distinct-but-equal instance is returned.
  Assert.assertEquals(ecPolicy, dfsClient.getErasureCodingPolicy(srcPath));
  SmartConf smartConf = smartContext.getConf();
  // The below single configuration is in order to make sure a SmartDFSClient can be created
  // successfully, and the actual value for this property doesn't matter.
  smartConf.set(SmartConfKeys.SMART_SERVER_RPC_ADDRESS_KEY,
      SmartConfKeys.SMART_SERVER_RPC_ADDRESS_DEFAULT);
  SmartDFSClient smartDFSClient = new SmartDFSClient(smartConf);

  // In unit test, a DFSInputStream can still be used to read EC data. But in real environment,
  // DFSStripedInputStream is required, otherwise, block not found exception will occur.
  // try-with-resources closes the stream even if an assertion or read fails.
  try (DFSInputStream dfsInputStream = smartDFSClient.open(srcPath)) {
    Assert.assertTrue(dfsInputStream instanceof DFSStripedInputStream);
    int bufferSize = 64 * 1024;
    byte[] buffer = new byte[bufferSize];
    // Drain the stream to verify the EC data is fully readable.
    while (dfsInputStream.read(buffer, 0, bufferSize) != -1) {
    }
  }
}
Aggregations