Usage of org.apache.hadoop.hdfs.DFSInputStream in the SSM project (Intel-bigdata): class ReadFileAction, method execute().
@Override
protected void execute() throws Exception {
  // Validate required argument before touching HDFS.
  if (filePath == null) {
    throw new IllegalArgumentException("File parameter is missing.");
  }
  appendLog(String.format("Action starts at %s : Read %s", Utils.getFormatedCurrentTime(), filePath));
  if (!dfsClient.exists(filePath)) {
    throw new ActionException("ReadFile Action fails, file " + filePath + " doesn't exist!");
  }
  // Fix: try-with-resources guarantees the stream is closed even when read()
  // throws; the original skipped close() on exception and leaked the stream.
  try (DFSInputStream dfsInputStream = dfsClient.open(filePath)) {
    byte[] buffer = new byte[bufferSize];
    // Drain the file to EOF; the data itself is discarded — this action only
    // exercises the HDFS read path (e.g. to trigger SSM access accounting).
    while (dfsInputStream.read(buffer, 0, bufferSize) != -1) {
      // intentionally empty: reading until end of file
    }
  }
}
Usage of org.apache.hadoop.hdfs.DFSInputStream in the SSM project (Intel-bigdata): class SmartDFSClient, method open().
/**
 * Opens {@code fd} through the underlying DFS client. A zero-length stream is
 * treated as a possibly SSM-managed file: the plain stream is closed and the
 * file is re-opened via {@link SmartInputStreamFactory} according to its
 * recorded {@link FileState}, unless the file is still being PROCESSED, in
 * which case an {@link IOException} is raised. Every open is reported as a
 * file-access event.
 */
@Override
public DFSInputStream open(HdfsPathHandle fd, int buffersize, boolean verifyChecksum) throws IOException {
  final String src = fd.getPath();
  DFSInputStream in = super.open(fd, buffersize, verifyChecksum);
  if (in.getFileLength() == 0) {
    // The raw stream is useless for an SSM-transformed file; release it first.
    in.close();
    final FileState fileState = getFileState(src);
    if (fileState.getFileStage().equals(FileState.FileStage.PROCESSING)) {
      throw new IOException("Cannot open " + src + " when it is under PROCESSING to " + fileState.getFileType());
    }
    in = SmartInputStreamFactory.create(this, src, verifyChecksum, fileState);
  }
  reportFileAccessEvent(src);
  return in;
}
Usage of org.apache.hadoop.hdfs.DFSInputStream in the SSM project (Intel-bigdata): class ReadFileAction, method execute() (older variant).
@Override
protected void execute() {
  ActionStatus actionStatus = getActionStatus();
  actionStatus.begin();
  try {
    HdfsFileStatus fileStatus = dfsClient.getFileInfo(filePath);
    if (fileStatus == null) {
      // Fix: the original printed this message but fell through and still
      // attempted to open the missing file; bail out and mark the failure.
      resultOut.println("ReadFile Action fails, file doesn't exist!");
      actionStatus.setSuccessful(false);
      return;
    }
    // Fix: try-with-resources closes the stream even when read() throws;
    // the original leaked it on any mid-read IOException.
    try (DFSInputStream dfsInputStream = dfsClient.open(filePath)) {
      byte[] buffer = new byte[bufferSize];
      // read from HDFS to EOF; the data itself is discarded
      while (dfsInputStream.read(buffer, 0, bufferSize) != -1) {
      }
    }
    actionStatus.setSuccessful(true);
  } catch (IOException e) {
    actionStatus.setSuccessful(false);
    resultOut.println("ReadFile Action fails!\n" + e.getMessage());
  } finally {
    // Always record the end time, success or failure.
    actionStatus.end();
  }
}
Usage of org.apache.hadoop.hdfs.DFSInputStream in the SSM project (Intel-bigdata): class SmartDFSClient, method open(String, int, boolean).
/**
 * Opens {@code src} via the vanilla HDFS client, then reports the access so
 * SSM's rule engine can track how hot the file is. The stream is returned
 * unchanged.
 */
@Override
public DFSInputStream open(String src, int buffersize, boolean verifyChecksum) throws IOException, UnresolvedLinkException {
  final DFSInputStream stream = super.open(src, buffersize, verifyChecksum);
  reportFileAccessEvent(src);
  return stream;
}
Usage of org.apache.hadoop.hdfs.DFSInputStream in the SSM project (Intel-bigdata): class TestSmallFileRead, method testRead().
@Test
public void testRead() throws Exception {
  waitTillSSMExitSafeMode();
  SmartDFSClient smartDFSClient = new SmartDFSClient(smartContext.getConf());
  // Fix: the original leaked each stream when any assertion between open()
  // and close() failed; try-with-resources closes them unconditionally.
  // Sequential single-byte then bulk read from position 0.
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    Assert.assertEquals(1, is.getAllBlocks().size());
    Assert.assertEquals(fileLength, is.getFileLength());
    Assert.assertEquals(0, is.getPos());
    int byteRead = is.read();
    Assert.assertEquals(ret, byteRead);
    byte[] bytes = new byte[50];
    // One byte already consumed above, so only fileLength - 1 remain.
    Assert.assertEquals(fileLength - 1, is.read(bytes));
  }
  // ByteBuffer read of the whole file.
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    ByteBuffer buffer = ByteBuffer.allocate(50);
    Assert.assertEquals(fileLength, is.read(buffer));
  }
  // Positional read starting at offset 2 into the middle of the buffer.
  try (DFSInputStream is = smartDFSClient.open("/test/small_files/file_0")) {
    byte[] bytes = new byte[50];
    Assert.assertEquals(fileLength - 2, is.read(2, bytes, 1, 50));
  }
}
Aggregations