Use of org.apache.hadoop.fs.FSDataInputStream in the Apache Camel project:
class HdfsAppendTest, method testAppend (hdfs component).
@Test
public void testAppend() throws Exception {
    // Route that appends each incoming body to a single HDFS file.
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            from("direct:start1").to("hdfs://localhost:9000/tmp/test/test-camel-simple-write-file1?append=true&fileSystemType=HDFS");
        }
    });
    startCamelContext();

    // Send ten 5-byte messages; with append=true they accumulate in one file (50 bytes total).
    for (int i = 0; i < 10; ++i) {
        template.sendBody("direct:start1", "PIPPO");
    }

    // Read the file back directly through the HDFS client and verify its length.
    Configuration conf = new Configuration();
    Path file = new Path("hdfs://localhost:9000/tmp/test/test-camel-simple-write-file1");
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    // try-with-resources so the stream is closed even when an assertion fails.
    try (FSDataInputStream in = fs.open(file)) {
        byte[] buffer = new byte[5];
        for (int i = 0; i < 10; ++i) {
            // Each appended 5-byte message must be fully readable.
            assertEquals(5, in.read(buffer));
        }
        // After all appended content is consumed the stream must be at EOF;
        // the original looped 20 times ignoring every return value.
        assertEquals(-1, in.read(buffer));
    }
}
Use of org.apache.hadoop.fs.FSDataInputStream in the Apache Camel project:
class HdfsAppendTest, method testAppend (hdfs2 component variant).
@Test
public void testAppend() throws Exception {
    // Route that appends each incoming body to a single HDFS file via the hdfs2 component.
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            from("direct:start1").to("hdfs2://localhost:9000/tmp/test/test-camel-simple-write-file1?append=true&fileSystemType=HDFS");
        }
    });
    startCamelContext();

    // Send ten 5-byte messages; with append=true they accumulate in one file (50 bytes total).
    for (int i = 0; i < 10; ++i) {
        template.sendBody("direct:start1", "PIPPQ");
    }

    // Read the file back directly through the HDFS client.
    Configuration conf = new Configuration();
    Path file = new Path("hdfs://localhost:9000/tmp/test/test-camel-simple-write-file1");
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    // try-with-resources so the stream is closed even when an assertion fails.
    try (FSDataInputStream in = fs.open(file)) {
        byte[] buffer = new byte[5];
        for (int i = 0; i < 10; ++i) {
            // Each appended 5-byte message must be fully readable; the original looped
            // 20 times, ignored every return value, and only printed the buffer.
            assertEquals(5, in.read(buffer));
            // Explicit charset: new String(buffer) used the platform default.
            assertEquals("PIPPQ", new String(buffer, java.nio.charset.StandardCharsets.UTF_8));
        }
        // After all appended content is consumed the stream must be at EOF.
        assertEquals(-1, in.read(buffer));
    }
}
Use of org.apache.hadoop.fs.FSDataInputStream in the Apache Camel project:
class HdfsAppendTest, method testAppendWithDynamicFileName.
@Test
public void testAppendWithDynamicFileName() throws Exception {
    // Route that appends to a file whose name is taken from the Exchange.FILE_NAME header.
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            from("direct:start1").to("hdfs2://localhost:9000/tmp/test-dynamic/?append=true&fileSystemType=HDFS");
        }
    });
    startCamelContext();

    // Send ITERATIONS 5-byte messages, all targeting the same dynamic file name.
    for (int i = 0; i < ITERATIONS; ++i) {
        template.sendBodyAndHeader("direct:start1", "HELLO", Exchange.FILE_NAME, "camel-hdfs2.log");
    }

    // Read the file back directly through the HDFS client.
    Configuration conf = new Configuration();
    Path file = new Path("hdfs://localhost:9000/tmp/test-dynamic/camel-hdfs2.log");
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    // try-with-resources so the stream is closed even when an assertion fails.
    try (FSDataInputStream in = fs.open(file)) {
        byte[] buffer = new byte[5];
        for (int i = 0; i < ITERATIONS; ++i) {
            assertEquals(5, in.read(buffer));
            // Assert the payload instead of printing it; explicit charset replaces
            // the platform-default new String(buffer).
            assertEquals("HELLO", new String(buffer, java.nio.charset.StandardCharsets.UTF_8));
        }
        // After ITERATIONS appends the stream must be at EOF.
        assertEquals(-1, in.read(buffer));
    }
}
Use of org.apache.hadoop.fs.FSDataInputStream in the Apache Hadoop project:
class SecureIOUtils, method forceSecureOpenFSDataInputStream.
/**
 * Variant of openFSDataInputStream that runs even when security is
 * disabled; intended for use from unit tests only.
 */
@VisibleForTesting
protected static FSDataInputStream forceSecureOpenFSDataInputStream(File file, String expectedOwner, String expectedGroup) throws IOException {
    final FSDataInputStream stream = rawFilesystem.open(new Path(file.getAbsolutePath()));
    // Close the stream on any failure path; flipped to false only once
    // ownership has been verified and the stream is about to be handed out.
    boolean mustClose = true;
    try {
        final Stat fdStat = NativeIO.POSIX.getFstat(stream.getFileDescriptor());
        checkStat(file, fdStat.getOwner(), fdStat.getGroup(), expectedOwner, expectedGroup);
        mustClose = false;
        return stream;
    } finally {
        if (mustClose) {
            stream.close();
        }
    }
}
Use of org.apache.hadoop.fs.FSDataInputStream in the Apache Hadoop project:
class ITUseMiniCluster, method simpleReadAfterWrite.
/**
 * Writes a UTF string to a fresh file on the given filesystem and reads it
 * straight back, asserting the round trip is lossless.
 */
public void simpleReadAfterWrite(final FileSystem fs) throws IOException {
    LOG.info("Testing read-after-write with FS implementation: {}", fs);
    final Path target = new Path(TEST_PATH, FILENAME);
    // Ensure the parent directory exists before creating the file.
    if (!fs.mkdirs(target.getParent())) {
        throw new IOException("Mkdirs failed to create " + TEST_PATH);
    }
    // Write phase: stream is closed by try-with-resources before the read.
    try (final FSDataOutputStream out = fs.create(target)) {
        out.writeUTF(TEXT);
    }
    // Read phase: the text read back must match what was written.
    try (final FSDataInputStream in = fs.open(target)) {
        Assert.assertEquals("Didn't read back text we wrote.", TEXT, in.readUTF());
    }
}
Aggregations