Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testProducerClose.
@Test
public void testProducerClose() throws Exception {
    if (!canTest()) {
        return;
    }
    for (int i = 0; i < 10; ++i) {
        // send 10 messages, and mark to close in the last message
        template.sendBodyAndHeader("direct:start1", "PAPPO" + i, HdfsConstants.HDFS_CLOSE, i == 9);
    }
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel1");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    int i = 0;
    while (reader.next(key, value)) {
        Text txt = (Text) value;
        assertEquals("PAPPO" + i, txt.toString());
        ++i;
    }
    IOHelper.close(reader);
}
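The producer route that receives these messages is not part of this snippet. A minimal sketch of what such a route could look like, assuming an hdfs2 endpoint that writes Text values into a sequence file under TEMP_DIR (the endpoint URI and options here are illustrative assumptions, not taken from the project):

// Hypothetical producer route for testProducerClose; endpoint options are assumptions.
context.addRoutes(new RouteBuilder() {
    public void configure() {
        from("direct:start1")
            .to("hdfs2:localhost/" + TEMP_DIR.toUri() + "/test-camel1"
                + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&valueType=TEXT");
    }
});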
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class FromFileToHdfsTest, method tearDown.
@Override
public void tearDown() throws Exception {
    if (!canTest()) {
        return;
    }
    super.tearDown();
    Configuration conf = new Configuration();
    Path dir = new Path("target/outbox");
    FileSystem fs = FileSystem.get(dir.toUri(), conf);
    fs.delete(dir, true);
}
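For context, this tearDown recursively deletes the target/outbox directory the test writes into. A minimal sketch of the kind of route FromFileToHdfsTest could exercise (the inbox path and endpoint options are assumptions, not taken from the project):

// Hypothetical route shape this tearDown cleans up after; paths and options are assumptions.
from("file:target/inbox")
    .to("hdfs2:localhost/target/outbox?fileSystemType=LOCAL");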
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsConsumerTest, method testReadInt.
@Test
public void testReadInt() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, IntWritable.class);
    NullWritable keyWritable = NullWritable.get();
    IntWritable valueWritable = new IntWritable();
    int value = 314159265;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
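The createWriter helper used above is not shown in these snippets. A minimal sketch of what it might look like with the Hadoop 2 SequenceFile builder API (an assumed helper, not necessarily the project's actual implementation):

// Assumed helper, not the project's actual createWriter.
private static SequenceFile.Writer createWriter(Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    return SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(file),
            SequenceFile.Writer.keyClass(keyClass),
            SequenceFile.Writer.valueClass(valueClass));
}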
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsConsumerTest, method testReadString.
@Test
public void testReadString() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, Text.class);
    NullWritable keyWritable = NullWritable.get();
    Text valueWritable = new Text();
    String value = "CIAO!";
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
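Only the message count is asserted here. The body could be verified as well, assuming the consumer converts the Text payload to a String; a hedged addition using the standard MockEndpoint API, placed before context.start():

// Optional, assumed tightening of the expectation on the delivered body.
resultEndpoint.expectedBodiesReceived("CIAO!");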
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsConsumerTest, method testSimpleConsumerWithEmptySequenceFile.
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
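Because zero messages are expected, it can be worth confirming that the empty sequence file really was created before the route starts. A small sketch using the same Configuration and FileSystem APIs (an assumed addition to the test, placed right after writer.close()):

// Assumed sanity check: the empty sequence file should exist on the local file system.
FileSystem fs = FileSystem.get(file.toUri(), conf);
assertTrue(fs.exists(file));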