Use of org.apache.hadoop.io.Writable in project camel by apache.
Class HdfsProducerTest, method testProducerClose.
@Test
public void testProducerClose() throws Exception {
    if (!canTest()) {
        return;
    }
    for (int i = 0; i < 10; ++i) {
        // send 10 messages, and mark the last one to close the file
        template.sendBodyAndHeader("direct:start1", "PAPPO" + i, HdfsConstants.HDFS_CLOSE, i == 9);
    }
    // read back the sequence file written by the producer and verify each record
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel1");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    int i = 0;
    while (reader.next(key, value)) {
        Text txt = (Text) value;
        assertEquals("PAPPO" + i, txt.toString());
        ++i;
    }
    IOHelper.close(reader);
}
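These tests send message bodies to direct: endpoints; the routes that forward them to the HDFS endpoints are defined elsewhere in HdfsProducerTest and are not shown here. As a minimal sketch only, assuming camel-hdfs endpoint options named fileSystemType, fileType, and valueType (the option names, hostnames, and paths below are assumptions for illustration, not taken from the test), such a route builder might look roughly like this:

import org.apache.camel.builder.RouteBuilder;

// Hypothetical route setup; URI paths and option names are assumptions for illustration.
public class HdfsProducerRoutesSketch extends RouteBuilder {
    @Override
    public void configure() throws Exception {
        // text bodies written as Text values into a local sequence file (assumed options)
        from("direct:start1")
            .to("hdfs:localhost/tmp/test-camel1"
                + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&valueType=TEXT");
        // boolean bodies written as BooleanWritable values (assumed options)
        from("direct:write_boolean")
            .to("hdfs:localhost/tmp/test-camel-boolean"
                + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&valueType=BOOLEAN");
    }
}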
Class HdfsProducerTest, method testProducer.
@Test
public void testProducer() throws Exception {
    if (!canTest()) {
        return;
    }
    template.sendBody("direct:start1", "PAPPO");
    // read back the single record and verify the body was stored as Text
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel1");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    assertEquals("PAPPO", value.toString());
    IOHelper.close(reader);
}
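The reader code above discovers the key and value classes from the file itself via reader.getKeyClass() and reader.getValueClass(). For context, here is a standalone sketch of writing such a file with the plain Hadoop API; the NullWritable key class and the /tmp path are assumptions for illustration, since the test never names the key type.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class WriteSequenceFileSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // hypothetical local path; the tests build theirs from TEMP_DIR
        Path file = new Path("file:///tmp/test-camel1");
        // NullWritable as the key class is an assumption for illustration only
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(file),
                SequenceFile.Writer.keyClass(NullWritable.class),
                SequenceFile.Writer.valueClass(Text.class))) {
            writer.append(NullWritable.get(), new Text("PAPPO"));
        }
    }
}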
Class HdfsProducerTest, method testWriteBoolean.
@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);
    // read back the record and verify the body was stored as a BooleanWritable
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(aBoolean, rBoolean);
    IOHelper.close(reader);
}
Class HdfsProducerTest, method testWriteFloat.
@Test
public void testWriteFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    float aFloat = 12.34f;
    template.sendBody("direct:write_float", aFloat);
    // read back the record and verify the body was stored as a FloatWritable
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-float");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    float rFloat = ((FloatWritable) value).get();
    assertEquals(aFloat, rFloat, 0.0F);
    IOHelper.close(reader);
}
Class HdfsProducerTest, method testWriteDouble.
@Test
public void testWriteDouble() throws Exception {
    if (!canTest()) {
        return;
    }
    Double aDouble = 12.34D;
    template.sendBody("direct:write_double", aDouble);
    // read back the record and verify the body was stored as a DoubleWritable
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-double");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Double rDouble = ((DoubleWritable) value).get();
    assertEquals(aDouble, rDouble);
    IOHelper.close(reader);
}
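The boolean, float, and double tests depend on the producer turning plain Java bodies into the matching Writable types before appending them to the sequence file. A hypothetical helper, purely illustrative and not the camel-hdfs conversion code (which uses its own writable factories), shows the kind of mapping involved:

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

// Hypothetical helper illustrating the kind of Java-to-Writable mapping the
// tests exercise; not the actual camel-hdfs implementation.
public final class WritableMappingSketch {
    private WritableMappingSketch() {
    }

    public static Writable toWritable(Object body) {
        if (body instanceof Boolean) {
            return new BooleanWritable((Boolean) body);
        }
        if (body instanceof Float) {
            return new FloatWritable((Float) body);
        }
        if (body instanceof Double) {
            return new DoubleWritable((Double) body);
        }
        // fall back to Text for anything else, mirroring the string tests above
        return new Text(String.valueOf(body));
    }
}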