Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testWriteTextWithKey.
@Test
public void testWriteTextWithKey() throws Exception {
    if (!canTest()) {
        return;
    }
    // send a Text value with an explicit "KEY" header through the route
    String txtKey = "THEKEY";
    String txtValue = "CIAO MONDO !";
    template.sendBodyAndHeader("direct:write_text2", txtValue, "KEY", txtKey);
    // read the written sequence file back and verify both key and value
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-text2");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Text key = (Text) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Text value = (Text) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    assertEquals(key.toString(), txtKey);
    assertEquals(value.toString(), txtValue);
    IOHelper.close(reader);
}
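These tests only drive the producer side through direct: endpoints; the routes themselves are defined elsewhere in HdfsProducerTest and are not part of this excerpt. Below is a minimal sketch of the kind of RouteBuilder such a test relies on; the hdfs endpoint URI and its fileSystemType/fileType options are illustrative camel-hdfs settings assumed by the editor, not copied from the project.

// Sketch only: the endpoint URI and its options are assumptions, not the project's actual route.
protected RouteBuilder createRouteBuilder() {
    return new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            // write each incoming body (and optional "KEY" header) into a local sequence file
            from("direct:write_text2")
                .to("hdfs://localhost/" + TEMP_DIR.toUri() + "/test-camel-text2"
                    + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE");
        }
    };
}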
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testArrayWriteText.
@Test
public void testArrayWriteText() throws Exception {
    if (!canTest()) {
        return;
    }
    // send a plain Text value; the route writes it into a Hadoop ArrayFile
    String txtValue = "CIAO MONDO !";
    template.sendBody("direct:write_text4", txtValue);
    // read the array file back and verify the stored value
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-text4");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    ArrayFile.Reader reader = new ArrayFile.Reader(fs1, "file:///" + TEMP_DIR.toUri() + "/test-camel-text4", conf);
    Text value = (Text) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(value);
    assertEquals(value.toString(), txtValue);
    IOHelper.close(reader);
}
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testProducer.
@Test
public void testProducer() throws Exception {
    if (!canTest()) {
        return;
    }
    template.sendBody("direct:start1", "PAPPO");
    // read the resulting sequence file and verify the stored value
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel1");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    assertEquals("PAPPO", value.toString());
    IOHelper.close(reader);
}
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testWriteLong.
@Test
public void testWriteLong() throws Exception {
    if (!canTest()) {
        return;
    }
    long aLong = 1234567890;
    template.sendBody("direct:write_long", aLong);
    // read the sequence file back and verify the LongWritable value
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-long");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    long rLong = ((LongWritable) value).get();
    assertEquals(rLong, aLong);
    IOHelper.close(reader);
}
Use of org.apache.hadoop.conf.Configuration in project camel by apache: class HdfsProducerTest, method testWriteFloat.
@Test
public void testWriteFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    float aFloat = 12.34f;
    template.sendBody("direct:write_float", aFloat);
    // read the sequence file back and verify the FloatWritable value
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-float");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    float rFloat = ((FloatWritable) value).get();
    assertEquals(rFloat, aFloat, 0.0F);
    IOHelper.close(reader);
}
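A side note, not from the Camel source: the SequenceFile.Reader(FileSystem, Path, Configuration) constructor used in these tests is deprecated on Hadoop 2.x and later in favour of the options-based constructor. A minimal sketch of the equivalent read-back with that API is shown below; the helper name dumpSequenceFile is hypothetical and the caller supplies the path.

// Editor's sketch, assuming Hadoop 2.x+: read a sequence file via the non-deprecated
// options-based constructor. The helper name is hypothetical; the path comes from the caller.
static void dumpSequenceFile(Path file) throws IOException {
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
    try {
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        // iterate over every record rather than reading a single entry
        while (reader.next(key, value)) {
            System.out.println(key + " -> " + value);
        }
    } finally {
        IOHelper.close(reader);
    }
}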