Use of org.apache.hadoop.conf.Configuration in project camel by apache.
From the class HdfsConsumerTest, the method testReadBoolean:
@Test
public void testReadBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
}
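The test above calls a createWriter helper that is not shown in this listing. A minimal sketch of what it might look like, assuming it simply delegates to Hadoop's classic SequenceFile.createWriter factory method (the helper name and parameters are taken from the calls above, not from the listing itself):

// Hedged sketch: a helper matching the calls above, assuming it only wraps
// SequenceFile.createWriter; writer options and error handling are omitted.
private SequenceFile.Writer createWriter(FileSystem fs, Configuration conf, Path file,
                                         Class<?> keyClass, Class<?> valueClass) throws IOException {
    // Uses the classic (fs, conf, path, keyClass, valueClass) overload; newer Hadoop
    // versions prefer the Writer.Option based factory, but this form still works.
    return SequenceFile.createWriter(fs, conf, file, keyClass, valueClass);
}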
Use of org.apache.hadoop.conf.Configuration in project camel by apache.
From the class HdfsConsumerTest, the method testReadInt:
@Test
public void testReadInt() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, IntWritable.class);
    NullWritable keyWritable = NullWritable.get();
    IntWritable valueWritable = new IntWritable();
    int value = 314159265;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
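To see the key/value pattern these tests rely on from the reading side, here is a sketch (not part of the test) that reads the IntWritable back directly with a SequenceFile.Reader, independently of the Camel route; it assumes the same fs1, conf and file variables as above:

// Hedged sketch: verify the written record directly with Hadoop's SequenceFile.Reader,
// using the classic (fs, path, conf) constructor.
SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file, conf);
try {
    NullWritable key = NullWritable.get();
    IntWritable readValue = new IntWritable();
    if (reader.next(key, readValue)) {
        // Should print 314159265, the value appended by the test above.
        System.out.println("read value: " + readValue.get());
    }
} finally {
    reader.close();
}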
Use of org.apache.hadoop.conf.Configuration in project camel by apache.
From the class HdfsConsumerTest, the method testSimpleConsumerFileWithSizeEqualToNChunks:
@Test
public void testSimpleConsumerFileWithSizeEqualToNChunks() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    FSDataOutputStream out = fs.create(file);
    // 42 writes of 5 bytes = 210 bytes total, i.e. exactly 5 chunks at chunkSize=42
    for (int i = 0; i < 42; ++i) {
        out.write(new byte[] { 0x61, 0x62, 0x63, 0x64, 0x65 });
        out.flush();
    }
    out.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(5);
    context.addRoutes(new RouteBuilder() {
        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=42&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
    assertThat(resultEndpoint.getReceivedExchanges().get(0).getIn().getBody(ByteArrayOutputStream.class).toByteArray().length, equalTo(42));
}
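The final assertion only inspects the first chunk. Since 210 bytes divide evenly into 5 chunks of chunkSize=42, every received message should carry exactly 42 bytes; a small follow-up loop (not in the original test) could check them all using the same API calls shown above:

// Hedged extension of the assertion above: check every received chunk, not just the first.
for (Exchange exchange : resultEndpoint.getReceivedExchanges()) {
    byte[] chunk = exchange.getIn().getBody(ByteArrayOutputStream.class).toByteArray();
    assertThat(chunk.length, equalTo(42));
}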
Use of org.apache.hadoop.conf.Configuration in project camel by apache.
From the class CamelHBaseTestSupport, the method putMultipleRows:
protected void putMultipleRows() throws IOException {
    Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
    Connection connection = ConnectionFactory.createConnection(configuration);
    Table table = connection.getTable(TableName.valueOf(PERSON_TABLE.getBytes()));
    for (int r = 0; r < key.length; r++) {
        Put put = new Put(key[r].getBytes());
        put.addColumn(family[0].getBytes(), column[0][0].getBytes(), body[r][0][0].getBytes());
        table.put(put);
    }
    IOHelper.close(table);
}
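A possible variation on the helper above, assuming the same key, family, column and body fixtures: collect the Put objects and send them in a single Table.put(List&lt;Put&gt;) call, and close the Connection as well as the Table (the helper above leaves the connection open). This is a sketch, not the project's code; it also needs java.util.List and java.util.ArrayList imports:

// Hedged sketch: the same rows as putMultipleRows, but batched into one put call
// and with the Connection released via try-with-resources.
protected void putMultipleRowsBatched() throws IOException {
    Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
    try (Connection connection = ConnectionFactory.createConnection(configuration);
         Table table = connection.getTable(TableName.valueOf(PERSON_TABLE.getBytes()))) {
        List<Put> puts = new ArrayList<>();
        for (int r = 0; r < key.length; r++) {
            Put put = new Put(key[r].getBytes());
            put.addColumn(family[0].getBytes(), column[0][0].getBytes(), body[r][0][0].getBytes());
            puts.add(put);
        }
        // Table.put(List<Put>) sends all mutations in one call instead of one per row.
        table.put(puts);
    }
}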
Use of org.apache.hadoop.conf.Configuration in project camel by apache.
From the class HBaseConvertionsTest, the method testPutMultiRows:
@Test
public void testPutMultiRows() throws Exception {
    if (systemReady) {
        ProducerTemplate template = context.createProducerTemplate();
        Map<String, Object> headers = new HashMap<String, Object>();
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(), key[0]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(), body[0]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(2), key[1]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(2), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(2), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(2), body[1]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(3), key[2]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(3), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(3), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(3), body[2]);
        headers.put(HBaseConstants.OPERATION, HBaseConstants.PUT);
        template.sendBodyAndHeaders("direct:start", null, headers);
        Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
        Connection conn = ConnectionFactory.createConnection(configuration);
        Table bar = conn.getTable(TableName.valueOf(PERSON_TABLE));
        // Check row 1
        Get get = new Get(Bytes.toBytes((Integer) key[0]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        Result result = bar.get(get);
        byte[] resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Long) body[0]), resultValue);
        // Check row 2
        get = new Get(Bytes.toBytes((String) key[1]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((Boolean) body[1]), resultValue);
        // Check row 3
        get = new Get(Bytes.toBytes((String) key[2]));
        get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
        result = bar.get(get);
        resultValue = result.value();
        assertArrayEquals(Bytes.toBytes((String) body[2]), resultValue);
        IOHelper.close(bar);
    }
}
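The indexed headers above follow a simple pattern: row n uses asHeader(n), with the plain asHeader() for the first row. A hypothetical helper, not part of the test, could build each row's headers; it assumes only the header naming shown above, plus the assumption that asHeader(1) resolves to the same name as asHeader():

// Hypothetical helper mirroring the header pattern above; 'n' is the 1-based row index.
// Assumption: asHeader(1) yields the same header name as the no-argument asHeader().
private static Map<String, Object> rowHeaders(int n, Object rowId, String family, Object qualifier, Object value) {
    Map<String, Object> h = new HashMap<>();
    h.put(HBaseAttribute.HBASE_ROW_ID.asHeader(n), rowId);
    h.put(HBaseAttribute.HBASE_FAMILY.asHeader(n), family);
    h.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(n), qualifier);
    h.put(HBaseAttribute.HBASE_VALUE.asHeader(n), value);
    return h;
}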