Search in sources:

Example 1 with Configuration

use of org.apache.hadoop.conf.Configuration in project camel by apache.

From class HdfsConsumerTest, method testReadBoolean.

@Test
public void testReadBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    // Write a single (NullWritable, BooleanWritable(true)) record into a local sequence file,
    // then consume it through the hdfs endpoint and expect exactly one exchange.
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    try {
        NullWritable keyWritable = NullWritable.get();
        BooleanWritable valueWritable = new BooleanWritable();
        valueWritable.set(true);
        writer.append(keyWritable, valueWritable);
        writer.sync();
    } finally {
        // Release the writer even if append/sync throws (previously leaked on failure).
        writer.close();
    }
    context.addRoutes(new RouteBuilder() {

        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) SequenceFile(org.apache.hadoop.io.SequenceFile) RouteBuilder(org.apache.camel.builder.RouteBuilder) BooleanWritable(org.apache.hadoop.io.BooleanWritable) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayFile(org.apache.hadoop.io.ArrayFile) SequenceFile(org.apache.hadoop.io.SequenceFile) File(java.io.File) NullWritable(org.apache.hadoop.io.NullWritable) Test(org.junit.Test)

Example 2 with Configuration

use of org.apache.hadoop.conf.Configuration in project camel by apache.

From class HdfsConsumerTest, method testReadInt.

@Test
public void testReadInt() throws Exception {
    if (!canTest()) {
        return;
    }
    // Write a single (NullWritable, IntWritable) record into a local sequence file,
    // then consume it through the hdfs endpoint and expect exactly one exchange.
    final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, IntWritable.class);
    try {
        NullWritable keyWritable = NullWritable.get();
        IntWritable valueWritable = new IntWritable();
        int value = 314159265;
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();
    } finally {
        // Release the writer even if append/sync throws (previously leaked on failure).
        writer.close();
    }
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {

        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) SequenceFile(org.apache.hadoop.io.SequenceFile) RouteBuilder(org.apache.camel.builder.RouteBuilder) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayFile(org.apache.hadoop.io.ArrayFile) SequenceFile(org.apache.hadoop.io.SequenceFile) File(java.io.File) NullWritable(org.apache.hadoop.io.NullWritable) IntWritable(org.apache.hadoop.io.IntWritable) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) Test(org.junit.Test)

Example 3 with Configuration

use of org.apache.hadoop.conf.Configuration in project camel by apache.

From class HdfsConsumerTest, method testSimpleConsumerFileWithSizeEqualToNChunks.

@Test
public void testSimpleConsumerFileWithSizeEqualToNChunks() throws Exception {
    if (!canTest()) {
        return;
    }
    // Write a 210-byte file (42 writes of 5 bytes) and consume it with chunkSize=42,
    // which should yield exactly 5 chunks of 42 bytes each.
    final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    FSDataOutputStream out = fs.create(file);
    try {
        // size = 5 times chunk size = 210 bytes
        for (int i = 0; i < 42; ++i) {
            out.write(new byte[] { 0x61, 0x62, 0x63, 0x64, 0x65 });
            out.flush();
        }
    } finally {
        // Close the stream even if a write throws (previously leaked on failure).
        out.close();
    }
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(5);
    context.addRoutes(new RouteBuilder() {

        @Override
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=42&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
    // First chunk must be exactly one chunkSize worth of bytes.
    assertThat(resultEndpoint.getReceivedExchanges().get(0).getIn().getBody(ByteArrayOutputStream.class).toByteArray().length, equalTo(42));
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) RouteBuilder(org.apache.camel.builder.RouteBuilder) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) FileSystem(org.apache.hadoop.fs.FileSystem) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ArrayFile(org.apache.hadoop.io.ArrayFile) SequenceFile(org.apache.hadoop.io.SequenceFile) File(java.io.File) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) Test(org.junit.Test)

Example 4 with Configuration

use of org.apache.hadoop.conf.Configuration in project camel by apache.

From class CamelHBaseTestSupport, method putMultipleRows.

/**
 * Inserts one row per entry in {@code key} into the PERSON table, writing a single
 * cell (first family / first column) per row.
 *
 * @throws IOException if the HBase connection or a put fails
 */
protected void putMultipleRows() throws IOException {
    Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
    Connection connection = ConnectionFactory.createConnection(configuration);
    try {
        Table table = connection.getTable(TableName.valueOf(PERSON_TABLE.getBytes()));
        try {
            for (int r = 0; r < key.length; r++) {
                Put put = new Put(key[r].getBytes());
                put.addColumn(family[0].getBytes(), column[0][0].getBytes(), body[r][0][0].getBytes());
                table.put(put);
            }
        } finally {
            IOHelper.close(table);
        }
    } finally {
        // The HBase Connection is caller-owned and must be closed; it was leaked before.
        IOHelper.close(connection);
    }
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) Put(org.apache.hadoop.hbase.client.Put)

Example 5 with Configuration

use of org.apache.hadoop.conf.Configuration in project camel by apache.

From class HBaseConvertionsTest, method testPutMultiRows.

@Test
public void testPutMultiRows() throws Exception {
    if (systemReady) {
        // Send three rows in one exchange via indexed headers, then read each row
        // back directly through the HBase client and verify the stored bytes.
        ProducerTemplate template = context.createProducerTemplate();
        Map<String, Object> headers = new HashMap<String, Object>();
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(), key[0]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(), body[0]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(2), key[1]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(2), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(2), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(2), body[1]);
        headers.put(HBaseAttribute.HBASE_ROW_ID.asHeader(3), key[2]);
        headers.put(HBaseAttribute.HBASE_FAMILY.asHeader(3), INFO_FAMILY);
        headers.put(HBaseAttribute.HBASE_QUALIFIER.asHeader(3), column[0]);
        headers.put(HBaseAttribute.HBASE_VALUE.asHeader(3), body[2]);
        headers.put(HBaseConstants.OPERATION, HBaseConstants.PUT);
        template.sendBodyAndHeaders("direct:start", null, headers);
        Configuration configuration = hbaseUtil.getHBaseAdmin().getConfiguration();
        Connection conn = ConnectionFactory.createConnection(configuration);
        try {
            Table bar = conn.getTable(TableName.valueOf(PERSON_TABLE));
            try {
                Get get = new Get(Bytes.toBytes((Integer) key[0]));
                //Check row 1
                get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
                Result result = bar.get(get);
                byte[] resultValue = result.value();
                assertArrayEquals(Bytes.toBytes((Long) body[0]), resultValue);
                //Check row 2
                get = new Get(Bytes.toBytes((String) key[1]));
                get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
                result = bar.get(get);
                resultValue = result.value();
                assertArrayEquals(Bytes.toBytes((Boolean) body[1]), resultValue);
                //Check row 3
                get = new Get(Bytes.toBytes((String) key[2]));
                get.addColumn(INFO_FAMILY.getBytes(), column[0].getBytes());
                result = bar.get(get);
                resultValue = result.value();
                assertArrayEquals(Bytes.toBytes((String) body[2]), resultValue);
            } finally {
                IOHelper.close(bar);
            }
        } finally {
            // The HBase Connection is caller-owned and must be closed; it was leaked before.
            IOHelper.close(conn);
        }
    }
}
Also used : ProducerTemplate(org.apache.camel.ProducerTemplate) Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) Connection(org.apache.hadoop.hbase.client.Connection) Result(org.apache.hadoop.hbase.client.Result) Get(org.apache.hadoop.hbase.client.Get) Test(org.junit.Test)

Aggregations

Configuration (org.apache.hadoop.conf.Configuration)5973 Test (org.junit.Test)3243 Path (org.apache.hadoop.fs.Path)1602 FileSystem (org.apache.hadoop.fs.FileSystem)903 IOException (java.io.IOException)850 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)727 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)517 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)502 File (java.io.File)499 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)388 ArrayList (java.util.ArrayList)360 URI (java.net.URI)319 BeforeClass (org.junit.BeforeClass)275 Job (org.apache.hadoop.mapreduce.Job)272 Before (org.junit.Before)264 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)219 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)203 HashMap (java.util.HashMap)192 FileStatus (org.apache.hadoop.fs.FileStatus)190 Properties (java.util.Properties)187