
Example 26 with Path

Use of org.apache.hadoop.fs.Path in project camel by apache.

From the class HdfsConsumerTest, method testReadFloat:

@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    // Write a single NullWritable/FloatWritable record into a local SequenceFile.
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), FloatWritable (org.apache.hadoop.io.FloatWritable), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), Writer (org.apache.hadoop.io.SequenceFile.Writer), Test (org.junit.Test)
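
The createWriter helper referenced above is not part of the snippet. A minimal sketch of what such a helper could look like, assuming the classic SequenceFile.createWriter(FileSystem, Configuration, Path, keyClass, valueClass) factory from Hadoop 2.x; the real method in HdfsConsumerTest may use different options:

private static SequenceFile.Writer createWriter(Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    // Resolve the local file system for the given path; the original helper is
    // not shown, so compression and block-size options are left at defaults.
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    return SequenceFile.createWriter(fs, conf, file, keyClass, valueClass);
}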

Example 27 with Path

Use of org.apache.hadoop.fs.Path in project camel by apache.

From the class HdfsConsumerTest, method testSimpleConsumerFileWithSizeEqualToNChunks:

@Test
public void testSimpleConsumerFileWithSizeEqualToNChunks() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-normal-file").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(file.toUri(), conf);
    FSDataOutputStream out = fs.create(file);
    // write 42 * 5 = 210 bytes in total, i.e. exactly 5 chunks of chunkSize=42
    for (int i = 0; i < 42; ++i) {
        out.write(new byte[] { 0x61, 0x62, 0x63, 0x64, 0x65 });
        out.flush();
    }
    out.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(5);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&chunkSize=42&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
    assertThat(resultEndpoint.getReceivedExchanges().get(0).getIn().getBody(ByteArrayOutputStream.class).toByteArray().length, equalTo(42));
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), FileSystem (org.apache.hadoop.fs.FileSystem), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ArrayFile (org.apache.hadoop.io.ArrayFile), SequenceFile (org.apache.hadoop.io.SequenceFile), File (java.io.File), Test (org.junit.Test)
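
Since the file is 210 bytes and chunkSize=42, each of the five exchanges should carry exactly 42 bytes. As an optional extension (not in the original test, and assuming org.apache.camel.Exchange is imported), the total across all received chunks could be verified as well:

// Optional extra check: the five chunks together add up to the 210-byte file.
int total = 0;
for (Exchange exchange : resultEndpoint.getReceivedExchanges()) {
    total += exchange.getIn().getBody(ByteArrayOutputStream.class).toByteArray().length;
}
assertThat(total, equalTo(210));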

Example 28 with Path

Use of org.apache.hadoop.fs.Path in project camel by apache.

From the class HdfsConsumerTest, method testReadBoolean:

@Test
public void testReadBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), BooleanWritable (org.apache.hadoop.io.BooleanWritable), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), Writer (org.apache.hadoop.io.SequenceFile.Writer), Test (org.junit.Test)
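
For comparison, the written record could also be read back directly with a SequenceFile.Reader instead of going through the route. A minimal sketch, assuming the Hadoop 2.x reader API and the conf and file variables from the test above:

// Sketch only: read the single record back and check the boolean payload.
try (SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file))) {
    NullWritable key = NullWritable.get();
    BooleanWritable value = new BooleanWritable();
    assertTrue(reader.next(key, value));
    assertTrue(value.get());
}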

Example 29 with Path

Use of org.apache.hadoop.fs.Path in project camel by apache.

From the class HdfsProducerConsumerTest, method testSimpleSplitWriteRead:

@Test
public void testSimpleSplitWriteRead() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-simple-write-file").getAbsolutePath());
    context.addRoutes(new RouteBuilder() {

        @Override
        public void configure() {
            from("direct:start").to("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&splitStrategy=BYTES:5,IDLE:1000");
            from("hdfs2:localhost/" + file.toUri() + "?initialDelay=2000&fileSystemType=LOCAL&chunkSize=5").to("mock:result");
        }
    });
    context.start();
    // Send ten 5-byte bodies; the 5-byte split strategy writes each one to its
    // own segment, which the consumer then delivers as individual messages.
    List<String> expectedResults = new ArrayList<String>();
    for (int i = 0; i < 10; ++i) {
        template.sendBody("direct:start", "CIAO" + i);
        expectedResults.add("CIAO" + i);
    }
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(10);
    resultEndpoint.assertIsSatisfied();
    List<Exchange> exchanges = resultEndpoint.getExchanges();
    assertEquals(10, exchanges.size());
    resultEndpoint.expectedBodiesReceivedInAnyOrder(expectedResults);
}
Also used: Path (org.apache.hadoop.fs.Path), Exchange (org.apache.camel.Exchange), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), ArrayList (java.util.ArrayList), File (java.io.File), Test (org.junit.Test)
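
With splitStrategy=BYTES:5,IDLE:1000 the producer rolls over to a new segment once 5 bytes have been written or the endpoint has been idle for a second, so the ten 5-byte "CIAOx" bodies end up in separate segments that the consumer reads back as ten messages. A hedged sketch that only counts whatever segment files were produced (segment naming is an implementation detail of the component, so no assumptions are made about the file names):

// Sketch only: with a split strategy the target path behaves like a directory
// of segment files; this just counts them without assuming their names.
File splitDir = new File("target/test/test-camel-simple-write-file");
File[] segments = splitDir.listFiles();
if (segments != null) {
    System.out.println("segments written: " + segments.length);
}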

Example 30 with Path

Use of org.apache.hadoop.fs.Path in project camel by apache.

From the class HdfsProducerSplitTest, method doTest:

private void doTest(int routeNr) throws Exception {
    if (!canTest()) {
        return;
    }
    for (int i = 0; i < 10; ++i) {
        template.sendBody("direct:start" + routeNr, "CIAO" + i);
    }
    stopCamelContext();
    FileSystem fs = FileSystem.get(new Configuration());
    // One output file is expected per message sent above.
    FileStatus[] status = fs.listStatus(new Path("file:///" + BASE_FILE.toUri() + routeNr));
    assertEquals(10, status.length);
    for (FileStatus fileStatus : status) {
        // Each split file should contain exactly one "CIAOx" line.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(fileStatus.getPath())))) {
            assertTrue(br.readLine().startsWith("CIAO"));
            assertNull(br.readLine());
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), Configuration (org.apache.hadoop.conf.Configuration), InputStreamReader (java.io.InputStreamReader), FileSystem (org.apache.hadoop.fs.FileSystem), BufferedReader (java.io.BufferedReader)
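
doTest relies on routes and a BASE_FILE constant defined elsewhere in HdfsProducerSplitTest. Since assertEquals(10, status.length) expects one output file per message, the routes presumably split after every message. A hypothetical sketch of such a setup; the BASE_FILE value, route URI, and MESSAGES:1 strategy are assumptions, not the project's actual code:

// Hypothetical setup assumed by doTest(): splitting after every message so
// that 10 bodies yield 10 output files under BASE_FILE + routeNr.
private static final Path BASE_FILE = new Path(
        new File("target/test/test-camel-simple-write").getAbsolutePath());

@Override
public void configure() {
    from("direct:start1").to("hdfs2:localhost/" + BASE_FILE.toUri()
            + "1?fileSystemType=LOCAL&splitStrategy=MESSAGES:1");
}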

Aggregations

Path (org.apache.hadoop.fs.Path): 7063
Test (org.junit.Test): 2926
FileSystem (org.apache.hadoop.fs.FileSystem): 2223
Configuration (org.apache.hadoop.conf.Configuration): 1608
IOException (java.io.IOException): 1574
FileStatus (org.apache.hadoop.fs.FileStatus): 912
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 662
ArrayList (java.util.ArrayList): 644
File (java.io.File): 518
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 342
JobConf (org.apache.hadoop.mapred.JobConf): 332
Job (org.apache.hadoop.mapreduce.Job): 322
FileNotFoundException (java.io.FileNotFoundException): 319
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 293
HashMap (java.util.HashMap): 279
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 276
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 270
URI (java.net.URI): 267
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 220
Text (org.apache.hadoop.io.Text): 185