Example 61 with NullWritable

Use of org.apache.hadoop.io.NullWritable in the Apache Camel project.

From class HdfsConsumerTest, method testReadFloat.

@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), FloatWritable (org.apache.hadoop.io.FloatWritable), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), Test (org.junit.Test)
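
The createWriter(...) helper invoked above is defined elsewhere in HdfsConsumerTest and is not shown on this page. A minimal sketch of what such a helper could look like, assuming it simply delegates to the classic SequenceFile.createWriter overload that takes the filesystem, configuration, path, and key/value classes:

private SequenceFile.Writer createWriter(FileSystem fs, Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    // Hypothetical reconstruction of the hidden test helper; the real implementation is not shown here.
    return SequenceFile.createWriter(fs, conf, file, keyClass, valueClass);
}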

Example 62 with NullWritable

Use of org.apache.hadoop.io.NullWritable in the Apache Camel project.

From class HdfsConsumerTest, method testReadString.

@Test
public void testReadString() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-string").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, Text.class);
    NullWritable keyWritable = NullWritable.get();
    Text valueWritable = new Text();
    String value = "CIAO!";
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), FileSystem (org.apache.hadoop.fs.FileSystem), Text (org.apache.hadoop.io.Text), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), Test (org.junit.Test)
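
This test only checks the message count, whereas the byte example below also asserts the body. If the body should be verified here as well, an extra expectation could be added before the route is started; this is an assumed addition, relying on the consumed Text value being convertible to a String (the "CIAO!" literal comes from the test above):

resultEndpoint.expectedMessageCount(1);
// Assumed extra assertion, mirroring the body check used in the byte test below.
resultEndpoint.message(0).body(String.class).isEqualTo("CIAO!");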

Example 63 with NullWritable

Use of org.apache.hadoop.io.NullWritable in the Apache Camel project.

From class HdfsConsumerTest, method testReadByte.

@Test
public void testReadByte() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, ByteWritable.class);
    NullWritable keyWritable = NullWritable.get();
    ByteWritable valueWritable = new ByteWritable();
    byte value = 3;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.message(0).body(byte.class).isEqualTo(3);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), ByteWritable (org.apache.hadoop.io.ByteWritable), Test (org.junit.Test)
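
To see what the consumer is actually handed, the appended pair can also be read back with the plain Hadoop API, independent of Camel. A sketch, assuming the same fs1, conf, and file variables as in the test above (the Reader(FileSystem, Path, Configuration) constructor is deprecated in Hadoop 2 but still available):

SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file, conf);
NullWritable key = NullWritable.get();   // NullWritable is a singleton; get() returns the shared instance
ByteWritable val = new ByteWritable();
while (reader.next(key, val)) {
    System.out.println(val.get());       // prints 3 for the single record appended above
}
reader.close();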

Example 64 with NullWritable

Use of org.apache.hadoop.io.NullWritable in the Apache Camel project.

From class HdfsConsumerTest, method testReadWithReadSuffix.

@Test
public void testReadWithReadSuffix() throws Exception {
    if (!canTest()) {
        return;
    }
    String[] beforeFiles = new File("target/test").list();
    int before = beforeFiles != null ? beforeFiles.length : 0;
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.getParent().toUri() + "?scheduler=#myScheduler&pattern=*&fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0&readSuffix=handled").to("mock:result");
        }
    });
    ScheduledExecutorService pool = context.getExecutorServiceManager().newScheduledThreadPool(null, "unitTestPool", 1);
    DefaultScheduledPollConsumerScheduler scheduler = new DefaultScheduledPollConsumerScheduler(pool);
    ((JndiRegistry) ((PropertyPlaceholderDelegateRegistry) context.getRegistry()).getRegistry()).bind("myScheduler", scheduler);
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
    // synchronize on pool that was used to run hdfs consumer thread
    scheduler.getScheduledExecutorService().shutdown();
    scheduler.getScheduledExecutorService().awaitTermination(5000, TimeUnit.MILLISECONDS);
    Set<String> files = new HashSet<String>(Arrays.asList(new File("target/test").list()));
    // there may be some leftover files before, so test that we only added 2 new files
    assertThat(files.size() - before, equalTo(2));
    assertTrue(files.remove("test-camel-boolean.handled"));
    assertTrue(files.remove(".test-camel-boolean.handled.crc"));
}
Also used: Path (org.apache.hadoop.fs.Path), ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService), Configuration (org.apache.hadoop.conf.Configuration), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), NullWritable (org.apache.hadoop.io.NullWritable), JndiRegistry (org.apache.camel.impl.JndiRegistry), SequenceFile (org.apache.hadoop.io.SequenceFile), BooleanWritable (org.apache.hadoop.io.BooleanWritable), FileSystem (org.apache.hadoop.fs.FileSystem), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), DefaultScheduledPollConsumerScheduler (org.apache.camel.impl.DefaultScheduledPollConsumerScheduler), HashSet (java.util.HashSet), Test (org.junit.Test)
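
The test reaches the underlying JndiRegistry by casting through PropertyPlaceholderDelegateRegistry so that the #myScheduler reference in the endpoint URI resolves. If the explicit handle on the thread pool were not needed for the shutdown synchronization at the end, a hypothetical alternative (assuming the test extends CamelTestSupport) would be to bind the scheduler up front by overriding createRegistry():

@Override
protected JndiRegistry createRegistry() throws Exception {
    // Hypothetical alternative wiring, not taken from the test above; it drops the
    // explicit ScheduledExecutorService handle used later for awaitTermination().
    JndiRegistry registry = super.createRegistry();
    registry.bind("myScheduler", new DefaultScheduledPollConsumerScheduler());
    return registry;
}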

Example 65 with NullWritable

Use of org.apache.hadoop.io.NullWritable in the Apache Camel project.

From class HdfsConsumerTest, method testReadByte (note the hdfs2: endpoint scheme, unlike Example 63).

@Test
public void testReadByte() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, ByteWritable.class);
    NullWritable keyWritable = NullWritable.get();
    ByteWritable valueWritable = new ByteWritable();
    byte value = 3;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.message(0).body(byte.class).isEqualTo(3);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), SequenceFile (org.apache.hadoop.io.SequenceFile), RouteBuilder (org.apache.camel.builder.RouteBuilder), MockEndpoint (org.apache.camel.component.mock.MockEndpoint), ArrayFile (org.apache.hadoop.io.ArrayFile), File (java.io.File), NullWritable (org.apache.hadoop.io.NullWritable), ByteWritable (org.apache.hadoop.io.ByteWritable), Writer (org.apache.hadoop.io.SequenceFile.Writer), Test (org.junit.Test)
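
This variant's createWriter(conf, file, keyClass, valueClass) helper takes no FileSystem argument, unlike the one in Example 63, and is likewise not shown on this page. A sketch of what it might wrap, assuming it uses the option-based SequenceFile.createWriter factory method from the newer Hadoop API:

private SequenceFile.Writer createWriter(Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    // Hypothetical reconstruction: builds the writer from Writer.Option values instead
    // of an explicit FileSystem handle.
    return SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(file),
            SequenceFile.Writer.keyClass(keyClass),
            SequenceFile.Writer.valueClass(valueClass));
}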

Aggregations

NullWritable (org.apache.hadoop.io.NullWritable): 101
Test (org.junit.Test): 65
Configuration (org.apache.hadoop.conf.Configuration): 41
Path (org.apache.hadoop.fs.Path): 41
File (java.io.File): 29
FileSystem (org.apache.hadoop.fs.FileSystem): 26
SequenceFile (org.apache.hadoop.io.SequenceFile): 22
JobConf (org.apache.hadoop.mapred.JobConf): 22
RouteBuilder (org.apache.camel.builder.RouteBuilder): 18
MockEndpoint (org.apache.camel.component.mock.MockEndpoint): 18
ArrayFile (org.apache.hadoop.io.ArrayFile): 18
Text (org.apache.hadoop.io.Text): 16
InputSplit (org.apache.hadoop.mapred.InputSplit): 16
LongWritable (org.apache.hadoop.io.LongWritable): 15
IntWritable (org.apache.hadoop.io.IntWritable): 10
Writer (org.apache.hadoop.io.SequenceFile.Writer): 9
CharacteristicSetWritable (org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable): 8
IOException (java.io.IOException): 7
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 7
FloatWritable (org.apache.hadoop.io.FloatWritable): 7