Example 51 with BooleanWritable

use of org.apache.hadoop.io.BooleanWritable in project goldenorb by jzachr.

the class SampleBooleanMessageTest method startServer.

/**
 * Starts the RPC server and waits for the client proxy before each test.
 */
@SuppressWarnings("unchecked")
@Before
public void startServer() throws IOException {
    server = new RPCServer<BooleanMessage, BooleanWritable>(SERVER_PORT);
    server.start();
    Configuration conf = new Configuration();
    InetSocketAddress addr = new InetSocketAddress("localhost", SERVER_PORT);
    if (client == null)
        client = (RPCProtocol<BooleanMessage, BooleanWritable>) RPC.waitForProxy(RPCProtocol.class, RPCProtocol.versionID, addr, conf);
}
Also used : BooleanMessage(org.goldenorb.types.message.BooleanMessage) Configuration(org.apache.hadoop.conf.Configuration) BooleanWritable(org.apache.hadoop.io.BooleanWritable) InetSocketAddress(java.net.InetSocketAddress) Before(org.junit.Before)
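
The RPC exchange above works because BooleanWritable implements Hadoop's Writable contract. As a hedged aside (this sketch is not from goldenorb), the write()/readFields() round trip that the RPC layer performs under the hood looks like this:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize: BooleanWritable.write() emits a single byte.
        BooleanWritable original = new BooleanWritable(true);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));
        // Deserialize into a fresh, reusable instance.
        BooleanWritable copy = new BooleanWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(copy.get()); // prints: true
    }
}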

Example 52 with BooleanWritable

use of org.apache.hadoop.io.BooleanWritable in project camel by apache.

the class HdfsConsumerTest method testReadWithReadSuffix.

@Test
public void testReadWithReadSuffix() throws Exception {
    if (!canTest()) {
        return;
    }
    String[] beforeFiles = new File("target/test").list();
    int before = beforeFiles != null ? beforeFiles.length : 0;
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.getParent().toUri() + "?scheduler=#myScheduler&pattern=*&fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0&readSuffix=handled").to("mock:result");
        }
    });
    ScheduledExecutorService pool = context.getExecutorServiceManager().newScheduledThreadPool(null, "unitTestPool", 1);
    DefaultScheduledPollConsumerScheduler scheduler = new DefaultScheduledPollConsumerScheduler(pool);
    ((JndiRegistry) ((PropertyPlaceholderDelegateRegistry) context.getRegistry()).getRegistry()).bind("myScheduler", scheduler);
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
    // shut down the pool that ran the hdfs consumer thread and wait for it to terminate
    scheduler.getScheduledExecutorService().shutdown();
    scheduler.getScheduledExecutorService().awaitTermination(5000, TimeUnit.MILLISECONDS);
    Set<String> files = new HashSet<String>(Arrays.asList(new File("target/test").list()));
    // there may be some leftover files before, so test that we only added 2 new files
    assertThat(files.size() - before, equalTo(2));
    assertTrue(files.remove("test-camel-boolean.handled"));
    assertTrue(files.remove(".test-camel-boolean.handled.crc"));
}
Also used : Path(org.apache.hadoop.fs.Path) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Configuration(org.apache.hadoop.conf.Configuration) RouteBuilder(org.apache.camel.builder.RouteBuilder) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) NullWritable(org.apache.hadoop.io.NullWritable) JndiRegistry(org.apache.camel.impl.JndiRegistry) SequenceFile(org.apache.hadoop.io.SequenceFile) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayFile(org.apache.hadoop.io.ArrayFile) File(java.io.File) DefaultScheduledPollConsumerScheduler(org.apache.camel.impl.DefaultScheduledPollConsumerScheduler) HashSet(java.util.HashSet) Test(org.junit.Test)
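
Note that createWriter(...) is a helper defined elsewhere in HdfsConsumerTest, not shown on this page. A plausible minimal shape for it, assuming it simply delegates to the classic FileSystem-based SequenceFile factory (a hypothetical reconstruction, not the project's actual helper):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;

// Hypothetical helper, assumed to live inside the test class.
private static SequenceFile.Writer createWriter(FileSystem fs, Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    return SequenceFile.createWriter(fs, conf, file, keyClass, valueClass);
}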

Example 53 with BooleanWritable

use of org.apache.hadoop.io.BooleanWritable in project camel by apache.

the class HdfsConsumerTest method testReadWithReadSuffix.

@Test
public void testReadWithReadSuffix() throws Exception {
    if (!canTest()) {
        return;
    }
    String[] beforeFiles = new File("target/test").list();
    int before = beforeFiles != null ? beforeFiles.length : 0;
    final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    NullWritable keyWritable = NullWritable.get();
    BooleanWritable valueWritable = new BooleanWritable();
    valueWritable.set(true);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs2:localhost/" + file.getParent().toUri() + "?scheduler=#myScheduler&pattern=*&fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0&readSuffix=handled").to("mock:result");
        }
    });
    ScheduledExecutorService pool = context.getExecutorServiceManager().newScheduledThreadPool(null, "unitTestPool", 1);
    DefaultScheduledPollConsumerScheduler scheduler = new DefaultScheduledPollConsumerScheduler(pool);
    ((JndiRegistry) ((PropertyPlaceholderDelegateRegistry) context.getRegistry()).getRegistry()).bind("myScheduler", scheduler);
    context.start();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.assertIsSatisfied();
    // shut down the pool that ran the hdfs consumer thread and wait for it to terminate
    scheduler.getScheduledExecutorService().shutdown();
    scheduler.getScheduledExecutorService().awaitTermination(5000, TimeUnit.MILLISECONDS);
    Set<String> files = new HashSet<String>(Arrays.asList(new File("target/test").list()));
    // there may be some leftover files before, so test that we only added 2 new files
    assertThat(files.size() - before, equalTo(2));
    assertTrue(files.remove("test-camel-boolean.handled"));
    assertTrue(files.remove(".test-camel-boolean.handled.crc"));
}
Also used : Path(org.apache.hadoop.fs.Path) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) Configuration(org.apache.hadoop.conf.Configuration) RouteBuilder(org.apache.camel.builder.RouteBuilder) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) NullWritable(org.apache.hadoop.io.NullWritable) JndiRegistry(org.apache.camel.impl.JndiRegistry) SequenceFile(org.apache.hadoop.io.SequenceFile) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ArrayFile(org.apache.hadoop.io.ArrayFile) File(java.io.File) Writer(org.apache.hadoop.io.SequenceFile.Writer) DefaultScheduledPollConsumerScheduler(org.apache.camel.impl.DefaultScheduledPollConsumerScheduler) HashSet(java.util.HashSet) Test(org.junit.Test)
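
This hdfs2 variant calls createWriter(conf, file, ...) without a FileSystem argument, which suggests the options-based factory introduced in Hadoop 2.x. A hedged sketch of what such a helper could look like (hypothetical, not the project's actual code):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;

// Hypothetical helper, assumed to live inside the hdfs2 test class.
private static SequenceFile.Writer createWriter(Configuration conf, Path file,
        Class<?> keyClass, Class<?> valueClass) throws IOException {
    return SequenceFile.createWriter(conf, Writer.file(file),
            Writer.keyClass(keyClass), Writer.valueClass(valueClass));
}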

Example 54 with BooleanWritable

use of org.apache.hadoop.io.BooleanWritable in project camel by apache.

the class HdfsProducerTest method testWriteBoolean.

@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(aBoolean, rBoolean); // expected value first, per JUnit convention
    IOHelper.close(reader);
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) SequenceFile(org.apache.hadoop.io.SequenceFile) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FileSystem(org.apache.hadoop.fs.FileSystem) Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Test(org.junit.Test)
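
The constructor new SequenceFile.Reader(fs1, file1, conf) used above is deprecated in Hadoop 2.x. For reference, a standalone sketch of the same read using the options-based Reader API; the file path is a placeholder, not taken from the test:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;

public class ReadBooleanSequenceFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Placeholder path; the test above derives its path from TEMP_DIR.
        Path file = new Path("file:///tmp/test-camel-boolean");
        try (SequenceFile.Reader reader =
                new SequenceFile.Reader(conf, SequenceFile.Reader.file(file))) {
            NullWritable key = NullWritable.get();
            BooleanWritable value = new BooleanWritable();
            while (reader.next(key, value)) {
                System.out.println(value.get());
            }
        }
    }
}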

Example 55 with BooleanWritable

use of org.apache.hadoop.io.BooleanWritable in project hadoop by apache.

the class TestPipeApplication method testPipesReduser.

/**
   * Tests org.apache.hadoop.mapred.pipes.PipesReducer: verifies the transfer
   * of key and value data to the external reducer process.
   *
   * @throws Exception
   */
@Test
public void testPipesReduser() throws Exception {
    File[] psw = cleanTokenPasswordFile();
    JobConf conf = new JobConf();
    try {
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
        reducer.configure(conf);
        BooleanWritable bw = new BooleanWritable(true);
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        initStdOut(conf);
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(new Counters.Counter(), new Progress());
        Reporter reporter = new TestTaskReporter();
        List<Text> texts = new ArrayList<Text>();
        texts.add(new Text("first"));
        texts.add(new Text("second"));
        texts.add(new Text("third"));
        reducer.reduce(bw, texts.iterator(), output, reporter);
        reducer.close();
        String stdOut = readStdOut(conf);
        // test data: key
        assertTrue(stdOut.contains("reducer key :true"));
        // and values
        assertTrue(stdOut.contains("reduce value  :first"));
        assertTrue(stdOut.contains("reduce value  :second"));
        assertTrue(stdOut.contains("reduce value  :third"));
    } finally {
        if (psw != null) {
            // schedule password files for removal on JVM exit
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : Reporter(org.apache.hadoop.mapred.Reporter) ArrayList(java.util.ArrayList) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) Counter(org.apache.hadoop.mapred.Counters.Counter) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) JobConf(org.apache.hadoop.mapred.JobConf) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
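
BooleanWritable can act as the reducer key here because it implements WritableComparable, which MapReduce requires for sorting and grouping keys. A short standalone sketch of those value semantics (not taken from the Hadoop test):

import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableKeySemantics {
    public static void main(String[] args) {
        BooleanWritable f = new BooleanWritable(false);
        BooleanWritable t = new BooleanWritable(true);
        // compareTo orders false before true, which is how MapReduce sorts these keys.
        System.out.println(f.compareTo(t) < 0); // prints: true
        // equals() and hashCode() are value-based, so instances group by value.
        System.out.println(t.equals(new BooleanWritable(true))); // prints: true
    }
}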

Aggregations

BooleanWritable (org.apache.hadoop.io.BooleanWritable) 63
IntWritable (org.apache.hadoop.io.IntWritable) 41
LongWritable (org.apache.hadoop.io.LongWritable) 40
FloatWritable (org.apache.hadoop.io.FloatWritable) 37
Text (org.apache.hadoop.io.Text) 31
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 27
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable) 26
BytesWritable (org.apache.hadoop.io.BytesWritable) 26
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable) 25
Writable (org.apache.hadoop.io.Writable) 17
Test (org.junit.Test) 17
ArrayList (java.util.ArrayList) 15
Configuration (org.apache.hadoop.conf.Configuration) 12
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable) 12
Random (java.util.Random) 11
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 10
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable) 9
KeyValue (org.apache.hadoop.hbase.KeyValue) 7
Result (org.apache.hadoop.hbase.client.Result) 7
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) 7