Example 56 with FloatWritable

use of org.apache.hadoop.io.FloatWritable in project camel by apache.

the class HdfsConsumerTest method testReadFloat.

@Test
public void testReadFloat() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
    NullWritable keyWritable = NullWritable.get();
    FloatWritable valueWritable = new FloatWritable();
    float value = 3.1415926535f;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    context.addRoutes(new RouteBuilder() {

        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Also used : Path(org.apache.hadoop.fs.Path) FloatWritable(org.apache.hadoop.io.FloatWritable) Configuration(org.apache.hadoop.conf.Configuration) SequenceFile(org.apache.hadoop.io.SequenceFile) RouteBuilder(org.apache.camel.builder.RouteBuilder) MockEndpoint(org.apache.camel.component.mock.MockEndpoint) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayFile(org.apache.hadoop.io.ArrayFile) File(java.io.File) NullWritable(org.apache.hadoop.io.NullWritable) Test(org.junit.Test)
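
For reference, a minimal sketch of reading the written value back directly with SequenceFile.Reader; the class name ReadFloatBack and the standalone main method are illustrative, not part of the Camel test:

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;

public class ReadFloatBack {
    public static void main(String[] args) throws Exception {
        Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(file.toUri(), conf);
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
        NullWritable key = NullWritable.get();
        FloatWritable value = new FloatWritable();
        // next() refills the reusable writables until the file is exhausted
        while (reader.next(key, value)) {
            System.out.println("read float: " + value.get());
        }
        reader.close();
    }
}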

Example 57 with FloatWritable

use of org.apache.hadoop.io.FloatWritable in project hadoop by apache.

the class PipeApplicationRunnableStub method binaryProtocolStub.

public void binaryProtocolStub() {
    try {
        // initSoket()/closeSoket() and the dataInput/dataOut streams are
        // inherited from the test's common stub base class (the spelling
        // follows the project source)
        initSoket();
        System.out.println("start OK");
        // RUN_MAP.code should be 3
        int answer = WritableUtils.readVInt(dataInput);
        System.out.println("RunMap:" + answer);
        TestPipeApplication.FakeSplit split = new TestPipeApplication.FakeSplit();
        readObject(split, dataInput);
        WritableUtils.readVInt(dataInput);
        WritableUtils.readVInt(dataInput);
        // end runMap
        // get InputTypes
        WritableUtils.readVInt(dataInput);
        String inText = Text.readString(dataInput);
        System.out.println("Key class:" + inText);
        inText = Text.readString(dataInput);
        System.out.println("Value class:" + inText);
        @SuppressWarnings("unused") int inCode = 0;
        // read all records from the sender (message code 4) and print them
        while ((inCode = WritableUtils.readVInt(dataInput)) == 4) {
            FloatWritable key = new FloatWritable();
            NullWritable value = NullWritable.get();
            readObject(key, dataInput);
            System.out.println("value:" + key.get());
            readObject(value, dataInput);
        }
        WritableUtils.writeVInt(dataOut, 54);
        dataOut.flush();
        dataOut.close();
    } catch (Exception x) {
        x.printStackTrace();
    } finally {
        closeSoket();
    }
}
Also used : FloatWritable(org.apache.hadoop.io.FloatWritable) NullWritable(org.apache.hadoop.io.NullWritable)
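
The readObject call above is a framing helper from the test's stub base class; the underlying primitives are the standard WritableUtils VInts plus FloatWritable's own serialization. A self-contained sketch of that round trip over an in-memory stream (the message code 4 mirrors the loop above; the class name and the 2.5f value are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.WritableUtils;

public class VIntFloatDemo {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        WritableUtils.writeVInt(out, 4);     // message code, as in the stub's loop
        new FloatWritable(2.5f).write(out);  // FloatWritable writes its 4-byte payload
        out.flush();

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        int code = WritableUtils.readVInt(in);
        FloatWritable key = new FloatWritable();
        key.readFields(in);
        System.out.println("code:" + code + " value:" + key.get());
    }
}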

Example 58 with FloatWritable

use of org.apache.hadoop.io.FloatWritable in project hadoop by apache.

the class TestPipeApplication method testApplication.

/**
   * tests org.apache.hadoop.mapred.pipes.Application
   * tests internal functions: MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS...
   *
   * @throws Throwable
   */
@Test
public void testApplication() throws Throwable {
    JobConf conf = new JobConf();
    RecordReader<FloatWritable, NullWritable> rReader = new Reader();
    // client for test
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationStub");
    TestTaskReporter reporter = new TestTaskReporter();
    File[] psw = cleanTokenPasswordFile();
    try {
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        FakeCollector output = new FakeCollector(new Counters.Counter(), new Progress());
        FileSystem fs = new RawLocalFileSystem();
        fs.initialize(FsConstants.LOCAL_FS_URI, conf);
        Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(new Path(workSpace.getAbsolutePath() + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true);
        output.setWriter(wr);
        conf.set(Submitter.PRESERVE_COMMANDFILE, "true");
        initStdOut(conf);
        Application<WritableComparable<IntWritable>, Writable, IntWritable, Text> application = new Application<WritableComparable<IntWritable>, Writable, IntWritable, Text>(conf, rReader, output, reporter, IntWritable.class, Text.class);
        application.getDownlink().flush();
        application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));
        application.getDownlink().flush();
        application.waitForFinish();
        wr.close();
        // test getDownlink().mapItem();
        String stdOut = readStdOut(conf);
        assertTrue(stdOut.contains("key:3"));
        assertTrue(stdOut.contains("value:txt"));
        // the reporter's counter and status should have been sent
        // test MessageType.REGISTER_COUNTER and INCREMENT_COUNTER
        assertEquals(1.0, reporter.getProgress(), 0.01);
        assertNotNull(reporter.getCounter("group", "name"));
        // test status MessageType.STATUS
        assertEquals(reporter.getStatus(), "PROGRESS");
        stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator + "outfile"));
        // check MessageType.PROGRESS
        assertEquals(0.55f, rReader.getProgress(), 0.001);
        application.getDownlink().close();
        // test MessageType.OUTPUT
        Entry<IntWritable, Text> entry = output.getCollect().entrySet().iterator().next();
        assertEquals(123, entry.getKey().get());
        assertEquals("value", entry.getValue().toString());
        try {
            // try to abort
            application.abort(new Throwable());
            fail();
        } catch (IOException e) {
            // abort should fail with "pipe child exception"
            assertEquals("pipe child exception", e.getMessage());
        }
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) RecordReader(org.apache.hadoop.mapred.RecordReader) NullWritable(org.apache.hadoop.io.NullWritable) Writable(org.apache.hadoop.io.Writable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Token(org.apache.hadoop.security.token.Token) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) FileSystem(org.apache.hadoop.fs.FileSystem) JobConf(org.apache.hadoop.mapred.JobConf) Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) Counter(org.apache.hadoop.mapred.Counters.Counter) WritableComparable(org.apache.hadoop.io.WritableComparable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) Writer(org.apache.hadoop.mapred.IFile.Writer) Test(org.junit.Test)
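
Both this test and the next one register a job token before driving the pipes application. Isolated, that pattern looks roughly like the sketch below; the identifier bytes, password bytes, kind, and service strings are placeholders exactly as in the test, and the class name JobTokenSetup is invented for illustration:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;

public class JobTokenSetup {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // identifier bytes, password bytes, kind, and service are all placeholders
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
                "user".getBytes(), "password".getBytes(),
                new Text("kind"), new Text("service"));
        // stash the token in the job's credentials for the pipes child to pick up
        TokenCache.setJobToken(token, conf.getCredentials());
        System.out.println("tokens registered: " + conf.getCredentials().numberOfTokens());
    }
}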

Example 59 with FloatWritable

use of org.apache.hadoop.io.FloatWritable in project hadoop by apache.

the class TestPipeApplication method testRunner.

/**
   * tests PipesMapRunner: verifies the transfer of data from the reader
   *
   * @throws Exception
   */
@Test
public void testRunner() throws Exception {
    // clean old password files
    File[] psw = cleanTokenPasswordFile();
    try {
        RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
        JobConf conf = new JobConf();
        conf.set(Submitter.IS_JAVA_RR, "true");
        // for stdout and stderr
        conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
        CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(new Counters.Counter(), new Progress());
        FileSystem fs = new RawLocalFileSystem();
        fs.initialize(FsConstants.LOCAL_FS_URI, conf);
        Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(new Path(workSpace + File.separator + "outfile")), IntWritable.class, Text.class, null, null, true);
        output.setWriter(wr);
        // stub for client
        File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");
        conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
        // token for authorization
        Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>("user".getBytes(), "password".getBytes(), new Text("kind"), new Text("service"));
        TokenCache.setJobToken(token, conf.getCredentials());
        conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
        TestTaskReporter reporter = new TestTaskReporter();
        PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();
        initStdOut(conf);
        runner.configure(conf);
        runner.run(rReader, output, reporter);
        String stdOut = readStdOut(conf);
        // check part of the transferred data; the client and the test share
        // a common stdout file
        // check version
        assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
        // check key and value classes
        assertTrue(stdOut.contains("Key class:org.apache.hadoop.io.FloatWritable"));
        assertTrue(stdOut.contains("Value class:org.apache.hadoop.io.NullWritable"));
        // verify that all data from the reader has been sent
        assertTrue(stdOut.contains("value:0.0"));
        assertTrue(stdOut.contains("value:9.0"));
    } finally {
        if (psw != null) {
            // remove password files
            for (File file : psw) {
                file.deleteOnExit();
            }
        }
    }
}
Also used : RawLocalFileSystem(org.apache.hadoop.fs.RawLocalFileSystem) Token(org.apache.hadoop.security.token.Token) AMRMTokenIdentifier(org.apache.hadoop.yarn.security.AMRMTokenIdentifier) FileSystem(org.apache.hadoop.fs.FileSystem) JobConf(org.apache.hadoop.mapred.JobConf) IntWritable(org.apache.hadoop.io.IntWritable) Path(org.apache.hadoop.fs.Path) Text(org.apache.hadoop.io.Text) NullWritable(org.apache.hadoop.io.NullWritable) Counter(org.apache.hadoop.mapred.Counters.Counter) FloatWritable(org.apache.hadoop.io.FloatWritable) Counters(org.apache.hadoop.mapred.Counters) File(java.io.File) Writer(org.apache.hadoop.mapred.IFile.Writer) Test(org.junit.Test)
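
The ReaderPipesMapRunner class is not shown in this excerpt. Judging by the value:0.0 and value:9.0 assertions, a reader in the same spirit would emit ten float keys; the sketch below is an assumption of that shape (the class name and record count are illustrative), built on the old org.apache.hadoop.mapred.RecordReader interface the test uses:

import java.io.IOException;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.RecordReader;

// Emits float keys 0.0 through 9.0, then reports end-of-input.
class StubFloatReader implements RecordReader<FloatWritable, NullWritable> {
    private int index = 0;
    private static final int LIMIT = 10; // assumed record count

    public boolean next(FloatWritable key, NullWritable value) throws IOException {
        if (index >= LIMIT) {
            return false;
        }
        key.set(index++);
        return true;
    }

    public FloatWritable createKey() { return new FloatWritable(); }
    public NullWritable createValue() { return NullWritable.get(); }
    public long getPos() throws IOException { return index; }
    public void close() throws IOException { }
    public float getProgress() throws IOException { return index / (float) LIMIT; }
}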

Example 60 with FloatWritable

use of org.apache.hadoop.io.FloatWritable in project hadoop by apache.

the class TestTupleWritable method testWritable.

@Test
public void testWritable() throws Exception {
    Random r = new Random();
    Writable[] writs = { new BooleanWritable(r.nextBoolean()), new FloatWritable(r.nextFloat()), new FloatWritable(r.nextFloat()), new IntWritable(r.nextInt()), new LongWritable(r.nextLong()), new BytesWritable("dingo".getBytes()), new LongWritable(r.nextLong()), new IntWritable(r.nextInt()), new BytesWritable("yak".getBytes()), new IntWritable(r.nextInt()) };
    TupleWritable sTuple = makeTuple(writs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    sTuple.write(new DataOutputStream(out));
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TupleWritable dTuple = new TupleWritable();
    dTuple.readFields(new DataInputStream(in));
    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
Also used : DataOutputStream(java.io.DataOutputStream) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) IntWritable(org.apache.hadoop.io.IntWritable) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream) Random(java.util.Random) ByteArrayInputStream(java.io.ByteArrayInputStream) Test(org.junit.Test)
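
Beyond the serialization round trip, TupleWritable also supports positional access. A small sketch, assuming the org.apache.hadoop.mapred.join variant of TupleWritable (the class name and field values are illustrative); get(i) and size() work regardless of which positions were marked written:

import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.join.TupleWritable;

public class TupleAccessDemo {
    public static void main(String[] args) {
        Writable[] fields = { new FloatWritable(1.5f), new IntWritable(7) };
        TupleWritable tuple = new TupleWritable(fields);
        // get(i) returns the i-th field; size() reports the arity
        for (int i = 0; i < tuple.size(); i++) {
            System.out.println("field " + i + ": " + tuple.get(i));
        }
    }
}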

Aggregations

FloatWritable (org.apache.hadoop.io.FloatWritable): 111
IntWritable (org.apache.hadoop.io.IntWritable): 68
LongWritable (org.apache.hadoop.io.LongWritable): 65
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 54
Text (org.apache.hadoop.io.Text): 51
Test (org.junit.Test): 49
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 44
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 40
BytesWritable (org.apache.hadoop.io.BytesWritable): 40
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 37
Writable (org.apache.hadoop.io.Writable): 28
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 27
ArrayList (java.util.ArrayList): 24
Configuration (org.apache.hadoop.conf.Configuration): 18
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 18
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18
Path (org.apache.hadoop.fs.Path): 17
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 17
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 17