Search in sources:

Example 16 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.

The class TestFixedLengthInputFormat, method testZeroRecordLength.

/**
 * Test with the fixed record length set to 0.
 *
 * A fixed-length record reader cannot make progress with a zero record
 * length, so initializing the reader is expected to fail with an
 * IOException for every split.
 */
@Test(timeout = 5000)
public void testZeroRecordLength() throws Exception {
    localFs.delete(workDir, true);
    // Plain literal; the original "new String(...)" wrapper was redundant.
    Path file = new Path(workDir, "testFormat.txt");
    createFile(file, null, 10, 10);
    Job job = Job.getInstance(defaultConf);
    // Set the fixed length record length config property to the invalid value 0
    FixedLengthInputFormat format = new FixedLengthInputFormat();
    format.setRecordLength(job.getConfiguration(), 0);
    FileInputFormat.setInputPaths(job, workDir);
    List<InputSplit> splits = format.getSplits(job);
    boolean exceptionThrown = false;
    for (InputSplit split : splits) {
        RecordReader<LongWritable, BytesWritable> reader = null;
        try {
            TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
            reader = format.createRecordReader(split, context);
            MapContext<LongWritable, BytesWritable, LongWritable, BytesWritable> mcontext = new MapContextImpl<LongWritable, BytesWritable, LongWritable, BytesWritable>(job.getConfiguration(), context.getTaskAttemptID(), reader, null, null, MapReduceTestUtil.createDummyReporter(), split);
            reader.initialize(split, mcontext);
        } catch (IOException ioe) {
            exceptionThrown = true;
            LOG.info("Exception message:" + ioe.getMessage());
        } finally {
            // Close the reader so the underlying stream does not leak even
            // when initialize() fails (which is the expected path here).
            if (reader != null) {
                reader.close();
            }
        }
    }
    assertTrue("Exception for zero record length:", exceptionThrown);
}
Also used : Path(org.apache.hadoop.fs.Path) MapContextImpl(org.apache.hadoop.mapreduce.task.MapContextImpl) TaskAttemptContext(org.apache.hadoop.mapreduce.TaskAttemptContext) BytesWritable(org.apache.hadoop.io.BytesWritable) IOException(java.io.IOException) LongWritable(org.apache.hadoop.io.LongWritable) Job(org.apache.hadoop.mapreduce.Job) InputSplit(org.apache.hadoop.mapreduce.InputSplit) Test(org.junit.Test)

Example 17 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.

The class TestMRSequenceFileInputFilter, method createSequenceFile.

/**
 * Creates a SequenceFile at {@code inFile} containing {@code numRecords}
 * entries. Keys are the record numbers 1..numRecords as Text; values are
 * random byte arrays of length 0-9 wrapped in BytesWritable.
 *
 * @param numRecords number of key/value records to write
 * @throws Exception if the file cannot be created or written
 */
private static void createSequenceFile(int numRecords) throws Exception {
    // try-with-resources guarantees the writer is closed even on failure,
    // replacing the manual try/finally block.
    try (SequenceFile.Writer writer =
            SequenceFile.createWriter(fs, conf, inFile, Text.class, BytesWritable.class)) {
        for (int i = 1; i <= numRecords; i++) {
            Text key = new Text(Integer.toString(i));
            byte[] data = new byte[random.nextInt(10)];
            random.nextBytes(data);
            writer.append(key, new BytesWritable(data));
        }
    }
}
Also used : SequenceFile(org.apache.hadoop.io.SequenceFile) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable)

Example 18 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.

The class TestBufferPushPull, method testPush.

/**
 * Pushes every input key/value pair through a BufferPusher and verifies,
 * via a callback writer, that each pair arriving at the BufferPushee
 * matches the corresponding entry in dataInput.
 */
@Test
public void testPush() throws Exception {
    final byte[] sharedBuffer = new byte[BUFFER_LENGTH];
    final InputBuffer inputSide = new InputBuffer(sharedBuffer);
    final OutputBuffer outputSide = new OutputBuffer(sharedBuffer);
    final Class<BytesWritable> keyClass = BytesWritable.class;
    final Class<BytesWritable> valueClass = BytesWritable.class;
    // Writer that checks each delivered pair against the expected input.
    final RecordWriterForPush checkingWriter = new RecordWriterForPush() {

        @Override
        public void write(BytesWritable key, BytesWritable value) throws IOException {
            final KV expected = dataInput[count++];
            Assert.assertEquals(expected.key.toString(), key.toString());
            Assert.assertEquals(expected.value.toString(), value.toString());
        }
    };
    final BufferPushee pushee = new BufferPushee(keyClass, valueClass, checkingWriter);
    // Hands the serialized output buffer over to the pushee as its input.
    final PushTarget target = new PushTarget(outputSide) {

        @Override
        public void sendData() throws IOException {
            final int bytesWritten = outputSide.length();
            inputSide.rewind(0, bytesWritten);
            outputSide.rewind();
            pushee.collect(inputSide);
        }
    };
    final BufferPusher pusher = new BufferPusher(keyClass, valueClass, target);
    checkingWriter.reset();
    for (int idx = 0; idx < INPUT_KV_COUNT; idx++) {
        pusher.collect(dataInput[idx].key, dataInput[idx].value);
    }
    pusher.close();
    pushee.close();
}
Also used : BufferPusher(org.apache.hadoop.mapred.nativetask.handlers.BufferPusher) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) BytesWritable(org.apache.hadoop.io.BytesWritable) KV(org.apache.hadoop.mapred.nativetask.testutil.TestInput.KV) BufferPushee(org.apache.hadoop.mapred.nativetask.handlers.BufferPushee) Test(org.junit.Test)

Example 19 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.

The class TestBufferPushPull, method testPull.

/**
 * Pulls all key/value pairs back out through a BufferPuller and checks
 * that each deserialized pair matches the corresponding dataInput entry.
 */
@Test
public void testPull() throws Exception {
    final byte[] sharedBuffer = new byte[BUFFER_LENGTH];
    final InputBuffer inputSide = new InputBuffer(sharedBuffer);
    final OutputBuffer outputSide = new OutputBuffer(sharedBuffer);
    final Class<BytesWritable> keyClass = BytesWritable.class;
    final Class<BytesWritable> valueClass = BytesWritable.class;
    final NativeHandlerForPull pullHandler = new NativeHandlerForPull(inputSide, outputSide);
    final KeyValueIterator sourceIterator = new KeyValueIterator();
    final BufferPullee pullee = new BufferPullee(keyClass, valueClass, sourceIterator, pullHandler);
    pullHandler.setDataLoader(pullee);
    final BufferPuller puller = new BufferPuller(pullHandler);
    pullHandler.setDataReceiver(puller);
    int recordIndex = 0;
    while (puller.next()) {
        final DataInputBuffer rawKey = puller.getKey();
        final DataInputBuffer rawValue = puller.getValue();
        // Deserialize the raw buffers back into writables for comparison.
        final BytesWritable deserializedKey = new BytesWritable();
        final BytesWritable deserializedValue = new BytesWritable();
        deserializedKey.readFields(rawKey);
        deserializedValue.readFields(rawValue);
        Assert.assertEquals(dataInput[recordIndex].key.toString(), deserializedKey.toString());
        Assert.assertEquals(dataInput[recordIndex].value.toString(), deserializedValue.toString());
        recordIndex++;
    }
    puller.close();
    pullee.close();
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) RawKeyValueIterator(org.apache.hadoop.mapred.RawKeyValueIterator) BufferPullee(org.apache.hadoop.mapred.nativetask.handlers.BufferPullee) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) BytesWritable(org.apache.hadoop.io.BytesWritable) BufferPuller(org.apache.hadoop.mapred.nativetask.handlers.BufferPuller) Test(org.junit.Test)

Example 20 with BytesWritable

use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.

The class TestNativeCollectorOnlyHandler, method testCollect.

/**
 * Verifies that NativeCollectorOnlyHandler forwards a collected pair to
 * the pusher, and that close() is idempotent: the handler is closed twice
 * but each collaborator (pusher, combiner, native handler) must see
 * exactly one close() call.
 */
@Test
public void testCollect() throws IOException {
    this.handler = new NativeCollectorOnlyHandler(taskContext, nativeHandler, pusher, combiner);
    handler.collect(new BytesWritable(), new BytesWritable(), 100);
    // Intentional double close — the second call must be a no-op,
    // enforced by the times(1) verifications below.
    handler.close();
    handler.close();
    Mockito.verify(pusher, Mockito.times(1)).collect(Matchers.any(BytesWritable.class), Matchers.any(BytesWritable.class), Matchers.anyInt());
    Mockito.verify(pusher, Mockito.times(1)).close();
    Mockito.verify(combiner, Mockito.times(1)).close();
    Mockito.verify(nativeHandler, Mockito.times(1)).close();
}
Also used : BytesWritable(org.apache.hadoop.io.BytesWritable) Test(org.junit.Test)

Aggregations

BytesWritable (org.apache.hadoop.io.BytesWritable)339 Test (org.junit.Test)92 Text (org.apache.hadoop.io.Text)81 LongWritable (org.apache.hadoop.io.LongWritable)66 IntWritable (org.apache.hadoop.io.IntWritable)54 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)51 ArrayList (java.util.ArrayList)48 List (java.util.List)48 Path (org.apache.hadoop.fs.Path)47 IOException (java.io.IOException)42 Configuration (org.apache.hadoop.conf.Configuration)41 FloatWritable (org.apache.hadoop.io.FloatWritable)37 Writable (org.apache.hadoop.io.Writable)36 BooleanWritable (org.apache.hadoop.io.BooleanWritable)35 FileSystem (org.apache.hadoop.fs.FileSystem)28 SequenceFile (org.apache.hadoop.io.SequenceFile)27 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)26 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)26 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)25 Random (java.util.Random)24