Use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.
The class TestFixedLengthInputFormat, method testZeroRecordLength.
/**
 * Test with record length set to 0.
 */
@Test(timeout = 5000)
public void testZeroRecordLength() throws Exception {
  localFs.delete(workDir, true);
  Path file = new Path(workDir, "testFormat.txt");
  createFile(file, null, 10, 10);
  Job job = Job.getInstance(defaultConf);
  // Set the fixed length record length config property to an invalid value.
  FixedLengthInputFormat format = new FixedLengthInputFormat();
  format.setRecordLength(job.getConfiguration(), 0);
  FileInputFormat.setInputPaths(job, workDir);
  List<InputSplit> splits = format.getSplits(job);
  boolean exceptionThrown = false;
  for (InputSplit split : splits) {
    try {
      TaskAttemptContext context =
          MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
      RecordReader<LongWritable, BytesWritable> reader =
          format.createRecordReader(split, context);
      MapContext<LongWritable, BytesWritable, LongWritable, BytesWritable> mcontext =
          new MapContextImpl<LongWritable, BytesWritable, LongWritable, BytesWritable>(
              job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
              MapReduceTestUtil.createDummyReporter(), split);
      reader.initialize(split, mcontext);
    } catch (IOException ioe) {
      exceptionThrown = true;
      LOG.info("Exception message: " + ioe.getMessage());
    }
  }
  assertTrue("Exception for zero record length:", exceptionThrown);
}
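For contrast, a positive record length is the valid configuration. Below is a minimal, hedged sketch of wiring FixedLengthInputFormat into a job with a 10-byte record length; the input path and job name are hypothetical, while the API calls are the standard MapReduce ones used above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.FixedLengthInputFormat;

public class FixedLengthJobSketch {

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Every record is exactly 10 bytes; a value <= 0 fails at reader
    // initialization, which is what testZeroRecordLength asserts.
    FixedLengthInputFormat.setRecordLength(conf, 10);
    Job job = Job.getInstance(conf, "fixed-length-read");
    job.setInputFormatClass(FixedLengthInputFormat.class);
    // FixedLengthInputFormat emits <byte offset, raw record bytes> pairs.
    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(BytesWritable.class);
    FileInputFormat.setInputPaths(job, new Path("/tmp/fixed-input")); // hypothetical path
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}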
Use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.
The class TestMRSequenceFileInputFilter, method createSequenceFile.
private static void createSequenceFile(int numRecords) throws Exception {
  // Create a file with numRecords entries.
  SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, conf, inFile, Text.class, BytesWritable.class);
  try {
    for (int i = 1; i <= numRecords; i++) {
      Text key = new Text(Integer.toString(i));
      byte[] data = new byte[random.nextInt(10)];
      random.nextBytes(data);
      BytesWritable value = new BytesWritable(data);
      writer.append(key, value);
    }
  } finally {
    writer.close();
  }
}
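Reading the records back mirrors the writer. A minimal sketch, assuming the same fs, conf, and inFile fields as above (this Reader constructor is deprecated in Hadoop 2.x but matches the createWriter overload used here):

SequenceFile.Reader reader = new SequenceFile.Reader(fs, inFile, conf);
try {
  Text key = new Text();
  BytesWritable value = new BytesWritable();
  while (reader.next(key, value)) {
    // BytesWritable.toString() renders the contents as space-separated hex.
    System.out.println(key + " -> " + value.getLength() + " bytes: " + value);
  }
} finally {
  reader.close();
}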
Use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.
The class TestBufferPushPull, method testPush.
@Test
public void testPush() throws Exception {
  final byte[] buff = new byte[BUFFER_LENGTH];
  final InputBuffer input = new InputBuffer(buff);
  final OutputBuffer out = new OutputBuffer(buff);
  final Class<BytesWritable> iKClass = BytesWritable.class;
  final Class<BytesWritable> iVClass = BytesWritable.class;
  final RecordWriterForPush writer = new RecordWriterForPush() {

    @Override
    public void write(BytesWritable key, BytesWritable value) throws IOException {
      final KV expect = dataInput[count++];
      Assert.assertEquals(expect.key.toString(), key.toString());
      Assert.assertEquals(expect.value.toString(), value.toString());
    }
  };
  final BufferPushee pushee = new BufferPushee(iKClass, iVClass, writer);
  final PushTarget handler = new PushTarget(out) {

    @Override
    public void sendData() throws IOException {
      final int outputLength = out.length();
      input.rewind(0, outputLength);
      out.rewind();
      pushee.collect(input);
    }
  };
  final BufferPusher pusher = new BufferPusher(iKClass, iVClass, handler);
  writer.reset();
  for (int i = 0; i < INPUT_KV_COUNT; i++) {
    pusher.collect(dataInput[i].key, dataInput[i].value);
  }
  pusher.close();
  pushee.close();
}
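Conceptually, the push path serializes each key/value pair into a shared byte buffer, rewinds it, and deserializes on the receiving side. The following self-contained sketch reproduces that round-trip with the stable org.apache.hadoop.io buffer classes standing in for the nativetask-internal InputBuffer and OutputBuffer:

import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class BytesWritableRoundTrip {

  public static void main(String[] args) throws IOException {
    BytesWritable key = new BytesWritable("k1".getBytes("UTF-8"));
    BytesWritable value = new BytesWritable("v1".getBytes("UTF-8"));

    // Serialize both writables into one shared buffer, as the pusher does.
    DataOutputBuffer out = new DataOutputBuffer();
    key.write(out);
    value.write(out);

    // Rewind: point an input buffer at the bytes just written.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());

    // Deserialize on the receiving side, as the pushee does.
    BytesWritable keyCopy = new BytesWritable();
    BytesWritable valueCopy = new BytesWritable();
    keyCopy.readFields(in);
    valueCopy.readFields(in);

    // toString() gives a hex rendering, so equal contents compare equal.
    System.out.println(key.toString().equals(keyCopy.toString()));     // true
    System.out.println(value.toString().equals(valueCopy.toString())); // true
  }
}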
Use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.
The class TestBufferPushPull, method testPull.
@Test
public void testPull() throws Exception {
  final byte[] buff = new byte[BUFFER_LENGTH];
  final InputBuffer input = new InputBuffer(buff);
  final OutputBuffer out = new OutputBuffer(buff);
  final Class<BytesWritable> iKClass = BytesWritable.class;
  final Class<BytesWritable> iVClass = BytesWritable.class;
  final NativeHandlerForPull handler = new NativeHandlerForPull(input, out);
  final KeyValueIterator iter = new KeyValueIterator();
  final BufferPullee pullee = new BufferPullee(iKClass, iVClass, iter, handler);
  handler.setDataLoader(pullee);
  final BufferPuller puller = new BufferPuller(handler);
  handler.setDataReceiver(puller);
  int count = 0;
  while (puller.next()) {
    final DataInputBuffer key = puller.getKey();
    final DataInputBuffer value = puller.getValue();
    final BytesWritable keyBytes = new BytesWritable();
    final BytesWritable valueBytes = new BytesWritable();
    keyBytes.readFields(key);
    valueBytes.readFields(value);
    Assert.assertEquals(dataInput[count].key.toString(), keyBytes.toString());
    Assert.assertEquals(dataInput[count].value.toString(), valueBytes.toString());
    count++;
  }
  puller.close();
  pullee.close();
}
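Both tests compare keys and values through toString(), which renders a BytesWritable as hex text. BytesWritable also supports direct content comparison; a standalone sketch (nothing here comes from the tests themselves):

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableCompare {

  public static void main(String[] args) {
    BytesWritable a = new BytesWritable(new byte[] { 1, 2, 3 });
    BytesWritable b = new BytesWritable(new byte[] { 1, 2, 3 });
    System.out.println(a.equals(b));    // true: byte-for-byte content equality
    System.out.println(a.compareTo(b)); // 0: lexicographic byte order
    // copyBytes() returns exactly getLength() bytes; getBytes() may return
    // a larger padded backing array, so prefer copyBytes() for exact copies.
    byte[] exact = a.copyBytes();
    System.out.println(exact.length);   // 3
  }
}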
Use of org.apache.hadoop.io.BytesWritable in project hadoop by apache.
The class TestNativeCollectorOnlyHandler, method testCollect.
@Test
public void testCollect() throws IOException {
  this.handler = new NativeCollectorOnlyHandler(taskContext, nativeHandler, pusher, combiner);
  handler.collect(new BytesWritable(), new BytesWritable(), 100);
  // Closing twice verifies that close() is idempotent: each collaborator
  // below must still be closed exactly once.
  handler.close();
  handler.close();
  Mockito.verify(pusher, Mockito.times(1)).collect(Matchers.any(BytesWritable.class),
      Matchers.any(BytesWritable.class), Matchers.anyInt());
  Mockito.verify(pusher, Mockito.times(1)).close();
  Mockito.verify(combiner, Mockito.times(1)).close();
  Mockito.verify(nativeHandler, Mockito.times(1)).close();
}
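The back-to-back close() calls only pass if close is idempotent. A hypothetical guard-flag sketch of that pattern; the class and field names are illustrative, not Hadoop's actual NativeCollectorOnlyHandler internals:

import java.io.Closeable;
import java.io.IOException;

// Illustrative close-once pattern: the second and later close() calls
// return without touching the already-released collaborators.
public class CloseOnceHandler implements Closeable {

  private boolean closed = false;
  private final Closeable pusher;
  private final Closeable combiner;

  public CloseOnceHandler(Closeable pusher, Closeable combiner) {
    this.pusher = pusher;
    this.combiner = combiner;
  }

  @Override
  public synchronized void close() throws IOException {
    if (closed) {
      return; // no-op on repeated calls
    }
    closed = true;
    pusher.close();
    combiner.close();
  }
}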