Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
From the class TestCryptoStreams, method getInputStream.
@Override
protected InputStream getInputStream(int bufferSize, byte[] key, byte[] iv)
    throws IOException {
  DataInputBuffer in = new DataInputBuffer();
  in.reset(buf, 0, bufLen);
  return new CryptoInputStream(new FakeInputStream(in), codec, bufferSize, key, iv);
}
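DataInputBuffer is itself an InputStream over an in-memory byte array, which is what allows it to feed the decrypting CryptoInputStream above. A minimal standalone sketch of that underlying pattern (the buffer contents here are invented for illustration):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;

public class DataInputBufferSketch {
  public static void main(String[] args) throws IOException {
    byte[] data = "hello".getBytes();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(data, 0, data.length); // point the buffer at the backing array
    int b;
    while ((b = in.read()) != -1) { // read it like any other InputStream
      System.out.print((char) b);
    }
  }
}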
Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
From the class TestDelegationToken, method testSerialization.
@Test
public void testSerialization() throws Exception {
  TestDelegationTokenIdentifier origToken = new TestDelegationTokenIdentifier(
      new Text("alice"), new Text("bob"), new Text("colin"));
  TestDelegationTokenIdentifier newToken = new TestDelegationTokenIdentifier();
  origToken.setIssueDate(123);
  origToken.setMasterKeyId(321);
  origToken.setMaxDate(314);
  origToken.setSequenceNumber(12345);
  // clone origToken into newToken
  DataInputBuffer inBuf = new DataInputBuffer();
  DataOutputBuffer outBuf = new DataOutputBuffer();
  origToken.write(outBuf);
  inBuf.reset(outBuf.getData(), 0, outBuf.getLength());
  newToken.readFields(inBuf);
  // now test the fields
  assertEquals("alice", newToken.getUser().getUserName());
  assertEquals(new Text("bob"), newToken.getRenewer());
  assertEquals("colin", newToken.getUser().getRealUser().getUserName());
  assertEquals(123, newToken.getIssueDate());
  assertEquals(321, newToken.getMasterKeyId());
  assertEquals(314, newToken.getMaxDate());
  assertEquals(12345, newToken.getSequenceNumber());
  assertEquals(origToken, newToken);
}
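The write/readFields round trip through paired buffers shown above is the standard way to clone any Writable. A minimal sketch of the same pattern using org.apache.hadoop.io.Text (the values are invented):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class WritableRoundTrip {
  public static void main(String[] args) throws IOException {
    Text original = new Text("alice");
    // Serialize into an in-memory output buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    original.write(out);
    // Point an input buffer at the serialized bytes and read them back.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    Text copy = new Text();
    copy.readFields(in);
    System.out.println(original.equals(copy)); // prints true
  }
}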
Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
From the class ReduceContextImpl, method nextKeyValue.
/**
* Advance to the next key/value pair.
*/
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  if (!hasMore) {
    key = null;
    value = null;
    return false;
  }
  // This is the first value for a new key unless the previous lookahead
  // saw the same key again.
  firstValue = !nextKeyIsSame;
  DataInputBuffer nextKey = input.getKey();
  currentRawKey.set(nextKey.getData(), nextKey.getPosition(),
      nextKey.getLength() - nextKey.getPosition());
  buffer.reset(currentRawKey.getBytes(), 0, currentRawKey.getLength());
  key = keyDeserializer.deserialize(key);
  DataInputBuffer nextVal = input.getValue();
  buffer.reset(nextVal.getData(), nextVal.getPosition(),
      nextVal.getLength() - nextVal.getPosition());
  value = valueDeserializer.deserialize(value);
  currentKeyLength = nextKey.getLength() - nextKey.getPosition();
  currentValueLength = nextVal.getLength() - nextVal.getPosition();
  if (isMarked) {
    // Preserve the raw pair so the value iterator can be reset to the mark.
    backupStore.write(nextKey, nextVal);
  }
  // Look ahead one record to decide whether the following value belongs to
  // the same key, comparing on the serialized bytes.
  hasMore = input.next();
  if (hasMore) {
    nextKey = input.getKey();
    nextKeyIsSame = comparator.compare(currentRawKey.getBytes(), 0,
        currentRawKey.getLength(), nextKey.getData(), nextKey.getPosition(),
        nextKey.getLength() - nextKey.getPosition()) == 0;
  } else {
    nextKeyIsSame = false;
  }
  inputValueCounter.increment(1);
  return true;
}
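The nextKeyIsSame lookahead compares keys on their serialized bytes rather than deserializing them first. A minimal sketch of that raw comparison using WritableComparator (the keys are invented Text values):

import java.io.IOException;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class RawKeyCompareSketch {
  public static void main(String[] args) throws IOException {
    DataOutputBuffer a = new DataOutputBuffer();
    new Text("apple").write(a);
    DataOutputBuffer b = new DataOutputBuffer();
    new Text("apple").write(b);
    // Equal keys compare to 0 without ever being deserialized.
    int cmp = WritableComparator.get(Text.class).compare(
        a.getData(), 0, a.getLength(), b.getData(), 0, b.getLength());
    System.out.println(cmp == 0); // prints true
  }
}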
Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
From the class TestFSCheckpointID, method testFSCheckpointIDSerialization.
@Test
public void testFSCheckpointIDSerialization() throws IOException {
  Path inpath = new Path("/tmp/blah");
  FSCheckpointID cidin = new FSCheckpointID(inpath);
  DataOutputBuffer out = new DataOutputBuffer();
  cidin.write(out);
  out.close();
  FSCheckpointID cidout = new FSCheckpointID(null);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  cidout.readFields(in);
  in.close();
  // Use a JUnit assertion rather than the assert keyword, which is a
  // silent no-op unless the JVM runs with -ea.
  assertEquals(cidin, cidout);
}
Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.
From the class TestMerger, method getValueAnswer.
private Answer<?> getValueAnswer(final String segmentName) {
  return new Answer<Void>() {
    int i = 0;

    public Void answer(InvocationOnMock invocation) {
      Object[] args = invocation.getArguments();
      // The mocked reader's first argument is the caller-supplied value
      // buffer; fill its first 20 bytes with a synthetic segment value.
      DataInputBuffer value = (DataInputBuffer) args[0];
      value.reset(("Segment Value " + segmentName + i).getBytes(), 20);
      return null;
    }
  };
}
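An Answer like this is typically wired to a mocked reader so that each call fills the caller-supplied buffer. A minimal sketch of that wiring (the RawValueReader interface is hypothetical, standing in for the IFile.Reader mocked in TestMerger):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import org.apache.hadoop.io.DataInputBuffer;

public class ValueAnswerSketch {
  // Hypothetical stand-in for the reader interface being mocked.
  interface RawValueReader {
    void nextRawValue(DataInputBuffer value);
  }

  public static void main(String[] args) {
    RawValueReader reader = mock(RawValueReader.class);
    // Fill the caller-supplied buffer on every nextRawValue() call.
    doAnswer(invocation -> {
      DataInputBuffer value = invocation.getArgument(0);
      byte[] bytes = "Segment Value s00".getBytes();
      value.reset(bytes, bytes.length);
      return null;
    }).when(reader).nextRawValue(any(DataInputBuffer.class));

    DataInputBuffer buf = new DataInputBuffer();
    reader.nextRawValue(buf); // buf now wraps the stubbed bytes
  }
}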