Example 96 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From class TestCompressionStreamReuse, method resetStateTest.

private void resetStateTest(Configuration conf, int seed, int count, String codecClass) throws IOException {
    // Create the codec
    CompressionCodec codec = null;
    try {
        codec = (CompressionCodec) ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
    } catch (ClassNotFoundException cnfe) {
        // Preserve the cause and name the offending class instead of swallowing it
        throw new IOException("Illegal codec: " + codecClass, cnfe);
    }
    LOG.info("Created a Codec object of type: " + codecClass);
    // Generate data
    DataOutputBuffer data = new DataOutputBuffer();
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; ++i) {
        generator.next();
        RandomDatum key = generator.getKey();
        RandomDatum value = generator.getValue();
        key.write(data);
        value.write(data);
    }
    LOG.info("Generated " + count + " records");
    // Compress data
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(compressedDataBuffer));
    CompressionOutputStream deflateFilter = codec.createOutputStream(deflateOut);
    deflateFilter.write(data.getData(), 0, data.getLength());
    deflateFilter.finish();
    deflateFilter.flush();
    LOG.info("Finished compressing data");
    // Reset the compressor state so the same stream object can be reused
    deflateFilter.resetState();
    LOG.info("Finished resetting deflater");
    // re-generate data
    data.reset();
    generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; ++i) {
        generator.next();
        RandomDatum key = generator.getKey();
        RandomDatum value = generator.getValue();
        key.write(data);
        value.write(data);
    }
    DataInputBuffer originalData = new DataInputBuffer();
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    originalData.reset(data.getData(), 0, data.getLength());
    // re-compress data
    compressedDataBuffer.reset();
    deflateOut = new DataOutputStream(new BufferedOutputStream(compressedDataBuffer));
    deflateFilter = codec.createOutputStream(deflateOut);
    deflateFilter.write(data.getData(), 0, data.getLength());
    deflateFilter.finish();
    deflateFilter.flush();
    LOG.info("Finished re-compressing data");
    // De-compress data
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
    CompressionInputStream inflateFilter = codec.createInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
    // Check
    for (int i = 0; i < count; ++i) {
        RandomDatum k1 = new RandomDatum();
        RandomDatum v1 = new RandomDatum();
        k1.readFields(originalIn);
        v1.readFields(originalIn);
        RandomDatum k2 = new RandomDatum();
        RandomDatum v2 = new RandomDatum();
        k2.readFields(inflateIn);
        v2.readFields(inflateIn);
        assertTrue("original and compressed-then-decompressed-output not equal", k1.equals(k2) && v1.equals(v2));
    }
    LOG.info("SUCCESS! Completed checking " + count + " records");
}
Also used: DataOutputStream (java.io.DataOutputStream), IOException (java.io.IOException), DataInputStream (java.io.DataInputStream), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), BufferedInputStream (java.io.BufferedInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), BufferedOutputStream (java.io.BufferedOutputStream), RandomDatum (org.apache.hadoop.io.RandomDatum)
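
Every example on this page leans on the same round trip: Writables are serialized into a DataOutputBuffer, and the backing array is then handed to a DataInputBuffer for reading without an extra copy. A minimal, self-contained sketch of that pattern (the Text value is purely illustrative):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class BufferRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serialize a Writable into an in-memory, growable buffer
        DataOutputBuffer out = new DataOutputBuffer();
        new Text("hello").write(out);
        // Wrap the same backing array for reading. Use getLength(), not
        // getData().length: the backing array is usually over-allocated.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), 0, out.getLength());
        Text copy = new Text();
        copy.readFields(in);
        System.out.println(copy); // hello
    }
}

The getLength() detail is why the examples above always pass (getData(), 0, getLength()) rather than the raw array alone.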

Example 97 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From class TestZStandardCompressorDecompressor, method testCompressorDecompressorLogicWithCompressionStreams.

// Test the compress/decompress round trip through the
// CompressionOutputStream/CompressionInputStream API
@Test
public void testCompressorDecompressorLogicWithCompressionStreams() throws Exception {
    DataOutputStream deflateOut = null;
    DataInputStream inflateIn = null;
    int byteSize = 1024 * 100;
    byte[] bytes = generate(byteSize);
    int bufferSize = IO_FILE_BUFFER_SIZE_DEFAULT;
    try {
        DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
        CompressionOutputStream deflateFilter = new CompressorStream(compressedDataBuffer, new ZStandardCompressor(), bufferSize);
        deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
        deflateOut.write(bytes, 0, bytes.length);
        deflateOut.flush();
        deflateFilter.finish();
        DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
        deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0, compressedDataBuffer.getLength());
        CompressionInputStream inflateFilter = new DecompressorStream(deCompressedDataBuffer, new ZStandardDecompressor(bufferSize), bufferSize);
        inflateIn = new DataInputStream(new BufferedInputStream(inflateFilter));
        byte[] result = new byte[byteSize];
        // readFully() blocks until the whole buffer is filled; a bare
        // read() may legally return fewer bytes than requested
        inflateIn.readFully(result);
        assertArrayEquals("original array not equals compress/decompressed array", bytes, result);
    } finally {
        IOUtils.closeQuietly(deflateOut);
        IOUtils.closeQuietly(inflateIn);
    }
}
Also used: DecompressorStream (org.apache.hadoop.io.compress.DecompressorStream), CompressionOutputStream (org.apache.hadoop.io.compress.CompressionOutputStream), CompressorStream (org.apache.hadoop.io.compress.CompressorStream), CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream), DataOutputStream (java.io.DataOutputStream), DataInputStream (java.io.DataInputStream), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), BufferedInputStream (java.io.BufferedInputStream), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), BufferedOutputStream (java.io.BufferedOutputStream), Test (org.junit.Test)
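
The test wires CompressorStream and DecompressorStream together by hand; production code more often goes through the codec API, which picks the matching compressor/decompressor pair for you. A hedged sketch, assuming the native hadoop library with zstd support is loaded (ZStandardCodec requires it):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.ZStandardCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecApiSketch {
    static byte[] roundTrip(byte[] data) throws Exception {
        Configuration conf = new Configuration();
        // ReflectionUtils injects the Configuration the codec needs
        CompressionCodec codec = ReflectionUtils.newInstance(ZStandardCodec.class, conf);
        // Compress into an in-memory buffer
        DataOutputBuffer compressed = new DataOutputBuffer();
        CompressionOutputStream out = codec.createOutputStream(compressed);
        out.write(data, 0, data.length);
        out.finish(); // flush the final compressed block
        out.close();
        // Decompress from the exact written range
        DataInputBuffer in = new DataInputBuffer();
        in.reset(compressed.getData(), 0, compressed.getLength());
        CompressionInputStream cin = codec.createInputStream(in);
        byte[] result = new byte[data.length];
        new java.io.DataInputStream(cin).readFully(result);
        return result;
    }
}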

Example 98 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From class TestBlockToken, method testEmptyLegacyBlockTokenBytesIsLegacy.

@Test
public void testEmptyLegacyBlockTokenBytesIsLegacy() throws IOException {
    BlockTokenIdentifier emptyIdent = new BlockTokenIdentifier();
    DataOutputBuffer dob = new DataOutputBuffer(4096);
    DataInputBuffer dib = new DataInputBuffer();
    emptyIdent.writeLegacy(dob);
    byte[] emptyIdentBytes = Arrays.copyOf(dob.getData(), dob.getLength());
    BlockTokenIdentifier legacyToken = new BlockTokenIdentifier();
    BlockTokenIdentifier protobufToken = new BlockTokenIdentifier();
    BlockTokenIdentifier readToken = new BlockTokenIdentifier();
    dib.reset(emptyIdentBytes, emptyIdentBytes.length);
    legacyToken.readFieldsLegacy(dib);
    boolean invalidProtobufMessage = false;
    try {
        dib.reset(emptyIdentBytes, emptyIdentBytes.length);
        protobufToken.readFieldsProtobuf(dib);
    } catch (IOException e) {
        invalidProtobufMessage = true;
    }
    assertTrue(invalidProtobufMessage);
    dib.reset(emptyIdentBytes, emptyIdentBytes.length);
    // The format-detecting readFields() must fall back to the legacy
    // parser without throwing, since the bytes are not a valid protobuf,
    // and must produce the same identifier as the direct legacy read
    readToken.readFields(dib);
    assertTrue(legacyToken.equals(readToken));
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), IOException (java.io.IOException), Test (org.junit.Test)

Example 99 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From class TestEditsDoubleBuffer, method testDoubleBuffer.

@Test
public void testDoubleBuffer() throws IOException {
    EditsDoubleBuffer buf = new EditsDoubleBuffer(1024);
    assertTrue(buf.isFlushed());
    byte[] data = new byte[100];
    buf.writeRaw(data, 0, data.length);
    assertEquals("Should count new data correctly", data.length, buf.countBufferedBytes());
    assertTrue("Writing to current buffer should not affect flush state", buf.isFlushed());
    // Swap the buffers
    buf.setReadyToFlush();
    assertEquals("Swapping buffers should still count buffered bytes", data.length, buf.countBufferedBytes());
    assertFalse(buf.isFlushed());
    // Flush to a stream
    DataOutputBuffer outBuf = new DataOutputBuffer();
    buf.flushTo(outBuf);
    assertEquals(data.length, outBuf.getLength());
    assertTrue(buf.isFlushed());
    assertEquals(0, buf.countBufferedBytes());
    // Write some more
    buf.writeRaw(data, 0, data.length);
    assertEquals("Should count new data correctly", data.length, buf.countBufferedBytes());
    buf.setReadyToFlush();
    buf.flushTo(outBuf);
    assertEquals(data.length * 2, outBuf.getLength());
    assertEquals(0, buf.countBufferedBytes());
    outBuf.close();
}
Also used: DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Test (org.junit.Test)

Example 100 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

From class TestFSCheckpointID, method testFSCheckpointIDSerialization.

@Test
public void testFSCheckpointIDSerialization() throws IOException {
    Path inpath = new Path("/tmp/blah");
    FSCheckpointID cidin = new FSCheckpointID(inpath);
    DataOutputBuffer out = new DataOutputBuffer();
    cidin.write(out);
    out.close();
    FSCheckpointID cidout = new FSCheckpointID(null);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    cidout.readFields(in);
    in.close();
    // Use a JUnit assertion: a bare `assert` is silently skipped unless
    // the JVM runs with -ea
    assertTrue(cidin.equals(cidout));
}
Also used: Path (org.apache.hadoop.fs.Path), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Test (org.junit.Test)

Aggregations

DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 128 usages
Test (org.junit.Test): 48 usages
DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 36 usages
Credentials (org.apache.hadoop.security.Credentials): 36 usages
ByteBuffer (java.nio.ByteBuffer): 34 usages
IOException (java.io.IOException): 33 usages
Configuration (org.apache.hadoop.conf.Configuration): 25 usages
Token (org.apache.hadoop.security.token.Token): 24 usages
Path (org.apache.hadoop.fs.Path): 20 usages
HashMap (java.util.HashMap): 19 usages
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 19 usages
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 17 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 16 usages
Random (java.util.Random): 15 usages
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 15 usages
DataInputStream (java.io.DataInputStream): 14 usages
Text (org.apache.hadoop.io.Text): 14 usages
ArrayList (java.util.ArrayList): 12 usages
Map (java.util.Map): 10 usages
YarnException (org.apache.hadoop.yarn.exceptions.YarnException): 10 usages
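
The co-occurrence of Credentials, ByteBuffer, and ContainerLaunchContext in these counts points at the most common DataOutputBuffer idiom in the YARN examples: serializing delegation tokens for a container launch. A minimal sketch of that idiom; attachTokens is a hypothetical helper name, and the populated Credentials and ContainerLaunchContext are assumed to exist:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

public class TokenAttachSketch {
    // Hypothetical helper: serialize the tokens held in Credentials and
    // hand YARN exactly the written byte range
    static void attachTokens(Credentials credentials, ContainerLaunchContext clc)
            throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(dob);
        ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        clc.setTokens(tokens);
    }
}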