Example 76 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache, from class TestMerger, method readOnDiskMapOutput:

private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path, List<String> keys, List<String> values) throws IOException {
    FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));
    IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in, fs.getFileStatus(path).getLen(), null, null);
    DataInputBuffer keyBuff = new DataInputBuffer();
    DataInputBuffer valueBuff = new DataInputBuffer();
    Text key = new Text();
    Text value = new Text();
    while (reader.nextRawKey(keyBuff)) {
        // The raw bytes are deserialized into reusable Text instances, then
        // copied out as Strings so the caller keeps stable values.
        key.readFields(keyBuff);
        keys.add(key.toString());
        reader.nextRawValue(valueBuff);
        value.readFields(valueBuff);
        values.add(value.toString());
    }
    reader.close();
}
Also used: IFile (org.apache.hadoop.mapred.IFile), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Reader (org.apache.hadoop.mapred.IFile.Reader), Text (org.apache.hadoop.io.Text)
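
A path read by this helper would typically be produced by the matching IFile.Writer. A minimal sketch of the write side (the helper name is hypothetical; it assumes the IFile.Writer constructor that mirrors the Reader used above):

private void writeOnDiskMapOutput(Configuration conf, FileSystem fs, Path path, List<String> keys, List<String> values) throws IOException {
    // Wrap the output stream the same way the reader wraps its input.
    FSDataOutputStream out = CryptoUtils.wrapIfNecessary(conf, fs.create(path));
    IFile.Writer<Text, Text> writer = new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class, null, null);
    for (int i = 0; i < keys.size(); ++i) {
        writer.append(new Text(keys.get(i)), new Text(values.get(i)));
    }
    writer.close();
}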

Example 77 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache, from class TestIFileStreams, method testBadLength:

@Test
public void testBadLength() throws Exception {
    final int DLEN = 100;
    DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
    IFileOutputStream ifos = new IFileOutputStream(dob);
    for (int i = 0; i < DLEN; ++i) {
        ifos.write(i);
    }
    ifos.close();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), DLEN + 4);
    // Deliberately declare the wrong length (DLEN, not DLEN + 4), so the last
    // four data bytes are misread as the checksum and verification fails.
    IFileInputStream ifis = new IFileInputStream(dib, DLEN, new Configuration());
    int i = 0;
    try {
        while (i < DLEN - 8) {
            assertEquals(i++, ifis.read());
        }
        ifis.close();
    } catch (ChecksumException e) {
        assertEquals("Checksum before close", DLEN - 8, i);
        return;
    }
    }
    fail("Did not detect bad data in checksum");
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), Configuration (org.apache.hadoop.conf.Configuration), ChecksumException (org.apache.hadoop.fs.ChecksumException), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Test (org.junit.Test)
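
By contrast, when the declared length matches what IFileOutputStream actually wrote, every byte reads back and close() verifies cleanly. A minimal sketch of that happy path, using the same classes as above (the test name is hypothetical):

@Test
public void testGoodLength() throws Exception {
    final int DLEN = 100;
    DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
    IFileOutputStream ifos = new IFileOutputStream(dob);
    for (int i = 0; i < DLEN; ++i) {
        ifos.write(i);
    }
    ifos.close();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(dob.getData(), DLEN + 4);
    // Declare the true stream length: DLEN data bytes plus the 4-byte checksum.
    IFileInputStream ifis = new IFileInputStream(dib, DLEN + 4, new Configuration());
    for (int i = 0; i < DLEN; ++i) {
        assertEquals(i, ifis.read());
    }
    // With a correct length the checksum verifies and close() does not throw.
    ifis.close();
}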

Example 78 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache, from class TestIFileStreams, method testBadIFileStream:

@Test
public void testBadIFileStream() throws Exception {
    final int DLEN = 100;
    DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
    IFileOutputStream ifos = new IFileOutputStream(dob);
    for (int i = 0; i < DLEN; ++i) {
        ifos.write(i);
    }
    ifos.close();
    DataInputBuffer dib = new DataInputBuffer();
    final byte[] b = dob.getData();
    // Corrupt a single payload byte; the checksum should catch it once the
    // whole stream has been read.
    ++b[17];
    dib.reset(b, DLEN + 4);
    IFileInputStream ifis = new IFileInputStream(dib, DLEN + 4, new Configuration());
    int i = 0;
    try {
        while (i < DLEN) {
            if (17 == i) {
                assertEquals(18, ifis.read());
            } else {
                assertEquals(i, ifis.read());
            }
            ++i;
        }
        ifis.close();
    } catch (ChecksumException e) {
        assertEquals("Unexpected bad checksum", DLEN - 1, i);
        return;
    }
    fail("Did not detect bad data in checksum");
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), Configuration (org.apache.hadoop.conf.Configuration), ChecksumException (org.apache.hadoop.fs.ChecksumException), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Test (org.junit.Test)
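
The DLEN + 4 sizing in both tests reflects the stream layout IFileOutputStream produces: the payload followed by a 4-byte CRC32 checksum, which is why a corrupt byte is only reported once the final data byte is read and the checksum can be verified. A conceptual illustration of that layout with java.util.zip.CRC32 (the actual trailer is written by Hadoop's internal checksum code, so treat this as a sketch of the idea, not the exact bytes):

byte[] payload = new byte[100];
for (int i = 0; i < payload.length; ++i) {
    payload[i] = (byte) i;
}
java.util.zip.CRC32 crc = new java.util.zip.CRC32();
crc.update(payload, 0, payload.length);
// Conceptual layout: 100 payload bytes, then a 4-byte checksum trailer.
System.out.println("payload = " + payload.length + " bytes, crc32 = 0x"
        + Long.toHexString(crc.getValue()));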

Example 79 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache, from class TestCounters, method testResetOnDeserialize:

@Test
public void testResetOnDeserialize() throws IOException {
    // Allow only one counterGroup
    Configuration conf = new Configuration();
    conf.setInt(MRJobConfig.COUNTER_GROUPS_MAX_KEY, 1);
    Limits.init(conf);
    Counters countersWithOneGroup = new Counters();
    countersWithOneGroup.findCounter("firstOf1Allowed", "First group");
    boolean caughtExpectedException = false;
    try {
        countersWithOneGroup.findCounter("secondIsTooMany", "Second group");
    } catch (LimitExceededException e) {
        // expected: the group limit (1) has already been reached
        caughtExpectedException = true;
    }
    assertTrue("Did not throw expected exception", caughtExpectedException);
    Counters countersWithZeroGroups = new Counters();
    DataOutputBuffer out = new DataOutputBuffer();
    countersWithZeroGroups.write(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    countersWithOneGroup.readFields(in);
    // After reset one should be able to add a group
    countersWithOneGroup.findCounter("firstGroupAfterReset", "After reset " + "limit should be set back to zero");
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), Configuration (org.apache.hadoop.conf.Configuration), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), LimitExceededException (org.apache.hadoop.mapreduce.counters.LimitExceededException), Test (org.junit.Test)
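
The write/reset/readFields sequence used on the Counters objects above is the general Writable round trip through in-memory buffers. A minimal sketch of the same pattern with IntWritable:

IntWritable written = new IntWritable(42);
DataOutputBuffer out = new DataOutputBuffer();
written.write(out);                        // serialize into the growable byte buffer
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());  // expose only the valid bytes
IntWritable restored = new IntWritable();
restored.readFields(in);                   // deserialize from the same bytes
assertEquals(42, restored.get());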

Example 80 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache, from class TestDelegationToken, method testDelegationToken:

@SuppressWarnings("deprecation")
@Test
public void testDelegationToken() throws Exception {
    final JobClient client;
    client = user1.doAs(new PrivilegedExceptionAction<JobClient>() {

        @Override
        public JobClient run() throws Exception {
            return new JobClient(cluster.createJobConf());
        }
    });
    final JobClient bobClient;
    bobClient = user2.doAs(new PrivilegedExceptionAction<JobClient>() {

        @Override
        public JobClient run() throws Exception {
            return new JobClient(cluster.createJobConf());
        }
    });
    final Token<DelegationTokenIdentifier> token = client.getDelegationToken(new Text(user1.getUserName()));
    DataInputBuffer inBuf = new DataInputBuffer();
    byte[] bytes = token.getIdentifier();
    inBuf.reset(bytes, bytes.length);
    DelegationTokenIdentifier ident = new DelegationTokenIdentifier();
    ident.readFields(inBuf);
    assertEquals("alice", ident.getUser().getUserName());
    long createTime = ident.getIssueDate();
    long maxTime = ident.getMaxDate();
    long currentTime = System.currentTimeMillis();
    System.out.println("create time: " + createTime);
    System.out.println("current time: " + currentTime);
    System.out.println("max time: " + maxTime);
    assertTrue("createTime < current", createTime < currentTime);
    assertTrue("current < maxTime", currentTime < maxTime);
    // renew should work as user alice
    user1.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            client.renewDelegationToken(token);
            client.renewDelegationToken(token);
            return null;
        }
    });
    // bob should fail to renew
    user2.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            try {
                bobClient.renewDelegationToken(token);
                Assert.fail("bob renew");
            } catch (AccessControlException ace) {
            // PASS
            }
            return null;
        }
    });
    // bob should fail to cancel
    user2.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            try {
                bobClient.cancelDelegationToken(token);
                Assert.fail("bob cancel");
            } catch (AccessControlException ace) {
            // PASS
            }
            return null;
        }
    });
    // alice should be able to cancel but only cancel once
    user1.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            client.cancelDelegationToken(token);
            try {
                client.cancelDelegationToken(token);
                Assert.fail("second alice cancel");
            } catch (InvalidToken it) {
            // PASS
            }
            return null;
        }
    });
}
Also used: AccessControlException (org.apache.hadoop.security.AccessControlException), Text (org.apache.hadoop.io.Text), PrivilegedExceptionAction (java.security.PrivilegedExceptionAction), JobClient (org.apache.hadoop.mapred.JobClient), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken), Test (org.junit.Test)
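
The same buffer pattern decodes the token identifier above, and because Token itself implements Writable, an entire token can be round-tripped the same way. A minimal sketch reusing the test's token variable (it assumes Token.equals compares the serialized fields, as Hadoop's implementation does):

DataOutputBuffer out = new DataOutputBuffer();
token.write(out);                          // serialize identifier, password, kind, service
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
Token<DelegationTokenIdentifier> copy = new Token<DelegationTokenIdentifier>();
copy.readFields(in);
assertEquals(token, copy);                 // an equivalent token is reconstructed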

Aggregations

DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 112
Test (org.junit.Test): 49
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 45
IOException (java.io.IOException): 24
Text (org.apache.hadoop.io.Text): 20
Path (org.apache.hadoop.fs.Path): 16
Configuration (org.apache.hadoop.conf.Configuration): 13
IntWritable (org.apache.hadoop.io.IntWritable): 11
Random (java.util.Random): 10
DataInputStream (java.io.DataInputStream): 9
BufferedInputStream (java.io.BufferedInputStream): 8
HashMap (java.util.HashMap): 8
DataOutputStream (java.io.DataOutputStream): 6
LongWritable (org.apache.hadoop.io.LongWritable): 6
SerializationFactory (org.apache.hadoop.io.serializer.SerializationFactory): 6
IFile (org.apache.tez.runtime.library.common.sort.impl.IFile): 6
BufferedOutputStream (java.io.BufferedOutputStream): 5
BytesWritable (org.apache.hadoop.io.BytesWritable): 5
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 4
Credentials (org.apache.hadoop.security.Credentials): 4