Search in sources:

Example 76 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hive by apache.

In class DagUtils, the method createVertex.

private Vertex createVertex(JobConf conf, MergeJoinWork mergeJoinWork, LocalResource appJarLr, List<LocalResource> additionalLr, FileSystem fs, Path mrScratchDir, Context ctx, VertexType vertexType) throws Exception {
    Utilities.setMergeWork(conf, mergeJoinWork, mrScratchDir, false);
    if (mergeJoinWork.getMainWork() instanceof MapWork) {
        List<BaseWork> mapWorkList = mergeJoinWork.getBaseWorkList();
        MapWork mapWork = (MapWork) (mergeJoinWork.getMainWork());
        Vertex mergeVx = createVertex(conf, mapWork, appJarLr, additionalLr, fs, mrScratchDir, ctx, vertexType);
        conf.setClass("mapred.input.format.class", HiveInputFormat.class, InputFormat.class);
        // mapreduce.tez.input.initializer.serialize.event.payload should be set
        // to false when using this plug-in to avoid getting a serialized event at run-time.
        conf.setBoolean("mapreduce.tez.input.initializer.serialize.event.payload", false);
        for (int i = 0; i < mapWorkList.size(); i++) {
            mapWork = (MapWork) (mapWorkList.get(i));
            conf.set(TEZ_MERGE_CURRENT_MERGE_FILE_PREFIX, mapWork.getName());
            conf.set(Utilities.INPUT_NAME, mapWork.getName());
            LOG.info("Going through each work and adding MultiMRInput");
            mergeVx.addDataSource(mapWork.getName(), MultiMRInput.createConfigBuilder(conf, HiveInputFormat.class).build());
        }
        VertexManagerPluginDescriptor desc = VertexManagerPluginDescriptor.create(CustomPartitionVertex.class.getName());
        // the +1 to the size is because of the main work.
        CustomVertexConfiguration vertexConf = new CustomVertexConfiguration(mergeJoinWork.getMergeJoinOperator().getConf().getNumBuckets(), vertexType, mergeJoinWork.getBigTableAlias(), mapWorkList.size() + 1);
        DataOutputBuffer dob = new DataOutputBuffer();
        vertexConf.write(dob);
        byte[] userPayload = dob.getData();
        desc.setUserPayload(UserPayload.create(ByteBuffer.wrap(userPayload)));
        mergeVx.setVertexManagerPlugin(desc);
        return mergeVx;
    } else {
        Vertex mergeVx = createVertex(conf, (ReduceWork) mergeJoinWork.getMainWork(), appJarLr, additionalLr, fs, mrScratchDir, ctx);
        return mergeVx;
    }
}
Also used: Vertex (org.apache.tez.dag.api.Vertex), PreWarmVertex (org.apache.tez.dag.api.PreWarmVertex), MapWork (org.apache.hadoop.hive.ql.plan.MapWork), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), VertexManagerPluginDescriptor (org.apache.tez.dag.api.VertexManagerPluginDescriptor), BaseWork (org.apache.hadoop.hive.ql.plan.BaseWork)
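
A subtle point in this example: ByteBuffer.wrap(userPayload) wraps the entire backing array returned by dob.getData(), which is usually larger than the bytes actually written; the later examples bound the wrap with dob.getLength(). Below is a minimal sketch of the Writable-to-payload pattern, where VertexConf is a hypothetical stand-in for CustomVertexConfiguration (any Writable works):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Writable;

public class PayloadSketch {

    // Hypothetical stand-in for CustomVertexConfiguration.
    static class VertexConf implements Writable {
        int numBuckets;

        VertexConf(int numBuckets) {
            this.numBuckets = numBuckets;
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeInt(numBuckets);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            numBuckets = in.readInt();
        }
    }

    public static void main(String[] args) throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();
        new VertexConf(4).write(dob);
        // Bound the wrap with getLength(): getData() exposes the whole
        // internal array, so wrapping it unbounded carries trailing zeros.
        ByteBuffer payload = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
        System.out.println("payload bytes: " + payload.remaining());
    }
}

In the Tez case above, the resulting ByteBuffer is handed to UserPayload.create and attached to the vertex manager plugin descriptor.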

Example 77 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hive by apache.

In class LlapTaskCommunicator, the method serializeCredentials.

private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
    Credentials containerCredentials = new Credentials();
    containerCredentials.addAll(credentials);
    DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
    containerCredentials.writeTokenStorageToStream(containerTokens_dob);
    return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
}
Also used: DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Credentials (org.apache.hadoop.security.Credentials)
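
For context, a sketch of the full round trip: the bytes produced by a helper like this are typically shipped to the remote side (for example inside a container launch context) and decoded with the matching readTokenStorageStream call. DataInputBuffer extends DataInputStream, so it can be passed to that method directly:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize, as serializeCredentials above does.
        Credentials creds = new Credentials();
        DataOutputBuffer dob = new DataOutputBuffer();
        creds.writeTokenStorageToStream(dob);
        ByteBuffer wire = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

        // Deserialize on the receiving side.
        Credentials decoded = new Credentials();
        DataInputBuffer dib = new DataInputBuffer();
        dib.reset(wire.array(), wire.arrayOffset() + wire.position(), wire.remaining());
        decoded.readTokenStorageStream(dib);
        System.out.println("tokens: " + decoded.numberOfTokens());
    }
}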

Example 78 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hive by apache.

In class LlapBaseInputFormat, the method serializeCredentials.

private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
    Credentials containerCredentials = new Credentials();
    containerCredentials.addAll(credentials);
    DataOutputBuffer containerTokens_dob = new DataOutputBuffer();
    containerCredentials.writeTokenStorageToStream(containerTokens_dob);
    return ByteBuffer.wrap(containerTokens_dob.getData(), 0, containerTokens_dob.getLength());
}
Also used: DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Credentials (org.apache.hadoop.security.Credentials)
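
This helper is byte-for-byte identical to the one in LlapTaskCommunicator in Example 77; the round-trip sketch shown there applies here unchanged.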

Example 79 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hive by apache.

In class AppMasterEventOperator, the method initDataBuffer.

protected void initDataBuffer(boolean skipPruning) throws HiveException {
    buffer = new DataOutputBuffer();
    try {
        // add any other header info
        getConf().writeEventHeader(buffer);
        // write byte to say whether to skip pruning or not
        buffer.writeBoolean(skipPruning);
    } catch (IOException e) {
        throw new HiveException(e);
    }
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), IOException (java.io.IOException)
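
The event header written by writeEventHeader is internal to the work description, so the sketch below round-trips only the trailing flag; it assumes the reader has already consumed the header bytes. Note that DataInputBuffer.reset bounds the input by getLength(), not by the size of the backing array:

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class SkipPruningFlagSketch {
    public static void main(String[] args) throws IOException {
        DataOutputBuffer buffer = new DataOutputBuffer();
        buffer.writeBoolean(true);  // the skipPruning flag

        DataInputBuffer in = new DataInputBuffer();
        in.reset(buffer.getData(), buffer.getLength());
        System.out.println("skipPruning = " + in.readBoolean());
    }
}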

Example 80 with DataOutputBuffer

Use of org.apache.hadoop.io.DataOutputBuffer in project hadoop by apache.

In class TestAppManager, the method testRMAppSubmitWithInvalidTokens.

@Test
public void testRMAppSubmitWithInvalidTokens() throws Exception {
    // Setup invalid security tokens
    DataOutputBuffer dob = new DataOutputBuffer();
    ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    asContext.getAMContainerSpec().setTokens(securityTokens);
    try {
        appMonitor.submitApplication(asContext, "test");
        Assert.fail("Application submission should fail because" + " Tokens are invalid.");
    } catch (YarnException e) {
        // Exception is expected
        Assert.assertTrue("The thrown exception is not" + " java.io.EOFException", e.getMessage().contains("java.io.EOFException"));
    }
    int timeoutSecs = 0;
    while ((getAppEventType() == RMAppEventType.KILL) && timeoutSecs++ < 20) {
        Thread.sleep(1000);
    }
    Assert.assertEquals("app event type sent is wrong", RMAppEventType.APP_REJECTED, getAppEventType());
    asContext.getAMContainerSpec().setTokens(null);
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), ByteBuffer (java.nio.ByteBuffer), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), Test (org.junit.Test)
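
Why the test expects java.io.EOFException: dob is never written to, so securityTokens is a zero-length buffer, and parsing it as token storage fails on the very first read. A minimal reproduction outside the ResourceManager machinery, under that assumption:

import java.io.EOFException;
import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public class EmptyTokensSketch {
    public static void main(String[] args) throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();  // nothing written
        DataInputBuffer dib = new DataInputBuffer();
        dib.reset(dob.getData(), dob.getLength());      // zero-length input
        try {
            new Credentials().readTokenStorageStream(dib);
        } catch (EOFException expected) {
            // The same java.io.EOFException the test asserts on.
            System.out.println("got expected " + expected);
        }
    }
}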

Aggregations

DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 132
Test (org.junit.Test): 48
Credentials (org.apache.hadoop.security.Credentials): 37
ByteBuffer (java.nio.ByteBuffer): 36
DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 36
IOException (java.io.IOException): 34
Configuration (org.apache.hadoop.conf.Configuration): 25
Token (org.apache.hadoop.security.token.Token): 25
Path (org.apache.hadoop.fs.Path): 21
HashMap (java.util.HashMap): 20
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 20
ContainerLaunchContext (org.apache.hadoop.yarn.api.records.ContainerLaunchContext): 18
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 16
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 16
Random (java.util.Random): 15
DataInputStream (java.io.DataInputStream): 14
Text (org.apache.hadoop.io.Text): 14
ArrayList (java.util.ArrayList): 13
Map (java.util.Map): 10
FileSystem (org.apache.hadoop.fs.FileSystem): 10