
Example 56 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TestAddResource method testDuplicateAdds.

// add same jar multiple times and check that dependencies are added only once.
@Test
public void testDuplicateAdds() throws URISyntaxException, IOException {
    SessionState ss = Mockito.spy(SessionState.start(conf).get());
    String query = "testQuery";
    List<URI> list = new LinkedList<URI>();
    List<String> addList = new LinkedList<String>();
    list.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
    list.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
    list.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
    list.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
    list.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
    Collections.sort(list);
    Mockito.when(ss.resolveAndDownload(t, query, false)).thenReturn(list);
    for (int i = 0; i < 10; i++) {
        addList.add(query);
    }
    ss.add_resources(t, addList);
    Set<String> dependencies = ss.list_resource(t, null);
    LinkedList<URI> actual = new LinkedList<URI>();
    for (String dependency : dependencies) {
        actual.add(createURI(dependency));
    }
    Collections.sort(actual);
    assertEquals(list, actual);
    ss.close();
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) URI(java.net.URI) LinkedList(java.util.LinkedList) Test(org.junit.Test)
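
The createURI helper and the t/conf fields used above are defined elsewhere in TestAddResource and are not part of the excerpt. A minimal sketch of the helper, assuming it simply wraps the path string in a java.net.URI so the expected and actual dependency lists can be sorted and compared:

// Hypothetical stand-in for the test's createURI helper (not shown in the excerpt):
// turns a path string into a URI so dependency lists can be sorted and compared
// with assertEquals.
private static URI createURI(String path) throws URISyntaxException {
    return new URI(path);
}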

Example 57 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TestAddResource method testDeleteJarMultiple.

// same test as above but with 3 jars sharing dependencies
@Test
public void testDeleteJarMultiple() throws URISyntaxException, IOException {
    SessionState ss = Mockito.spy(SessionState.start(conf).get());
    String query1 = "testQuery1";
    String query2 = "testQuery2";
    String query3 = "testQuery3";
    List<URI> list1 = new LinkedList<URI>();
    List<URI> list2 = new LinkedList<URI>();
    List<URI> list3 = new LinkedList<URI>();
    List<String> addList = new LinkedList<String>();
    list1.add(createURI(TEST_JAR_DIR + "testjar1.jar"));
    list1.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
    list1.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
    list1.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
    list2.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
    list2.add(createURI(TEST_JAR_DIR + "testjar3.jar"));
    list2.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
    list3.add(createURI(TEST_JAR_DIR + "testjar4.jar"));
    list3.add(createURI(TEST_JAR_DIR + "testjar2.jar"));
    list3.add(createURI(TEST_JAR_DIR + "testjar5.jar"));
    Collections.sort(list1);
    Collections.sort(list2);
    Collections.sort(list3);
    Mockito.when(ss.resolveAndDownload(t, query1, false)).thenReturn(list1);
    Mockito.when(ss.resolveAndDownload(t, query2, false)).thenReturn(list2);
    Mockito.when(ss.resolveAndDownload(t, query3, false)).thenReturn(list3);
    addList.add(query1);
    addList.add(query2);
    addList.add(query3);
    ss.add_resources(t, addList);
    List<String> deleteList = new LinkedList<String>();
    deleteList.add(list1.get(0).toString());
    // delete jar added using query1
    ss.delete_resources(t, deleteList);
    Set<String> dependencies = ss.list_resource(t, null);
    LinkedList<URI> actual = new LinkedList<URI>();
    for (String dependency : dependencies) {
        actual.add(createURI(dependency));
    }
    List<URI> expected = union(list2, list3);
    Collections.sort(expected);
    Collections.sort(actual);
    assertEquals(expected, actual);
    actual.clear();
    expected.clear();
    deleteList.clear();
    deleteList.add(list2.get(0).toString());
    // delete jars added using query2
    ss.delete_resources(t, deleteList);
    dependencies = ss.list_resource(t, null);
    actual = new LinkedList<URI>();
    for (String dependency : dependencies) {
        actual.add(createURI(dependency));
    }
    expected = new LinkedList<URI>(list3);
    Collections.sort(expected);
    Collections.sort(actual);
    assertEquals(expected, actual);
    actual.clear();
    expected.clear();
    // delete remaining jars
    deleteList.clear();
    deleteList.add(list3.get(0).toString());
    ss.delete_resources(t, deleteList);
    dependencies = ss.list_resource(t, null);
    assertEquals(dependencies.isEmpty(), true);
    ss.close();
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) URI(java.net.URI) LinkedList(java.util.LinkedList) Test(org.junit.Test)
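
The union helper that builds the expected list after the first delete is also defined outside the excerpt. A minimal sketch, assuming it returns a deduplicated merge of the two dependency lists (java.util.Set and java.util.HashSet would need to be imported in addition to the types listed above):

// Hypothetical stand-in for the test's union helper: merges two dependency lists
// and drops duplicates, so jars shared between queries appear only once in the
// expected result.
private static List<URI> union(List<URI> a, List<URI> b) {
    Set<URI> merged = new HashSet<URI>(a);
    merged.addAll(b);
    return new LinkedList<URI>(merged);
}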

Example 58 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class AuthorizationMetaStoreFilterHook method getFilteredObjects.

private List<HivePrivilegeObject> getFilteredObjects(List<HivePrivilegeObject> listObjs) throws MetaException {
    SessionState ss = SessionState.get();
    HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
    authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
    authzContextBuilder.setForwardedAddresses(ss.getForwardedAddresses());
    try {
        return ss.getAuthorizerV2().filterListCmdObjects(listObjs, authzContextBuilder.build());
    } catch (HiveAuthzPluginException e) {
        LOG.error("Authorization error", e);
        throw new MetaException(e.getMessage());
    } catch (HiveAccessControlException e) {
        // An authorization error is not really expected in a filter call: the
        // implementation should have simply filtered out everything, since a
        // checkPrivileges call would already have been made to authorize this action.
        LOG.error("AccessControlException", e);
        throw new MetaException(e.getMessage());
    }
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
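
A filter hook like this only takes effect if the metastore client is configured to use it. A hedged sketch of that wiring; the property name is the standard hive.metastore.filter.hook setting, and the fully qualified class name is assumed from the class shown above, so verify both against the Hive version in use:

// Sketch: point the metastore client at the authorization-aware filter hook.
// Class package assumed; check it against your Hive distribution.
HiveConf conf = new HiveConf();
conf.set("hive.metastore.filter.hook",
    "org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook");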

Example 59 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TezSessionPoolManager method returnSession.

public void returnSession(TezSessionState tezSessionState, boolean llap) throws Exception {
    // Ignore the interrupt status while returning the session, but set it back
    // on the thread in case anything else needs to deal with it.
    boolean isInterrupted = Thread.interrupted();
    try {
        if (isInterrupted) {
            LOG.info("returnSession invoked with interrupt status set");
        }
        if (llap && (this.numConcurrentLlapQueries > 0)) {
            llapQueue.release();
        }
        if (tezSessionState.isDefault() && tezSessionState instanceof TezSessionPoolSession) {
            LOG.info("The session " + tezSessionState.getSessionId() + " belongs to the pool. Put it back in");
            SessionState sessionState = SessionState.get();
            if (sessionState != null) {
                sessionState.setTezSession(null);
            }
            TezSessionPoolSession poolSession = (TezSessionPoolSession) tezSessionState;
            if (poolSession.returnAfterUse()) {
                defaultQueuePool.put(poolSession);
            }
        }
    // For a non-default session nothing changes; the user can continue to use the
    // existing session in the SessionState.
    } finally {
        // Reset the interrupt status.
        if (isInterrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState)
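
The interrupt handling that wraps the pool operation is a general idiom worth isolating: Thread.interrupted() reads and clears the flag so the blocking pool calls are not aborted, and the finally block restores it for callers further up the stack. A minimal, self-contained sketch of the same pattern using only the standard Thread API:

// Preserve the caller's interrupt status across work that must not be interrupted.
boolean isInterrupted = Thread.interrupted();   // reads and clears the interrupt flag
try {
    // ... return the session to the pool, release semaphores, etc. ...
} finally {
    if (isInterrupted) {
        Thread.currentThread().interrupt();     // restore the flag for later handlers
    }
}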

Example 60 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class SemanticAnalyzer method genValuesTempTable.

/**
   * Generate a temp table out of a values clause
   * See also {@link #preProcessForInsert(ASTNode, QB)}
   */
private ASTNode genValuesTempTable(ASTNode originalFrom, QB qb) throws SemanticException {
    Path dataDir = null;
    if (!qb.getEncryptedTargetTablePaths().isEmpty()) {
        // Currently only INSERT INTO T VALUES(...) is supported, so only one values
        // clause and one target table are possible. If/when support for
        // SELECT ... FROM VALUES(...) is added, an insert statement may have multiple
        // encrypted target tables.
        dataDir = ctx.getMRTmpPath(qb.getEncryptedTargetTablePaths().get(0).toUri());
    }
    // Pick a name for the table
    SessionState ss = SessionState.get();
    String tableName = VALUES_TMP_TABLE_NAME_PREFIX + ss.getNextValuesTempTableSuffix();
    // Step 1, parse the values clause we were handed
    List<? extends Node> fromChildren = originalFrom.getChildren();
    // First child should be the virtual table ref
    ASTNode virtualTableRef = (ASTNode) fromChildren.get(0);
    assert virtualTableRef.getToken().getType() == HiveParser.TOK_VIRTUAL_TABREF : "Expected first child of TOK_VIRTUAL_TABLE to be TOK_VIRTUAL_TABREF but was " + virtualTableRef.getName();
    List<? extends Node> virtualTableRefChildren = virtualTableRef.getChildren();
    // First child of this should be the table name.  If it's anonymous,
    // then we don't have a table name.
    ASTNode tabName = (ASTNode) virtualTableRefChildren.get(0);
    if (tabName.getToken().getType() != HiveParser.TOK_ANONYMOUS) {
        // Named virtual tables are not supported yet; supporting them would require
        // parsing this list of column names and building it into the table.
        throw new SemanticException(ErrorMsg.VALUES_TABLE_CONSTRUCTOR_NOT_SUPPORTED.getMsg());
    }
    // The second child of the TOK_VIRTUAL_TABLE should be TOK_VALUES_TABLE
    ASTNode valuesTable = (ASTNode) fromChildren.get(1);
    assert valuesTable.getToken().getType() == HiveParser.TOK_VALUES_TABLE : "Expected second child of TOK_VIRTUAL_TABLE to be TOK_VALUES_TABLE but was " + valuesTable.getName();
    // Each of the children of TOK_VALUES_TABLE will be a TOK_VALUE_ROW
    List<? extends Node> valuesTableChildren = valuesTable.getChildren();
    // Now that we're going to start reading through the rows, open a file to write the rows to.
    // If we leave this method before creating the temporary table we need to be sure to clean up
    // this file.
    Path tablePath = null;
    FileSystem fs = null;
    FSDataOutputStream out = null;
    try {
        if (dataDir == null) {
            tablePath = Warehouse.getDnsPath(new Path(ss.getTempTableSpace(), tableName), conf);
        } else {
            // If the target table of the insert is encrypted, make sure the temporary
            // table data is stored similarly encrypted.
            tablePath = Warehouse.getDnsPath(new Path(dataDir, tableName), conf);
        }
        fs = tablePath.getFileSystem(conf);
        fs.mkdirs(tablePath);
        Path dataFile = new Path(tablePath, "data_file");
        out = fs.create(dataFile);
        List<FieldSchema> fields = new ArrayList<FieldSchema>();
        boolean firstRow = true;
        for (Node n : valuesTableChildren) {
            ASTNode valuesRow = (ASTNode) n;
            assert valuesRow.getToken().getType() == HiveParser.TOK_VALUE_ROW : "Expected child of TOK_VALUES_TABLE to be TOK_VALUE_ROW but was " + valuesRow.getName();
            // Each of the children of this should be a literal
            List<? extends Node> valuesRowChildren = valuesRow.getChildren();
            boolean isFirst = true;
            int nextColNum = 1;
            for (Node n1 : valuesRowChildren) {
                ASTNode value = (ASTNode) n1;
                if (firstRow) {
                    fields.add(new FieldSchema("tmp_values_col" + nextColNum++, "string", ""));
                }
                if (isFirst)
                    isFirst = false;
                else
                    // columns are separated by Hive's default TextFile field delimiter (Ctrl-A)
                    writeAsText("\u0001", out);
                writeAsText(unparseExprForValuesClause(value), out);
            }
            writeAsText("\n", out);
            firstRow = false;
        }
        // Step 2, create a temp table, using the created file as the data
        StorageFormat format = new StorageFormat(conf);
        format.processStorageFormat("TextFile");
        Table table = db.newTable(tableName);
        table.setSerializationLib(format.getSerde());
        table.setFields(fields);
        table.setDataLocation(tablePath);
        table.getTTable().setTemporary(true);
        table.setStoredAsSubDirectories(false);
        table.setInputFormatClass(format.getInputFormat());
        table.setOutputFormatClass(format.getOutputFormat());
        db.createTable(table, false);
    } catch (Exception e) {
        String errMsg = ErrorMsg.INSERT_CANNOT_CREATE_TEMP_FILE.getMsg() + e.getMessage();
        LOG.error(errMsg);
        // Try to delete the file
        if (fs != null && tablePath != null) {
            try {
                fs.delete(tablePath, false);
            } catch (IOException swallowIt) {
            }
        }
        throw new SemanticException(errMsg, e);
    } finally {
        IOUtils.closeStream(out);
    }
    // Step 3, return a new subtree with a from clause built around that temp table
    // The form of the tree is TOK_TABREF->TOK_TABNAME->identifier(tablename)
    Token t = new ClassicToken(HiveParser.TOK_TABREF);
    ASTNode tabRef = new ASTNode(t);
    t = new ClassicToken(HiveParser.TOK_TABNAME);
    ASTNode tabNameNode = new ASTNode(t);
    tabRef.addChild(tabNameNode);
    t = new ClassicToken(HiveParser.Identifier, tableName);
    ASTNode identifier = new ASTNode(t);
    tabNameNode.addChild(identifier);
    return tabRef;
}
Also used : Path(org.apache.hadoop.fs.Path) SessionState(org.apache.hadoop.hive.ql.session.SessionState) Table(org.apache.hadoop.hive.ql.metadata.Table) ClassicToken(org.antlr.runtime.ClassicToken) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Node(org.apache.hadoop.hive.ql.lib.Node) RelNode(org.apache.calcite.rel.RelNode) ArrayList(java.util.ArrayList) Token(org.antlr.runtime.Token) CommonToken(org.antlr.runtime.CommonToken) IOException(java.io.IOException) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) PatternSyntaxException(java.util.regex.PatternSyntaxException) FileNotFoundException(java.io.FileNotFoundException) AccessControlException(java.security.AccessControlException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) FileSystem(org.apache.hadoop.fs.FileSystem) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream)
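
The data file written above backs a plain TextFile temp table, so each VALUES row is encoded as delimiter-separated text. A small sketch of that encoding in isolation; the helper name and the use of String.join are illustrative, not taken from the Hive source:

// Illustrative helper: encode one VALUES row the way the temp-table writer does,
// with columns joined by Hive's default field delimiter '\u0001' (Ctrl-A) and a
// newline as the row terminator.
static void writeRow(OutputStream out, List<String> columns) throws IOException {
    out.write(String.join("\u0001", columns).getBytes(StandardCharsets.UTF_8));
    out.write('\n');
}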

Aggregations

SessionState (org.apache.hadoop.hive.ql.session.SessionState) 112
IOException (java.io.IOException) 28
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 22
ArrayList (java.util.ArrayList) 14
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 14
Path (org.apache.hadoop.fs.Path) 13
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 13
LinkedList (java.util.LinkedList) 12
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState) 12
File (java.io.File) 11
FileNotFoundException (java.io.FileNotFoundException) 11
Map (java.util.Map) 11
Test (org.junit.Test) 10
PrintStream (java.io.PrintStream) 9
ExecutionException (java.util.concurrent.ExecutionException) 9
HashMap (java.util.HashMap) 8
LinkedHashMap (java.util.LinkedHashMap) 7
SerDeException (org.apache.hadoop.hive.serde2.SerDeException) 7
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder) 6
Callable (java.util.concurrent.Callable) 6