Example 51 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class Utilities method getResourceFiles.

public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
    // fill in local files (includes copy of HDFS files) to be added to the task environment
    SessionState ss = SessionState.get();
    Set<String> files = (ss == null) ? null : ss.list_resource(t, null);
    return validateFiles(conf, files);
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState)
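
For context, a minimal sketch of how a caller might use the returned value; the property name "tmpfiles" (the MapReduce setting for shipping local files with a job) and the surrounding helper are assumptions for illustration, not taken from the example above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ResourceFilesSketch {
    public static void shipSessionFiles(Configuration conf) {
        // getResourceFiles returns a comma-separated list of validated local
        // copies of the session's resources of the given type.
        String files = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
        if (files != null && !files.isEmpty()) {
            // Assumed for illustration: hand the list to the job configuration.
            conf.set("tmpfiles", files);
        }
    }
}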

Example 52 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class Driver method doAuthorization.

/**
 * Do authorization using post semantic analysis information in the semantic analyzer.
 * The original command is also passed so that the authorization interface can provide
 * more useful information in logs.
 * @param op HiveOperation being performed
 * @param sem SemanticAnalyzer used to parse the input query
 * @param command the input query
 * @throws HiveException
 * @throws AuthorizationException
 */
public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command) throws HiveException, AuthorizationException {
    SessionState ss = SessionState.get();
    Hive db = sem.getDb();
    Set<ReadEntity> additionalInputs = new HashSet<ReadEntity>();
    for (Entity e : sem.getInputs()) {
        if (e.getType() == Entity.Type.PARTITION) {
            additionalInputs.add(new ReadEntity(e.getTable()));
        }
    }
    Set<WriteEntity> additionalOutputs = new HashSet<WriteEntity>();
    for (WriteEntity e : sem.getOutputs()) {
        if (e.getType() == Entity.Type.PARTITION) {
            additionalOutputs.add(new WriteEntity(e.getTable(), e.getWriteType()));
        }
    }
    // Sets.union returns a lazy view that traverses the first set once and
    // then, in order, each element of the second set that is not contained
    // in the first. It therefore never replaces anything in the first set,
    // which preserves the WriteType of each WriteEntity in the first set in
    // the case of the outputs list.
    Set<ReadEntity> inputs = Sets.union(sem.getInputs(), additionalInputs);
    Set<WriteEntity> outputs = Sets.union(sem.getOutputs(), additionalOutputs);
    if (ss.isAuthorizationModeV2()) {
        // get mapping of tables to columns used
        ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
        // colAccessInfo is set only in case of SemanticAnalyzer
        Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo.getTableToColumnAccessMap() : null;
        Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ? sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
        doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
        return;
    }
    if (op == null) {
        throw new HiveException("Operation should not be null");
    }
    HiveAuthorizationProvider authorizer = ss.getAuthorizer();
    if (op.equals(HiveOperation.CREATEDATABASE)) {
        authorizer.authorize(op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
    } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
        authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
    } else {
        if (op.equals(HiveOperation.IMPORT)) {
            ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
            if (!isa.existsTable()) {
                authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null, HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
            }
        }
    }
    if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {
            if (write.isDummy() || write.isPathType()) {
                continue;
            }
            if (write.getType() == Entity.Type.DATABASE) {
                if (!op.equals(HiveOperation.IMPORT)) {
                    // We skip DB check for import here because we already handle it above
                    // as a CTAS check.
                    authorizer.authorize(write.getDatabase(), null, op.getOutputRequiredPrivileges());
                }
                continue;
            }
            if (write.getType() == WriteEntity.Type.PARTITION) {
                Partition part = db.getPartition(write.getTable(), write.getPartition().getSpec(), false);
                if (part != null) {
                    authorizer.authorize(write.getPartition(), null, op.getOutputRequiredPrivileges());
                    continue;
                }
            }
            if (write.getTable() != null) {
                authorizer.authorize(write.getTable(), null, op.getOutputRequiredPrivileges());
            }
        }
    }
    if (inputs != null && inputs.size() > 0) {
        Map<Table, List<String>> tab2Cols = new HashMap<Table, List<String>>();
        Map<Partition, List<String>> part2Cols = new HashMap<Partition, List<String>>();
        // determine if partition level privileges should be checked for input tables
        Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
        for (ReadEntity read : inputs) {
            if (read.isDummy() || read.isPathType() || read.getType() == Entity.Type.DATABASE) {
                continue;
            }
            Table tbl = read.getTable();
            if ((read.getPartition() != null) || (tbl != null && tbl.isPartitioned())) {
                String tblName = tbl.getTableName();
                if (tableUsePartLevelAuth.get(tblName) == null) {
                    boolean usePartLevelPriv = (tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE".equalsIgnoreCase(tbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))));
                    if (usePartLevelPriv) {
                        tableUsePartLevelAuth.put(tblName, Boolean.TRUE);
                    } else {
                        tableUsePartLevelAuth.put(tblName, Boolean.FALSE);
                    }
                }
            }
        }
        // column authorization is checked through table scan operators.
        getTablePartitionUsedColumns(op, sem, tab2Cols, part2Cols, tableUsePartLevelAuth);
        // cache the results for table authorization
        Set<String> tableAuthChecked = new HashSet<String>();
        for (ReadEntity read : inputs) {
            // if the read entity is not direct, we do not need to check its authorization.
            if (read.isDummy() || read.isPathType() || !read.isDirect()) {
                continue;
            }
            if (read.getType() == Entity.Type.DATABASE) {
                authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
                continue;
            }
            Table tbl = read.getTable();
            if (tbl.isView() && sem instanceof SemanticAnalyzer) {
                tab2Cols.put(tbl, sem.getColumnAccessInfo().getTableToColumnAccessMap().get(tbl.getCompleteName()));
            }
            if (read.getPartition() != null) {
                Partition partition = read.getPartition();
                tbl = partition.getTable();
                // use partition level authorization
                if (Boolean.TRUE.equals(tableUsePartLevelAuth.get(tbl.getTableName()))) {
                    List<String> cols = part2Cols.get(partition);
                    if (cols != null && cols.size() > 0) {
                        authorizer.authorize(partition.getTable(), partition, cols, op.getInputRequiredPrivileges(), null);
                    } else {
                        authorizer.authorize(partition, op.getInputRequiredPrivileges(), null);
                    }
                    continue;
                }
            }
            // table-level check: skip if this table was already authorized or
            // if partition-level privileges are in effect for it
            if (tbl != null && !tableAuthChecked.contains(tbl.getTableName()) && !(Boolean.TRUE.equals(tableUsePartLevelAuth.get(tbl.getTableName())))) {
                List<String> cols = tab2Cols.get(tbl);
                if (cols != null && cols.size() > 0) {
                    authorizer.authorize(tbl, null, cols, op.getInputRequiredPrivileges(), null);
                } else {
                    authorizer.authorize(tbl, op.getInputRequiredPrivileges(), null);
                }
                tableAuthChecked.add(tbl.getTableName());
            }
        }
    }
}
Also used : HiveAuthorizationProvider(org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider) SessionState(org.apache.hadoop.hive.ql.session.SessionState) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Entity(org.apache.hadoop.hive.ql.hooks.Entity) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ImportSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer) PrunedPartitionList(org.apache.hadoop.hive.ql.parse.PrunedPartitionList) ArrayList(java.util.ArrayList) ValidTxnWriteIdList(org.apache.hadoop.hive.common.ValidTxnWriteIdList) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) ValidWriteIdList(org.apache.hadoop.hive.common.ValidWriteIdList) List(java.util.List) LinkedList(java.util.LinkedList) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet) Partition(org.apache.hadoop.hive.ql.metadata.Partition) Table(org.apache.hadoop.hive.ql.metadata.Table) SemanticAnalyzer(org.apache.hadoop.hive.ql.parse.SemanticAnalyzer) BaseSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer) ImportSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer) ColumnAccessInfo(org.apache.hadoop.hive.ql.parse.ColumnAccessInfo) ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Hive(org.apache.hadoop.hive.ql.metadata.Hive)
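
To make the union comment above concrete, here is a minimal standalone sketch of the Guava view semantics it relies on; the string values are made up for illustration.

import com.google.common.collect.Sets;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

public class UnionSketch {
    public static void main(String[] args) {
        Set<String> first = new LinkedHashSet<>(Arrays.asList("t1", "t2"));
        Set<String> second = new LinkedHashSet<>(Arrays.asList("t2", "t3"));
        // The view iterates first's elements, then second's elements that are
        // not in first; the duplicate "t2" from second is never surfaced, so
        // an equal-but-distinct element in first keeps its identity and state.
        Set<String> union = Sets.union(first, second);
        // prints [t1, t2, t3]
        System.out.println(union);
        // prints 3
        System.out.println(union.size());
    }
}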

Example 53 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TestSessionUserName method testSessionDefaultUser.

/**
 * Test that the authorization factory gets the username provided by
 * the authenticator when SessionState is created without a username.
 * @throws Exception
 */
@Test
public void testSessionDefaultUser() throws Exception {
    SessionState ss = new SessionState(getAuthV2HiveConf());
    setupDataNucleusFreeHive(ss.getConf());
    SessionState.start(ss);
    Assert.assertEquals("check username", ss.getAuthenticator().getUserName(), HiveAuthorizerStoringUserNameFactory.username);
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) Test(org.junit.Test)
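
The contrasting case, where a username is passed to the SessionState constructor, would look roughly like the sketch below; it assumes the two-argument SessionState(HiveConf, String) constructor and that the V2 authenticator reports the session's username, so treat it as illustrative rather than a verbatim test from the class.

@Test
public void testSessionConstructorUserSketch() throws Exception {
    // Hypothetical variant: the explicit username should be what the
    // authenticator reports back (assumption about the V2 authenticator).
    SessionState ss = new SessionState(getAuthV2HiveConf(), "testuser");
    setupDataNucleusFreeHive(ss.getConf());
    SessionState.start(ss);
    Assert.assertEquals("check username", "testuser", ss.getAuthenticator().getUserName());
}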

Example 54 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TestResetProcessor method createMockSparkSessionState.

private static SessionState createMockSparkSessionState() {
    // Mock a session whose overridden configuration selects Spark as the
    // execution engine, so tests can observe Spark-specific teardown.
    SessionState mockSessionState = mock(SessionState.class);
    Map<String, String> overriddenConfigurations = new HashMap<>();
    overriddenConfigurations.put(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, "spark");
    when(mockSessionState.getOverriddenConfigurations()).thenReturn(overriddenConfigurations);
    when(mockSessionState.getConf()).thenReturn(new HiveConf());
    return mockSessionState;
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) HashMap(java.util.HashMap) HiveConf(org.apache.hadoop.hive.conf.HiveConf)

Example 55 with SessionState

use of org.apache.hadoop.hive.ql.session.SessionState in project hive by apache.

the class TestResetProcessor method testResetExecutionEngineClosesSparkSession.

@Test
public void testResetExecutionEngineClosesSparkSession() throws Exception {
    SessionState mockSessionState = createMockSparkSessionState();
    new ResetProcessor().run(mockSessionState, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname);
    verify(mockSessionState).closeSparkSession();
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) Test(org.junit.Test)
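
A natural companion check, hypothetical and not taken from the source above, would assert the negative case with Mockito's never(): resetting a variable other than the execution engine is assumed not to close the Spark session.

@Test
public void testResetUnrelatedVariableKeepsSparkSessionSketch() throws Exception {
    // Assumption for illustration: ResetProcessor only tears down the Spark
    // session when the execution engine itself is reset.
    SessionState mockSessionState = createMockSparkSessionState();
    new ResetProcessor().run(mockSessionState, HiveConf.ConfVars.MAXREDUCERS.varname);
    verify(mockSessionState, never()).closeSparkSession();
}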

Aggregations

SessionState (org.apache.hadoop.hive.ql.session.SessionState) 112
IOException (java.io.IOException) 28
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 22
ArrayList (java.util.ArrayList) 14
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 14
Path (org.apache.hadoop.fs.Path) 13
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 13
LinkedList (java.util.LinkedList) 12
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState) 12
File (java.io.File) 11
FileNotFoundException (java.io.FileNotFoundException) 11
Map (java.util.Map) 11
Test (org.junit.Test) 10
PrintStream (java.io.PrintStream) 9
ExecutionException (java.util.concurrent.ExecutionException) 9
HashMap (java.util.HashMap) 8
LinkedHashMap (java.util.LinkedHashMap) 7
SerDeException (org.apache.hadoop.hive.serde2.SerDeException) 7
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder) 6
Callable (java.util.concurrent.Callable) 6