Use of org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer in the Apache Hive project.
Example: the analyze method of the AuthorizationTestUtil class.
/**
 * Runs DDL semantic analysis over the given AST and returns the resulting DDLWork.
 *
 * @param ast        parsed DDL statement to analyze
 * @param queryState query state whose HiveConf is used to start the session and build the context
 * @param db         Hive facade handed to the semantic analyzer
 * @return the DDLWork carried by the single root task produced by analysis
 * @throws Exception if session startup or semantic analysis fails
 */
public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
    DDLSemanticAnalyzer ddlAnalyzer = new DDLSemanticAnalyzer(queryState, db);
    SessionState.start(queryState.getConf());
    ddlAnalyzer.analyze(ast, new Context(queryState.getConf()));
    List<Task<? extends Serializable>> tasks = ddlAnalyzer.getRootTasks();
    // Exactly one root task is expected; its work payload is the DDLWork under test.
    return (DDLWork) inList(tasks).ofSize(1).get(0).getWork();
}
Use of org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer in the Apache Hive project.
Example: the setup method of the TestHiveAuthorizationTaskFactory class.
// Per-test fixture: builds a fresh QueryState/HiveConf, a mocked metastore
// (Hive), a parser and a DDLSemanticAnalyzer, and resets the dummy
// authorization-task factory's captured state.
@Before
public void setup() throws Exception {
queryState = new QueryState(null);
HiveConf conf = queryState.getConf();
// Route authorization-task creation through the test's dummy factory so the
// tests can inspect the task the analyzer produces.
conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY, TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
db = Mockito.mock(Hive.class);
table = new Table(DB, TABLE);
partition = new Partition(table);
// NOTE(review): SessionState.start precedes Context creation on the same conf
// — presumably required by Context; preserve this ordering.
SessionState.start(conf);
context = new Context(conf);
parseDriver = new ParseDriver();
analyzer = new DDLSemanticAnalyzer(queryState, db);
// Stub metastore lookups so the analyzer resolves the test table/partition
// without a real metastore.
Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
Mockito.when(db.getPartition(table, new HashMap<String, String>(), false)).thenReturn(partition);
// Resolve the current user via the default Hadoop authenticator.
HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
auth.setConf(conf);
currentUser = auth.getUserName();
// Clear any task the dummy factory captured during a previous test.
DummyHiveAuthorizationTaskFactoryImpl.reset();
}
Aggregations