use of org.apache.hadoop.hive.ql.parse.ParseDriver in project hive by apache.
the class QTestUtil method init.
public void init() throws Exception {
  // Create remote dirs once.
  if (mr != null) {
    createRemoteDirs();
  }
  testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
  // Force the MR execution engine while the SessionState starts, then restore the configured engine.
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  SessionState.start(conf);
  conf.set("hive.execution.engine", execEngine);
  db = Hive.get(conf);
  drv = new Driver(conf);
  drv.init();
  pd = new ParseDriver();
  sem = new SemanticAnalyzer(queryState);
}
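After init() runs, the ParseDriver and SemanticAnalyzer fields work as a pair: the former turns a query string into an ASTNode, the latter analyzes that tree. The helper below is a minimal sketch of that flow, not part of QTestUtil; it assumes the pd, sem, and conf fields set up above and the standard ParseDriver.parse(String) and analyze(ASTNode, Context) signatures, which can vary slightly between Hive versions.
  // Hypothetical helper (not in QTestUtil): parse a query string into a Hive AST
  // and run semantic analysis over it.
  ASTNode parseAndAnalyze(String query) throws Exception {
    ASTNode ast = pd.parse(query);      // e.g. "SELECT key, value FROM src"
    Context ctx = new Context(conf);    // the analyzer needs a Context for scratch dirs
    sem.analyze(ast, ctx);              // populates the analyzer's plan state (root tasks, result schema)
    return ast;
  }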
use of org.apache.hadoop.hive.ql.parse.ParseDriver in project phoenix by apache.
the class HiveTestUtil method init.
public void init() throws Exception {
  testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
  conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, false);
  // Same engine swap as in QTestUtil: start the session under MR, then restore the original engine.
  String execEngine = conf.get("hive.execution.engine");
  conf.set("hive.execution.engine", "mr");
  SessionState.start(conf);
  conf.set("hive.execution.engine", execEngine);
  db = Hive.get(conf);
  pd = new ParseDriver();
  sem = new SemanticAnalyzer(conf);
}
use of org.apache.hadoop.hive.ql.parse.ParseDriver in project hive by apache.
the class TestHiveAuthorizationTaskFactory method setup.
@Before
public void setup() throws Exception {
  queryState = new QueryState(null);
  HiveConf conf = queryState.getConf();
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
      TestHiveAuthorizationTaskFactory.DummyHiveAuthorizationTaskFactoryImpl.class.getName());
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  partition = new Partition(table);
  SessionState.start(conf);
  context = new Context(conf);
  parseDriver = new ParseDriver();
  analyzer = new DDLSemanticAnalyzer(queryState, db);
  // Stub the mocked Hive so lookups resolve to the test table and partition.
  Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
  Mockito.when(db.getTable(TABLE_QNAME, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false)).thenReturn(partition);
  HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
  auth.setConf(conf);
  currentUser = auth.getUserName();
  DummyHiveAuthorizationTaskFactoryImpl.reset();
}
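A test built on this setup would typically parse an authorization statement with parseDriver, run it through the DDL analyzer, and inspect the resulting tasks. The body below is only a hedged sketch: the test name, GRANT statement, and expected task count are illustrative, JUnit's Assert is assumed, and depending on the Hive version you may first need to unwrap the synthetic root token (see ParseUtils.findRootNonNullToken in the HiveASTRewriter example further down).
  // Hypothetical test body; the statement and assertion are illustrative only.
  @Test
  public void testGrantProducesTask() throws Exception {
    ASTNode ast = parseDriver.parse("GRANT SELECT ON TABLE " + TABLE + " TO USER user1", context);
    analyzer.analyze(ast, context);                          // DDLSemanticAnalyzer builds the DDL tasks
    Assert.assertEquals(1, analyzer.getRootTasks().size());  // expect a single authorization task
  }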
use of org.apache.hadoop.hive.ql.parse.ParseDriver in project hive by apache.
the class QTestUtil method resetParser.
public void resetParser() throws SemanticException {
  drv.init();
  pd = new ParseDriver();
  queryState = new QueryState(conf);
  sem = new SemanticAnalyzer(queryState);
}
use of org.apache.hadoop.hive.ql.parse.ParseDriver in project incubator-atlas by apache.
the class HiveASTRewriter method rewrite.
public String rewrite(String sourceQry) throws RewriteException {
  String result = sourceQry;
  ASTNode tree = null;
  try {
    ParseDriver pd = new ParseDriver();
    tree = pd.parse(sourceQry, queryContext, true);
    tree = ParseUtils.findRootNonNullToken(tree);
    this.rwCtx = new RewriteContext(sourceQry, tree, queryContext.getTokenRewriteStream());
    rewrite(tree);
    result = toSQL();
  } catch (ParseException e) {
    LOG.error("Could not parse the query {} ", sourceQry, e);
    throw new RewriteException("Could not parse query : ", e);
  }
  return result;
}
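A caller hands the rewriter the original query text and gets back the rewritten SQL. The call site below is a minimal sketch; the HiveASTRewriter(HiveConf) constructor and the sample query are assumptions, so verify against the actual class in incubator-atlas before relying on them.
  // Hypothetical call site for the rewrite() method above.
  HiveConf conf = new HiveConf();
  HiveASTRewriter rewriter = new HiveASTRewriter(conf);  // constructor signature assumed
  String rewritten = rewriter.rewrite("SELECT col1 FROM tab1 WHERE col2 = 'x'");  // throws RewriteException on parse failure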