Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
The class TestUpdateDeleteSemanticAnalyzer, method parseAndAnalyze:
private ReturnInfo parseAndAnalyze(String query, String testName) throws IOException, ParseException, HiveException {
  SessionState.start(conf);
  Context ctx = new Context(conf);
  ctx.setCmd(query);
  ctx.setHDFSCleanup(true);
  ASTNode tree = ParseUtils.parse(query, ctx);
  BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
  SessionState.get().initTxnMgr(conf);
  db = sem.getDb();
  // I have to create the tables here (rather than in setup()) because I need the Hive
  // connection, which is conveniently created by the semantic analyzer.
  Map<String, String> params = new HashMap<String, String>(1);
  params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
  db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params);
  db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params);
  Table u = db.getTable("U");
  Map<String, String> partVals = new HashMap<String, String>(2);
  partVals.put("ds", "yesterday");
  db.createPartition(u, partVals);
  partVals.clear();
  partVals.put("ds", "today");
  db.createPartition(u, partVals);
  sem.analyze(tree, ctx);
  // validate the plan
  sem.validate();
  QueryPlan plan = new QueryPlan(query, sem, 0L, testName, null, null);
  return new ReturnInfo(sem, plan);
}
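For context, a caller of this helper might look like the following minimal sketch. The test method name, the DELETE statement, and the assumption that ReturnInfo exposes the analyzer through a field named sem are all illustrative, not taken from the Hive test itself.

// Hypothetical caller of parseAndAnalyze(); the query text and the
// ReturnInfo field name (sem) are assumptions for illustration.
@Test
public void testDeleteFromPartitionedTable() throws Exception {
  ReturnInfo rc = parseAndAnalyze("delete from U where a = 1", "testDeleteFromPartitionedTable");
  // A successful analysis of an ACID delete should yield at least one root task.
  Assert.assertFalse(rc.sem.getRootTasks().isEmpty());
}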
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
The class AuthorizationTestUtil, method analyze:
/**
 * Create DDLWork from the given AST.
 * @param ast the parsed DDL statement
 * @param queryState the query state whose configuration is used for analysis
 * @param db the Hive database handle
 * @return the DDLWork produced by the DDL semantic analyzer
 * @throws Exception
 */
public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
  DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
  SessionState.start(queryState.getConf());
  analyzer.analyze(ast, new Context(queryState.getConf()));
  List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
  return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
}
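A minimal sketch of driving this helper, assuming a GRANT statement parsed with ParseDriver in the same way other tests in this listing do; the statement text is illustrative.

// Hypothetical usage; the GRANT statement is illustrative only.
ParseDriver pd = new ParseDriver();
ASTNode ast = (ASTNode) pd.parse("GRANT SELECT ON TABLE t1 TO USER user1").getChild(0);
DDLWork work = AuthorizationTestUtil.analyze(ast, queryState, db);
// An authorization test would then inspect the descriptors carried by work.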
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
The class TestGenTezWork, method setUp:
/**
* @throws java.lang.Exception
*/
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
  // Init conf
  final HiveConf conf = new HiveConf(SemanticAnalyzer.class);
  SessionState.start(conf);
  // Init parse context
  final ParseContext pctx = new ParseContext();
  pctx.setContext(new Context(conf));
  ctx = new GenTezProcContext(conf, pctx, Collections.EMPTY_LIST, new ArrayList<Task<? extends Serializable>>(), Collections.EMPTY_SET, Collections.EMPTY_SET);
  proc = new GenTezWork(new GenTezUtils() {
    @Override
    protected void setupMapWork(MapWork mapWork, GenTezProcContext context, PrunedPartitionList partitions, TableScanOperator root, String alias) throws SemanticException {
      LinkedHashMap<String, Operator<? extends OperatorDesc>> map = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
      map.put("foo", root);
      mapWork.setAliasToWork(map);
    }
  });
  CompilationOpContext cCtx = new CompilationOpContext();
  fs = new FileSinkOperator(cCtx);
  fs.setConf(new FileSinkDesc());
  rs = new ReduceSinkOperator(cCtx);
  rs.setConf(new ReduceSinkDesc());
  TableDesc tableDesc = new TableDesc();
  tableDesc.setProperties(new Properties());
  rs.getConf().setKeySerializeInfo(tableDesc);
  ts = new TableScanOperator(cCtx);
  ts.setConf(new TableScanDesc(null));
  // Wire up a minimal operator chain: TS -> RS -> FS
  ts.getChildOperators().add(rs);
  rs.getParentOperators().add(ts);
  rs.getChildOperators().add(fs);
  fs.getParentOperators().add(rs);
  ctx.preceedingWork = null;
  ctx.currentRootOperator = ts;
}
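With that wiring in place, a test would hand the reduce sink to the processor through the NodeProcessor interface; the sketch below is an assumed shape for such a test, not the actual test body.

// Hypothetical test body; what a real test asserts on ctx afterwards
// is an assumption here.
@Test
public void testProcessReduceSink() throws SemanticException {
  proc.process(rs, null, ctx, (Object[]) null);
  // GenTezWork should now have generated Tez work for the TS -> RS chain;
  // a real test would assert on the tasks recorded in ctx here.
}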
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
The class TestMacroSemanticAnalyzer, method setup:
@Before
public void setup() throws Exception {
  queryState = new QueryState(null);
  conf = queryState.getConf();
  SessionState.start(conf);
  context = new Context(conf);
  analyzer = new MacroSemanticAnalyzer(queryState);
}
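Given this setup, an individual test would parse a macro statement and feed it to the analyzer. The sketch below is illustrative: the macro text is invented, and the ParseDriver-plus-getChild(0) pattern is borrowed from the TestQBCompact example later in this listing rather than from TestMacroSemanticAnalyzer itself.

// Hypothetical test body; the macro definition is illustrative.
ParseDriver pd = new ParseDriver();
ASTNode ast = (ASTNode) pd.parse("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))").getChild(0);
analyzer.analyze(ast, context);
// Analysis should produce a root task that registers the macro.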
Use of org.apache.hadoop.hive.ql.Context in project hive by apache.
The class TestQBCompact, method parseAndAnalyze:
private AlterTableSimpleDesc parseAndAnalyze(String query) throws Exception {
  ParseDriver hd = new ParseDriver();
  ASTNode head = (ASTNode) hd.parse(query).getChild(0);
  BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
  a.analyze(head, new Context(conf));
  List<Task<? extends Serializable>> roots = a.getRootTasks();
  Assert.assertEquals(1, roots.size());
  return ((DDLWork) roots.get(0).getWork()).getAlterTblSimpleDesc();
}
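A test built on this helper might look like the sketch below; the table name, the compaction clause, and the getCompactionType() accessor on AlterTableSimpleDesc are assumptions for illustration.

// Hypothetical caller; query text and accessor are assumptions.
@Test
public void testMajorCompaction() throws Exception {
  AlterTableSimpleDesc desc = parseAndAnalyze("alter table T compact 'major'");
  Assert.assertEquals("major", desc.getCompactionType().toLowerCase());
}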