
Example 6 with Context

Use of org.apache.hadoop.hive.ql.Context in project hive by apache.

From the class TestUpdateDeleteSemanticAnalyzer, method parseAndAnalyze:

private ReturnInfo parseAndAnalyze(String query, String testName) throws IOException, ParseException, HiveException {
    SessionState.start(conf);
    Context ctx = new Context(conf);
    ctx.setCmd(query);
    ctx.setHDFSCleanup(true);
    ASTNode tree = ParseUtils.parse(query, ctx);
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(queryState, tree);
    SessionState.get().initTxnMgr(conf);
    db = sem.getDb();
    // I have to create the tables here (rather than in setup()) because I need the Hive
    // connection, which is conveniently created by the semantic analyzer.
    Map<String, String> params = new HashMap<String, String>(1);
    params.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
    db.createTable("T", Arrays.asList("a", "b"), null, OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params);
    db.createTable("U", Arrays.asList("a", "b"), Arrays.asList("ds"), OrcInputFormat.class, OrcOutputFormat.class, 2, Arrays.asList("a"), params);
    Table u = db.getTable("U");
    Map<String, String> partVals = new HashMap<String, String>(2);
    partVals.put("ds", "yesterday");
    db.createPartition(u, partVals);
    partVals.clear();
    partVals.put("ds", "today");
    db.createPartition(u, partVals);
    sem.analyze(tree, ctx);
    // validate the plan
    sem.validate();
    QueryPlan plan = new QueryPlan(query, sem, 0L, testName, null, null);
    return new ReturnInfo(sem, plan);
}
Also used : Context(org.apache.hadoop.hive.ql.Context) Table(org.apache.hadoop.hive.ql.metadata.Table) HashMap(java.util.HashMap) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan)
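
A test built on this helper would pass an ACID statement and assert on the result. A minimal sketch follows; the UPDATE statement, the test name, and the ReturnInfo field names sem/plan are assumptions inferred from the constructor call above, not copied from the Hive source:

@Test
public void testUpdate() throws Exception {
    // Hypothetical usage of the helper above against the transactional table T
    ReturnInfo rc = parseAndAnalyze("update T set b = 'fred' where a = 1", "testUpdate");
    // Semantic analysis should have produced both an analyzer and a query plan
    Assert.assertNotNull(rc.sem);
    Assert.assertNotNull(rc.plan);
}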

Example 7 with Context

Use of org.apache.hadoop.hive.ql.Context in project hive by apache.

From the class AuthorizationTestUtil, method analyze:

/**
   * Create a DDLWork from the given AST.
   * @param ast the parsed DDL statement
   * @param queryState the query state carrying the active HiveConf
   * @param db the Hive metadata handle
   * @return the DDLWork produced by semantic analysis
   * @throws Exception
   */
public static DDLWork analyze(ASTNode ast, QueryState queryState, Hive db) throws Exception {
    DDLSemanticAnalyzer analyzer = new DDLSemanticAnalyzer(queryState, db);
    SessionState.start(queryState.getConf());
    analyzer.analyze(ast, new Context(queryState.getConf()));
    List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
    return (DDLWork) inList(rootTasks).ofSize(1).get(0).getWork();
}
Also used : Context(org.apache.hadoop.hive.ql.Context) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork) DDLSemanticAnalyzer(org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer)
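
Callers parse a DDL string first and hand the resulting AST to this helper. A sketch, assuming queryState and db are already initialized and mirroring the parse pattern from Example 10 below; the GRANT statement is illustrative:

// Hypothetical usage: turn a DDL string into the DDLWork it compiles to
ParseDriver pd = new ParseDriver();
ASTNode ast = (ASTNode) pd.parse("GRANT SELECT ON TABLE t1 TO USER user1").getChild(0);
DDLWork work = AuthorizationTestUtil.analyze(ast, queryState, db);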

Example 8 with Context

Use of org.apache.hadoop.hive.ql.Context in project hive by apache.

From the class TestGenTezWork, method setUp:

/**
   * @throws java.lang.Exception
   */
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
    // Init conf
    final HiveConf conf = new HiveConf(SemanticAnalyzer.class);
    SessionState.start(conf);
    // Init parse context
    final ParseContext pctx = new ParseContext();
    pctx.setContext(new Context(conf));
    ctx = new GenTezProcContext(conf, pctx, Collections.EMPTY_LIST, new ArrayList<Task<? extends Serializable>>(), Collections.EMPTY_SET, Collections.EMPTY_SET);
    proc = new GenTezWork(new GenTezUtils() {

        @Override
        protected void setupMapWork(MapWork mapWork, GenTezProcContext context, PrunedPartitionList partitions, TableScanOperator root, String alias) throws SemanticException {
            LinkedHashMap<String, Operator<? extends OperatorDesc>> map = new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
            map.put("foo", root);
            mapWork.setAliasToWork(map);
        }
    });
    CompilationOpContext cCtx = new CompilationOpContext();
    fs = new FileSinkOperator(cCtx);
    fs.setConf(new FileSinkDesc());
    rs = new ReduceSinkOperator(cCtx);
    rs.setConf(new ReduceSinkDesc());
    TableDesc tableDesc = new TableDesc();
    tableDesc.setProperties(new Properties());
    rs.getConf().setKeySerializeInfo(tableDesc);
    ts = new TableScanOperator(cCtx);
    ts.setConf(new TableScanDesc(null));
    ts.getChildOperators().add(rs);
    rs.getParentOperators().add(ts);
    rs.getChildOperators().add(fs);
    fs.getParentOperators().add(rs);
    ctx.preceedingWork = null;
    ctx.currentRootOperator = ts;
}
Also used : Context(org.apache.hadoop.hive.ql.Context) CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext) Operator(org.apache.hadoop.hive.ql.exec.Operator) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) FileSinkDesc(org.apache.hadoop.hive.ql.plan.FileSinkDesc) ArrayList(java.util.ArrayList) TableScanDesc(org.apache.hadoop.hive.ql.plan.TableScanDesc) Properties(java.util.Properties) LinkedHashMap(java.util.LinkedHashMap) MapWork(org.apache.hadoop.hive.ql.plan.MapWork) HiveConf(org.apache.hadoop.hive.conf.HiveConf) TableDesc(org.apache.hadoop.hive.ql.plan.TableDesc) ReduceSinkDesc(org.apache.hadoop.hive.ql.plan.ReduceSinkDesc) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc) Before(org.junit.Before)
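
A test method built on this fixture would then drive the processor over the reduce sink. A rough sketch; the assertion on ctx.currentTask is an assumption about GenTezProcContext's public fields, not taken from the actual TestGenTezWork:

@Test
public void testCreateMap() throws SemanticException {
    // Fire GenTezWork at the reduce sink; it should fold the preceding
    // table scan (ctx.currentRootOperator) into a new MapWork
    proc.process(rs, null, ctx, (Object[]) null);
    Assert.assertNotNull(ctx.currentTask);
}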

Example 9 with Context

Use of org.apache.hadoop.hive.ql.Context in project hive by apache.

From the class TestMacroSemanticAnalyzer, method setup:

@Before
public void setup() throws Exception {
    queryState = new QueryState(null);
    conf = queryState.getConf();
    SessionState.start(conf);
    context = new Context(conf);
    analyzer = new MacroSemanticAnalyzer(queryState);
}
Also used : Context(org.apache.hadoop.hive.ql.Context) QueryState(org.apache.hadoop.hive.ql.QueryState) Before(org.junit.Before)
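
With the fixture in place, an individual test parses a macro statement and runs it through the analyzer. A minimal sketch; the macro definition and the root-task assertion are illustrative rather than taken from the real test:

@Test
public void testCreateMacro() throws Exception {
    // Parse a macro definition using the same Context created in setup()
    ASTNode ast = ParseUtils.parse("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))", context);
    analyzer.analyze(ast, context);
    // A successful analysis yields a single root task carrying the macro work
    Assert.assertEquals(1, analyzer.getRootTasks().size());
}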

Example 10 with Context

Use of org.apache.hadoop.hive.ql.Context in project hive by apache.

From the class TestQBCompact, method parseAndAnalyze:

private AlterTableSimpleDesc parseAndAnalyze(String query) throws Exception {
    ParseDriver hd = new ParseDriver();
    ASTNode head = (ASTNode) hd.parse(query).getChild(0);
    BaseSemanticAnalyzer a = SemanticAnalyzerFactory.get(queryState, head);
    a.analyze(head, new Context(conf));
    List<Task<? extends Serializable>> roots = a.getRootTasks();
    Assert.assertEquals(1, roots.size());
    return ((DDLWork) roots.get(0).getWork()).getAlterTblSimpleDesc();
}
Also used : Context(org.apache.hadoop.hive.ql.Context) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
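
A test would then feed a compaction statement through this helper. A hedged sketch; the statement is illustrative, and the real test inspects the returned descriptor in more detail than the bare null check here:

@Test
public void testMajorCompaction() throws Exception {
    // The ALTER TABLE ... COMPACT statement compiles down to an AlterTableSimpleDesc
    AlterTableSimpleDesc desc = parseAndAnalyze("alter table foo compact 'major'");
    Assert.assertNotNull(desc);
}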

Aggregations

Context (org.apache.hadoop.hive.ql.Context): 47 uses
Path (org.apache.hadoop.fs.Path): 19 uses
IOException (java.io.IOException): 15 uses
DriverContext (org.apache.hadoop.hive.ql.DriverContext): 15 uses
CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext): 13 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 9 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 9 uses
Serializable (java.io.Serializable): 8 uses
Task (org.apache.hadoop.hive.ql.exec.Task): 7 uses
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 5 uses
Table (org.apache.hadoop.hive.ql.metadata.Table): 5 uses
ParseContext (org.apache.hadoop.hive.ql.parse.ParseContext): 5 uses
TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc): 5 uses
ArrayList (java.util.ArrayList): 4 uses
EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext): 4 uses
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 4 uses
HiveTxnManager (org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager): 4 uses
MapWork (org.apache.hadoop.hive.ql.plan.MapWork): 4 uses
JobClient (org.apache.hadoop.mapred.JobClient): 4 uses
JobConf (org.apache.hadoop.mapred.JobConf): 4 uses