Example 1 with HiveTableDef

Use of com.sap.hadoop.windowing.query2.definition.HiveTableDef in project SQLWindowing by hbutani.

From the class WindowingShell, method execute. The finally block inspects the query's HiveTableDef: when the input was an embedded Hive query (a HiveQueryDef), the temporary table created for it during translation (see Example 2) is dropped after execution.

protected void execute(QueryDef q, QueryOutputPrinter outP) throws WindowingException {
    QuerySpec qSpec = q.getSpec();
    try {
        executor.execute(q, this);
    } finally {
        HiveTableDef hiveTable = q.getHiveTableDef();
        if (hiveTable instanceof HiveQueryDef) {
            String tableName = hiveTable.getHiveTableSpec().getTableName();
            hiveQryExec.dropTable(tableName);
        }
    }
    if (qSpec.getOutput().getHiveTable() != null) {
        loadToOutputTable(q);
    }
    if (outP != null) {
        outP.printQueryOutput(q, cfg);
    }
}
Also used: HiveQueryDef(com.sap.hadoop.windowing.query2.definition.HiveQueryDef), QuerySpec(com.sap.hadoop.windowing.query2.specification.QuerySpec), HiveTableDef(com.sap.hadoop.windowing.query2.definition.HiveTableDef)

Example 2 with HiveTableDef

Use of com.sap.hadoop.windowing.query2.definition.HiveTableDef in project SQLWindowing by hbutani.

From the class InputTranslation, method translate (the HiveQuerySpec overload). The embedded Hive query is materialized as a table via createTableAsQuery, a HiveTableSpec describing that table is built, and translation is then delegated to the HiveTableSpec overload shown in Example 5.

private static HiveQueryDef translate(QueryDef qDef, HiveQuerySpec spec) throws WindowingException {
    HiveQueryDef def = new HiveQueryDef();
    HiveQueryExecutor hiveQryExec = qDef.getTranslationInfo().getHiveQueryExecutor();
    Hive hive = qDef.getTranslationInfo().getHive();
    String tableName = hiveQryExec.createTableAsQuery(spec.getHiveQuery());
    HiveTableSpec tSpec = new HiveTableSpec();
    tSpec.setDbName(hive.getCurrentDatabase());
    tSpec.setTableName(tableName);
    tSpec.setPartition(spec.getPartition());
    tSpec.setOrder(spec.getOrder());
    def = (HiveQueryDef) InputTranslation.translate(qDef, tSpec, (HiveTableDef) def);
    return def;
}
Also used: HiveQueryExecutor(com.sap.hadoop.windowing.runtime2.HiveQueryExecutor), Hive(org.apache.hadoop.hive.ql.metadata.Hive), HiveQueryDef(com.sap.hadoop.windowing.query2.definition.HiveQueryDef), HiveTableSpec(com.sap.hadoop.windowing.query2.specification.HiveTableSpec)

Example 3 with HiveTableDef

Use of com.sap.hadoop.windowing.query2.definition.HiveTableDef in project SQLWindowing by hbutani.

From the class InputTranslation, method getTableAlias. A Hive table input is addressed as db.table, while a table-function input receives a synthetic ptf_<inputNum> alias; a standalone sketch of both alias shapes follows the example.

private static String getTableAlias(QueryDef qDef, int inputNum, QueryInputDef inputDef) throws WindowingException {
    if (inputDef instanceof HiveTableDef) {
        HiveTableDef hTbldef = (HiveTableDef) inputDef;
        String db = ((HiveTableSpec) hTbldef.getSpec()).getDbName();
        String tableName = ((HiveTableSpec) hTbldef.getSpec()).getTableName();
        return db + "." + tableName;
    } else if (inputDef instanceof TableFuncDef) {
        return "ptf_" + inputNum;
    }
    throw new WindowingException(sprintf("Internal Error: attempt to translate %s", inputDef.getSpec()));
}
Also used: WindowingException(com.sap.hadoop.windowing.WindowingException), HiveTableSpec(com.sap.hadoop.windowing.query2.specification.HiveTableSpec), HiveTableDef(com.sap.hadoop.windowing.query2.definition.HiveTableDef), TableFuncDef(com.sap.hadoop.windowing.query2.definition.TableFuncDef)
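
For reference, a minimal standalone sketch (not project code; "default", "sales", and the input position 2 are hypothetical values) of the two alias shapes getTableAlias produces:

public class TableAliasDemo {
    public static void main(String[] args) {
        String db = "default";       // hypothetical database name
        String tableName = "sales";  // hypothetical table name
        int inputNum = 2;            // hypothetical position of a table-function input

        // Hive table input: qualified as "<db>.<table>"
        System.out.println(db + "." + tableName);  // prints "default.sales"
        // Table-function (PTF) input: synthetic alias "ptf_<inputNum>"
        System.out.println("ptf_" + inputNum);     // prints "ptf_2"
    }
}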

Example 4 with HiveTableDef

Use of com.sap.hadoop.windowing.query2.definition.HiveTableDef in project SQLWindowing by hbutani.

From the class MRUtils, method initialize.

/**
 * Construct the data structures containing ExprNodeDesc for partition
 * columns and order columns. Use the input definition to construct the list
 * of output columns for the ReduceSinkOperator
 *
 * @throws WindowingException
 */
public void initialize() throws WindowingException {
    TableFuncDef tabDef = RuntimeUtils.getFirstTableFunction(qdef);
    hiveTableDef = tabDef.getHiveTableDef();
    InputInfo inputInfo;
    ArrayList<ColumnDef> partColList = tabDef.getWindow().getPartDef().getColumns();
    TableFunctionEvaluator tEval = tabDef.getFunction();
    /*
     * If the query has a map phase, the inputInfo is retrieved from the map
     * output info of the table function definition. This is constructed
     * using the map output oi of the table function definition. If the
     * query does not have a map phase, the inputInfo is retrieved from the
     * QueryInputDef (either HiveTableDef or HiveQueryDef) of the query.
     */
    if (tEval.isTransformsRawInput()) {
        inputInfo = qdef.getTranslationInfo().getMapInputInfo(tabDef);
    } else {
        inputInfo = qdef.getTranslationInfo().getInputInfo(hiveTableDef);
    }
    for (ColumnDef colDef : partColList) {
        partCols.add(colDef.getExprNode());
    }
    ArrayList<OrderColumnDef> orderColList = tabDef.getWindow().getOrderDef().getColumns();
    for (OrderColumnDef colDef : orderColList) {
        Order order = colDef.getOrder();
        if (order.name().equals("ASC")) {
            orderString.append('+');
        } else {
            orderString.append('-');
        }
        orderCols.add(colDef.getExprNode());
        outputColumnNames.add(colDef.getAlias());
    }
    RowResolver rr = inputInfo.getRowResolver();
    ArrayList<ColumnInfo> colInfoList = rr.getColumnInfos();
    for (ColumnInfo colInfo : colInfoList) {
        String internalName = colInfo.getInternalName();
        TypeInfo type = colInfo.getType();
        valueCols.add(TranslateUtils.getExprDesc(internalName, type));
        outputColumnNames.add(internalName);
    }
}
Also used: Order(com.sap.hadoop.metadata.Order), OrderColumnDef(com.sap.hadoop.windowing.query2.definition.OrderColumnDef), ColumnDef(com.sap.hadoop.windowing.query2.definition.ColumnDef), ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo), RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver), TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), TableFuncDef(com.sap.hadoop.windowing.query2.definition.TableFuncDef), InputInfo(com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo), TableFunctionEvaluator(com.sap.hadoop.windowing.functions2.TableFunctionEvaluator)
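
The '+'/'-' characters appended to orderString above encode the sort direction of each order column for the ReduceSinkOperator. A minimal standalone sketch of just that encoding (the direction values are hypothetical):

public class OrderStringDemo {
    public static void main(String[] args) {
        String[] directions = { "ASC", "DESC", "ASC" };  // hypothetical order-column directions
        StringBuilder orderString = new StringBuilder();
        for (String dir : directions) {
            // ASC -> '+', anything else (DESC) -> '-'
            orderString.append(dir.equals("ASC") ? '+' : '-');
        }
        System.out.println(orderString);  // prints "+-+"
    }
}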

Example 5 with HiveTableDef

Use of com.sap.hadoop.windowing.query2.definition.HiveTableDef in project SQLWindowing by hbutani.

From the class InputTranslation, method translate (the HiveTableSpec overload). This is the overload Example 2 delegates to: it resolves the spec against the Hive metastore and records the table's input format, serde, serde properties, location, and object inspector on the HiveTableDef.

private static HiveTableDef translate(QueryDef qDef, HiveTableSpec spec, HiveTableDef def) throws WindowingException {
    def = def == null ? new HiveTableDef() : def;
    HiveMetaStoreClient hiveMSC = qDef.getTranslationInfo().getHiveMSClient();
    Hive hive = qDef.getTranslationInfo().getHive();
    def.setSpec(spec);
    if (spec.getDbName() == null) {
        spec.setDbName(hive.getCurrentDatabase());
    }
    try {
        Table t = hiveMSC.getTable(spec.getDbName(), spec.getTableName());
        qDef.getTranslationInfo().setTbl(TranslateUtils.getHiveMetaTable(hive, t.getDbName(), def.getHiveTableSpec().getTableName()));
        StorageDescriptor sd = t.getSd();
        def.setInputFormatClassName(sd.getInputFormat());
        def.setTableSerdeClassName(sd.getSerdeInfo().getSerializationLib());
        def.setTableSerdeProps(setupSerdeProps(qDef, sd));
        def.setLocation(sd.getLocation());
        Deserializer serde = HiveUtils.getDeserializer(qDef.getTranslationInfo().getHiveCfg(), t);
        def.setOI((StructObjectInspector) serde.getObjectInspector());
        def.setSerde((SerDe) serde);
    } catch (WindowingException we) {
        throw we;
    } catch (Exception he) {
        throw new WindowingException(he);
    }
    return def;
}
Also used: Hive(org.apache.hadoop.hive.ql.metadata.Hive), HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient), Table(org.apache.hadoop.hive.metastore.api.Table), Deserializer(org.apache.hadoop.hive.serde2.Deserializer), StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor), WindowingException(com.sap.hadoop.windowing.WindowingException), HiveTableDef(com.sap.hadoop.windowing.query2.definition.HiveTableDef)
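
The metadata captured on the HiveTableDef comes from the table's StorageDescriptor in the Hive metastore. A minimal sketch of that lookup using the plain metastore client (assumes a reachable metastore; "default" and "sales" are hypothetical names, and the class name is illustrative only):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableMetadataDemo {
    public static void main(String[] args) throws Exception {
        HiveMetaStoreClient msc = new HiveMetaStoreClient(new HiveConf());
        try {
            Table t = msc.getTable("default", "sales");  // hypothetical db/table
            StorageDescriptor sd = t.getSd();
            System.out.println(sd.getInputFormat());                      // input format class name
            System.out.println(sd.getSerdeInfo().getSerializationLib());  // serde class name
            System.out.println(sd.getLocation());                         // table location on HDFS
        } finally {
            msc.close();
        }
    }
}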

Aggregations

HiveTableDef (com.sap.hadoop.windowing.query2.definition.HiveTableDef): 3 usages
WindowingException (com.sap.hadoop.windowing.WindowingException): 2 usages
HiveQueryDef (com.sap.hadoop.windowing.query2.definition.HiveQueryDef): 2 usages
TableFuncDef (com.sap.hadoop.windowing.query2.definition.TableFuncDef): 2 usages
HiveTableSpec (com.sap.hadoop.windowing.query2.specification.HiveTableSpec): 2 usages
Hive (org.apache.hadoop.hive.ql.metadata.Hive): 2 usages
Order (com.sap.hadoop.metadata.Order): 1 usage
TableFunctionEvaluator (com.sap.hadoop.windowing.functions2.TableFunctionEvaluator): 1 usage
ColumnDef (com.sap.hadoop.windowing.query2.definition.ColumnDef): 1 usage
OrderColumnDef (com.sap.hadoop.windowing.query2.definition.OrderColumnDef): 1 usage
QuerySpec (com.sap.hadoop.windowing.query2.specification.QuerySpec): 1 usage
InputInfo (com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo): 1 usage
HiveQueryExecutor (com.sap.hadoop.windowing.runtime2.HiveQueryExecutor): 1 usage
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 1 usage
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 1 usage
Table (org.apache.hadoop.hive.metastore.api.Table): 1 usage
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 1 usage
RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver): 1 usage
Deserializer (org.apache.hadoop.hive.serde2.Deserializer): 1 usage
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 1 usage