Search in sources :

Example 56 with WindowingException

use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class ListFactory, the method createList:

/**
 * Reflectively instantiates a {@code ByteBasedList} implementation.
 *
 * @param clsName  fully-qualified name of a ByteBasedList subclass; it must
 *                 declare a public constructor taking a single {@code int}
 * @param capacity value passed to that (int) constructor
 * @return a new list instance of the requested class
 * @throws WindowingException wrapping any reflection or instantiation failure
 */
@SuppressWarnings("unchecked")
public static ByteBasedList createList(String clsName, int capacity) throws WindowingException {
    try {
        // Resolve the implementation class by name, then invoke its (int) constructor.
        Class<? extends ByteBasedList> listClass =
                (Class<? extends ByteBasedList>) Class.forName(clsName);
        Constructor<? extends ByteBasedList> intCtor = listClass.getConstructor(Integer.TYPE);
        return intCtor.newInstance(capacity);
    } catch (Exception reflectionFailure) {
        // Normalize ClassNotFound/NoSuchMethod/Instantiation/etc. into the
        // framework's checked exception type.
        throw new WindowingException(reflectionFailure);
    }
}
Also used : WindowingException(com.sap.hadoop.windowing.WindowingException) WindowingException(com.sap.hadoop.windowing.WindowingException)

Example 57 with WindowingException

use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class HiveUtils, the method getRowResolver (ObjectInspector overload):

/**
 * Builds a Hive {@code RowResolver} for the given table alias, mapping each
 * struct field of the row ObjectInspector to a non-partition ColumnInfo.
 *
 * @param tabAlias           alias under which all columns are registered
 * @param rowObjectInspector struct OI describing the row layout
 * @return a RowResolver with one entry per struct field
 * @throws WindowingException wrapping any failure during resolution
 */
public static RowResolver getRowResolver(String tabAlias, StructObjectInspector rowObjectInspector) throws WindowingException {
    LOG.info("HiveUtils::getRowResolver invoked on ObjectInspector");
    try {
        RowResolver resolver = new RowResolver();
        for (StructField field : rowObjectInspector.getAllStructFieldRefs()) {
            String fieldName = field.getFieldName();
            // false => not a partition/virtual column
            ColumnInfo colInfo = new ColumnInfo(
                    fieldName,
                    TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
                    tabAlias,
                    false);
            resolver.put(tabAlias, fieldName, colInfo);
        }
        return resolver;
    } catch (Exception cause) {
        throw new WindowingException(cause);
    }
}
Also used : WindowingException(com.sap.hadoop.windowing.WindowingException) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) WindowingException(com.sap.hadoop.windowing.WindowingException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Example 58 with WindowingException

use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class HiveUtils, the method getRowResolver (metastore-table overload):

/**
 * Builds a {@code RowResolver} for a Hive table looked up through the
 * metastore: the regular columns come from the table's deserializer OI, then
 * partition columns and Hive's registered virtual columns are appended as
 * hidden/partition entries.
 *
 * @param db    database name; normalized/validated via {@code validateDB}
 * @param table table name to resolve
 * @param alias alias under which all columns are registered
 * @param conf  Hive configuration used for metastore and registry access
 * @return a fully populated RowResolver for the table
 * @throws WindowingException rethrown as-is, or wrapping any other failure
 */
public static RowResolver getRowResolver(String db, String table, String alias, HiveConf conf) throws WindowingException {
    LOG.info("HiveUtils::getRowResolver invoked on " + table);
    try {
        HiveMetaStoreClient client = getClient(conf);
        db = validateDB(client, db);
        org.apache.hadoop.hive.ql.metadata.Table t = Hive.get(conf).getTable(db, table);
        StructObjectInspector rowObjectInspector = (StructObjectInspector) t.getDeserializer().getObjectInspector();
        // Regular (non-partition) columns, derived from the deserializer's OI.
        RowResolver rwsch = getRowResolver(alias, rowObjectInspector);
        // Partition columns are not part of the serialized row; add them explicitly.
        for (FieldSchema part_col : t.getPartCols()) {
            LOG.trace("Adding partition col: " + part_col);
            rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(), TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
        }
        // Register Hive's virtual columns as hidden entries.
        // (The original also collected them into a never-used local list;
        // that dead code has been removed.)
        for (VirtualColumn vc : VirtualColumn.getRegistry(conf)) {
            rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(), vc.getTypeInfo(), alias, true, vc.getIsHidden()));
        }
        return rwsch;
    } catch (WindowingException w) {
        // Already the framework's exception type: rethrow untouched so the
        // original stack/cause is preserved.
        throw w;
    } catch (Exception me) {
        throw new WindowingException(me);
    }
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) WindowingException(com.sap.hadoop.windowing.WindowingException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) WindowingException(com.sap.hadoop.windowing.WindowingException) VirtualColumn(org.apache.hadoop.hive.ql.metadata.VirtualColumn) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 59 with WindowingException

use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class IOUtils, the method createTableWindowingInput:

@SuppressWarnings("unchecked")
/**
 * Creates a {@code WindowingInput} over a Hive table by resolving the table's
 * storage descriptor, configuring the mapred InputFormat/SerDe properties on a
 * derived HiveConf, and initializing a {@code TableWindowingInput}.
 *
 * @param dbName    database containing the table (validated via HiveUtils)
 * @param tableName table to read
 * @param conf      base Hadoop configuration
 * @return an initialized TableWindowingInput for the table's data
 * @throws WindowingException rethrown as-is, or wrapping any other failure
 */
@SuppressWarnings("unchecked")
public static WindowingInput createTableWindowingInput(String dbName, String tableName, Configuration conf) throws WindowingException {
    try {
        HiveMetaStoreClient client = HiveUtils.getClient(conf);
        String db = HiveUtils.validateDB(client, dbName);
        Table t = HiveUtils.getTable(client, db, tableName);
        StorageDescriptor sd = t.getSd();
        HiveConf hConf = new HiveConf(conf, IOUtils.class);
        JobConf job = new JobConf(hConf);
        Class<? extends InputFormat<? extends Writable, ? extends Writable>> inputFormatClass = (Class<? extends InputFormat<? extends Writable, ? extends Writable>>) Class.forName(sd.getInputFormat());
        hConf.setClass("mapred.input.format.class", inputFormatClass, InputFormat.class);
        hConf.set(INPUT_INPUTFORMAT_CLASS, inputFormatClass.getName());
        // getDeclaredConstructor().newInstance() rather than the discouraged
        // Class.newInstance(), which propagates checked constructor exceptions
        // without declaring them. Both paths end in the catch(Exception) below.
        InputFormat<? extends Writable, ? extends Writable> iFmt = inputFormatClass.getDeclaredConstructor().newInstance();
        if (iFmt instanceof TextInputFormat) {
            ((TextInputFormat) iFmt).configure(job);
        }
        Path p = new Path(sd.getLocation());
        /*
			 * Convert the Path in the StorageDescriptor into a Path in the current FileSystem.
			 * Used in testing: Jobs run on MiniDFSCluster, whereas hive metadata refers to a real cluster.
			 */
        {
            p = makeQualified(p, conf);
        }
        FileInputFormat.addInputPath(job, p);
        InputSplit[] iSplits = iFmt.getSplits(job, 1);
        org.apache.hadoop.mapred.RecordReader<Writable, Writable> rdr = (org.apache.hadoop.mapred.RecordReader<Writable, Writable>) iFmt.getRecordReader(iSplits[0], job, Reporter.NULL);
        try {
            hConf.set(INPUT_PATH, sd.getLocation());
            hConf.set(INPUT_KEY_CLASS, rdr.createKey().getClass().getName());
            hConf.set(INPUT_VALUE_CLASS, rdr.createValue().getClass().getName());
        } finally {
            // The reader is only needed to discover the key/value classes;
            // close it so the split's underlying file handle is released
            // (the original leaked it).
            rdr.close();
        }
        hConf.set(INPUT_SERDE_CLASS, sd.getSerdeInfo().getSerializationLib());
        TableWindowingInput tIn = new TableWindowingInput();
        tIn.initialize(null, hConf, MetaStoreUtils.getSchema(t));
        return tIn;
    } catch (WindowingException w) {
        throw w;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Table(org.apache.hadoop.hive.metastore.api.Table) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Writable(org.apache.hadoop.io.Writable) IOException(java.io.IOException) WindowingException(com.sap.hadoop.windowing.WindowingException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) TextInputFormat(org.apache.hadoop.mapred.TextInputFormat) TextInputFormat(org.apache.hadoop.mapred.TextInputFormat) InputFormat(org.apache.hadoop.mapred.InputFormat) FileInputFormat(org.apache.hadoop.mapred.FileInputFormat) WindowingException(com.sap.hadoop.windowing.WindowingException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) JobConf(org.apache.hadoop.mapred.JobConf) InputSplit(org.apache.hadoop.mapred.InputSplit)

Example 60 with WindowingException

use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class TranslateUtils, the method setupSerdeAndOI:

/**
 * For NOOP table functions, the serde is the same as that on the input
 * hive table,; for other table functions it is the lazy binary serde.
 * If the query has a map-phase, the map oi is set to be the oi on the
 * lazy binary serde unless the table function is a NOOP_MAP_TABLE_FUNCTION
 * (in which case it is set to the oi on the serde of the input hive
 * table definition).
 * @param tDef
 * @param inputDef
 * @param tInfo
 * @param tEval
 * @throws WindowingException
 */
public static void setupSerdeAndOI(TableFuncDef tDef, QueryInputDef inputDef, QueryTranslationInfo tInfo, TableFunctionEvaluator tEval) throws WindowingException {
    /*
		 * setup the SerDe: NOOP functions pass rows through unchanged, so they
		 * reuse the SerDe of the input definition; every other table function
		 * serializes through the lazy binary SerDe built over its output OI.
		 */
    SerDe serde = null;
    // to the next function in the chain.
    if (tDef.getName().equals(FunctionRegistry.NOOP_TABLE_FUNCTION) || tDef.getName().equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
        serde = inputDef.getSerde();
    } else {
        serde = TranslateUtils.createLazyBinarySerDe(tInfo.getHiveCfg(), tEval.getOutputOI());
    }
    tDef.setSerde(serde);
    tDef.setOI(structOIOf(serde));
    // A map-phase OI is only needed when the function transforms its raw input.
    if (tEval.isTransformsRawInput()) {
        if (tDef.getName().equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
            serde = inputDef.getSerde();
        } else {
            serde = TranslateUtils.createLazyBinarySerDe(tInfo.getHiveCfg(), tEval.getRawInputOI());
        }
        tDef.setMapOI(structOIOf(serde));
    }
}

/**
 * Fetches the SerDe's ObjectInspector as a struct OI, converting
 * SerDeException to WindowingException. Extracted because the original
 * duplicated this try/catch for both setOI and setMapOI.
 */
private static StructObjectInspector structOIOf(SerDe serde) throws WindowingException {
    try {
        return (StructObjectInspector) serde.getObjectInspector();
    } catch (SerDeException se) {
        throw new WindowingException(se);
    }
}
Also used : SerDe(org.apache.hadoop.hive.serde2.SerDe) LazyBinarySerDe(org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe) WindowingException(com.sap.hadoop.windowing.WindowingException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Aggregations

WindowingException (com.sap.hadoop.windowing.WindowingException)62 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)18 SerDeException (org.apache.hadoop.hive.serde2.SerDeException)11 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)10 IOException (java.io.IOException)9 SerDe (org.apache.hadoop.hive.serde2.SerDe)9 ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator)8 ArrayList (java.util.ArrayList)7 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)7 HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient)6 Properties (java.util.Properties)5 Path (org.apache.hadoop.fs.Path)5 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)5 Table (org.apache.hadoop.hive.metastore.api.Table)5 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)5 Writable (org.apache.hadoop.io.Writable)5 TableFuncDef (com.sap.hadoop.windowing.query2.definition.TableFuncDef)4 HiveConf (org.apache.hadoop.hive.conf.HiveConf)4 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)4 ArgDef (com.sap.hadoop.windowing.query2.definition.ArgDef)3