Example 51 with WindowingException

Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class QueryDefDeserializer, method visit.

/*
	 * Use the serde class name and properties on the query output specification
	 * to recreate the serde and OI on the query output definition
	 */
@Override
public void visit(QueryOutputDef output) throws WindowingException {
    String serDeClassName = output.getOutputSpec().getSerDeClass();
    Properties serDeProps = output.getOutputSpec().getSerDeProps();
    try {
        SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
        serDe.initialize(hConf, serDeProps);
        output.setSerDe(serDe);
    } catch (SerDeException se) {
        throw new WindowingException(se);
    }
}
Also used: SerDe (org.apache.hadoop.hive.serde2.SerDe), WindowingException (com.sap.hadoop.windowing.WindowingException), Properties (java.util.Properties), SerDeException (org.apache.hadoop.hive.serde2.SerDeException)
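
The same lookup-and-initialize sequence can be exercised standalone. A minimal sketch, assuming LazySimpleSerDe and hand-written "columns"/"columns.types" properties (in the visitor, both the class name and the properties come from the query output specification):

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class RecreateSerDeSketch {

    // Recreate a SerDe from a class name plus properties, as the visitor does,
    // and return its ObjectInspector.
    public static ObjectInspector recreate(Configuration conf) throws SerDeException {
        String serDeClassName = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe";
        Properties serDeProps = new Properties();
        serDeProps.setProperty("columns", "id,name");
        serDeProps.setProperty("columns.types", "int,string");
        SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
        serDe.initialize(conf, serDeProps);
        return serDe.getObjectInspector();
    }
}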

Example 52 with WindowingException

Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class QueryDefDeserializer, method visit.

/*
	 * 1. Determine the serde properties from the hive table definition.
	 * 2. Retrieve the deserializer using the serde class name and
	 *    initialize it using the serde properties.
	 * 3. Set up the serde and OI on the hive table definition. The OI is
	 *    used to evaluate expressions in the next PTF in the chain;
	 *    it is constructed from the hive meta table info.
	 * 4. Add the hive table definition to the input map on the
	 *    query translation info.
	 */
@Override
public void visit(HiveTableDef hiveTable) throws WindowingException {
    this.qInDef = hiveTable;
    String serDeClassName = hiveTable.getTableSerdeClassName();
    Properties serDeProps = new Properties();
    Map<String, String> serdePropsMap = hiveTable.getTableSerdeProps();
    for (Map.Entry<String, String> e : serdePropsMap.entrySet()) {
        serDeProps.setProperty(e.getKey(), e.getValue());
    }
    try {
        SerDe serDe = (SerDe) SerDeUtils.lookupDeserializer(serDeClassName);
        serDe.initialize(hConf, serDeProps);
        hiveTable.setSerde(serDe);
        hiveTable.setOI((StructObjectInspector) serDe.getObjectInspector());
    } catch (SerDeException se) {
        throw new WindowingException(se);
    }
    tInfo.addInput(hiveTable);
    inputInfo = tInfo.getInputInfo(hiveTable);
}
Also used: SerDe (org.apache.hadoop.hive.serde2.SerDe), WindowingException (com.sap.hadoop.windowing.WindowingException), Properties (java.util.Properties), SerDeException (org.apache.hadoop.hive.serde2.SerDeException)
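
Once the table's OI has been set, downstream PTFs resolve columns against it. A minimal sketch of that usage, with the struct OI built by hand instead of coming from serDe.getObjectInspector() (the field names are illustrative):

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class TableOIUsageSketch {

    public static void main(String[] args) {
        // Stand-in for the OI the visitor sets on the hive table definition.
        StructObjectInspector tableOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("symbol", "price"),
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                        PrimitiveObjectInspectorFactory.javaDoubleObjectInspector));
        // Resolve a column by name, as expression evaluation in the next PTF would.
        StructField priceField = tableOI.getStructFieldRef("price");
        System.out.println(priceField.getFieldObjectInspector().getTypeName());
    }
}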

Example 53 with WindowingException

Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class TranslateUtils, method validateValueBoundaryExprType.

public static void validateValueBoundaryExprType(ObjectInspector OI) throws WindowingException {
    if (!OI.getCategory().equals(Category.PRIMITIVE)) {
        throw new WindowingException("Value Boundary expression must be of primitve type");
    }
    PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) OI;
    PrimitiveCategory pC = pOI.getPrimitiveCategory();
    switch(pC) {
        case BYTE:
        case DOUBLE:
        case FLOAT:
        case INT:
        case LONG:
        case SHORT:
        case TIMESTAMP:
            break;
        default:
            throw new WindowingException(sprintf("Primitive type %s not supported in Value Boundary expression", pC));
    }
}
Also used: WindowingException (com.sap.hadoop.windowing.WindowingException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
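
A hedged usage sketch: a primitive inspector such as an int OI passes the check, while a list OI makes the method throw. The import path for TranslateUtils is assumed here:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import com.sap.hadoop.windowing.WindowingException;
// Assumed package for TranslateUtils in the SQLWindowing codebase.
import com.sap.hadoop.windowing.query2.translate.TranslateUtils;

public class BoundaryTypeCheckSketch {

    public static void main(String[] args) throws WindowingException {
        ObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        // INT is in the allowed set, so this returns normally.
        TranslateUtils.validateValueBoundaryExprType(intOI);

        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(intOI);
        try {
            // LIST is not a primitive category, so this throws.
            TranslateUtils.validateValueBoundaryExprType(listOI);
        } catch (WindowingException expected) {
            System.out.println(expected.getMessage());
        }
    }
}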

Example 54 with WindowingException

Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class CompositeDataType, method define.

public static CompositeDataType define(StructObjectInspector OI) throws WindowingException {
    List<? extends StructField> fields = OI.getAllStructFieldRefs();
    @SuppressWarnings("unchecked") DataType<? extends WritableComparable>[] elementTypes = (DataType<? extends WritableComparable>[]) new DataType[fields.size()];
    int i = 0;
    for (StructField f : fields) {
        ObjectInspector fOI = f.getFieldObjectInspector();
        if (fOI.getCategory() != Category.PRIMITIVE) {
            throw new WindowingException("Cannot handle non primitve fields for partitioning/sorting");
        }
        PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) fOI;
        switch(pOI.getPrimitiveCategory()) {
            case BOOLEAN:
                elementTypes[i] = BOOLEAN;
                break;
            case DOUBLE:
                elementTypes[i] = DOUBLE;
                break;
            case BYTE:
                elementTypes[i] = BYTE;
                break;
            case FLOAT:
                elementTypes[i] = FLOAT;
                break;
            case INT:
                elementTypes[i] = INT;
                break;
            case LONG:
                elementTypes[i] = LONG;
                break;
            case SHORT:
                elementTypes[i] = SHORT;
                break;
            case STRING:
                elementTypes[i] = TEXT;
                break;
            default:
                throw new WindowingException(Utils.sprintf("Cannot handle datatype %s for partitioning/sorting", pOI.toString()));
        }
        i++;
    }
    return new CompositeDataType(",", elementTypes);
}
Also used: StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructField (org.apache.hadoop.hive.serde2.objectinspector.StructField), WritableComparable (org.apache.hadoop.io.WritableComparable), WindowingException (com.sap.hadoop.windowing.WindowingException)
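
A hedged usage sketch for define(): build a standard struct OI with primitive fields and derive the composite type from it. The import for CompositeDataType is omitted because its package is project-specific:

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

import com.sap.hadoop.windowing.WindowingException;
// import of CompositeDataType omitted; its package is project-specific

public class CompositeDataTypeSketch {

    public static void main(String[] args) throws WindowingException {
        // Stand-in for a partition/sort key struct: STRING maps to TEXT and INT maps to INT
        // in the switch above; a non-primitive field would make define() throw.
        StructObjectInspector keyOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("symbol", "tradeYear"),
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                        PrimitiveObjectInspectorFactory.javaIntObjectInspector));
        CompositeDataType keyType = CompositeDataType.define(keyOI);
        System.out.println(keyType);
    }
}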

Example 55 with WindowingException

Use of com.sap.hadoop.windowing.WindowingException in project SQLWindowing by hbutani.

From the class WindowingHiveCliDriver, method setupWindowing.

public void setupWindowing() throws WindowingException {
    // not nice, but ...
    try {
        // NoSuchFieldException
        Field f = CliDriver.class.getDeclaredField("conf");
        f.setAccessible(true);
        // IllegalAccessException
        cfg = (HiveConf) f.get(this);
        // NoSuchFieldException
        f = CliDriver.class.getDeclaredField("console");
        f.setAccessible(true);
        // IllegalAccessException
        hiveConsole = (LogHelper) f.get(this);
    } catch (Throwable t) {
        throw new WindowingException("Failed to access conf and console members from HiveCliDriver", t);
    }
    // initialize windowing client
    wClient = new WindowingClient(this);
}
Also used: Field (java.lang.reflect.Field), WindowingException (com.sap.hadoop.windowing.WindowingException), CliDriver (org.apache.hadoop.hive.cli.CliDriver)
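
The reflection trick generalizes. A minimal sketch of reading a private field and wrapping any reflective failure in WindowingException (the helper name is hypothetical):

import java.lang.reflect.Field;

import com.sap.hadoop.windowing.WindowingException;

public class PrivateFieldAccessSketch {

    // Hypothetical helper: read a private field declared on declaringClass from target,
    // converting reflection failures into WindowingException as setupWindowing() does.
    @SuppressWarnings("unchecked")
    public static <T> T readPrivateField(Object target, Class<?> declaringClass, String fieldName)
            throws WindowingException {
        try {
            Field f = declaringClass.getDeclaredField(fieldName);
            f.setAccessible(true);
            return (T) f.get(target);
        } catch (Throwable t) {
            throw new WindowingException(
                    "Failed to access field " + fieldName + " on " + declaringClass.getName(), t);
        }
    }
}

With such a helper, the two lookups above collapse to readPrivateField(this, CliDriver.class, "conf") and readPrivateField(this, CliDriver.class, "console").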

Aggregations

WindowingException (com.sap.hadoop.windowing.WindowingException) 62
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 18
SerDeException (org.apache.hadoop.hive.serde2.SerDeException) 11
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 10
IOException (java.io.IOException) 9
SerDe (org.apache.hadoop.hive.serde2.SerDe) 9
ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) 8
ArrayList (java.util.ArrayList) 7
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) 7
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient) 6
Properties (java.util.Properties) 5
Path (org.apache.hadoop.fs.Path) 5
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 5
Table (org.apache.hadoop.hive.metastore.api.Table) 5
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 5
Writable (org.apache.hadoop.io.Writable) 5
TableFuncDef (com.sap.hadoop.windowing.query2.definition.TableFuncDef) 4
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 4
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode) 4
ArgDef (com.sap.hadoop.windowing.query2.definition.ArgDef) 3