
Example 36 with Converter

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.

the class GenericUDFDateDiff method checkArguments.

private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException {
    if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(i, "Only primitive type arguments are accepted but " + arguments[i].getTypeName() + " was passed as argument " + (i + 1));
    }
    PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
    Converter converter;
    switch(inputType) {
        case STRING:
        case VARCHAR:
        case CHAR:
            // Normalize all string-like inputs to a writable string before date parsing.
            converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
            break;
        case TIMESTAMP:
            converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
            break;
        case DATE:
            converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
            break;
        default:
            throw new UDFArgumentException("DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE types as " + (i + 1) + "-th argument, got " + inputType);
    }
    return converter;
}
Also used: UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) TimestampConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
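
A Converter produced this way is typically applied per row in the UDF's evaluate() method. Below is a minimal sketch of that wiring, assuming a hypothetical to_string_pair() UDF that normalizes both of its arguments to writable Text; the class name and its behavior are illustrative, not Hive source.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class ToStringPairUDF extends GenericUDF {

    private transient Converter[] converters;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 2) {
            throw new UDFArgumentLengthException("to_string_pair() requires exactly two arguments");
        }
        // One converter per argument, each normalizing its input to writable Text.
        converters = new Converter[arguments.length];
        for (int i = 0; i < arguments.length; i++) {
            converters[i] = ObjectInspectorConverters.getConverter(arguments[i], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        }
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        // Converters are applied per row; a null input stays null after conversion.
        Text left = (Text) converters[0].convert(arguments[0].get());
        Text right = (Text) converters[1].convert(arguments[1].get());
        if (left == null || right == null) {
            return null;
        }
        return new Text(left.toString() + ":" + right.toString());
    }

    @Override
    public String getDisplayString(String[] children) {
        return "to_string_pair(" + children[0] + ", " + children[1] + ")";
    }
}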

Example 37 with Converter

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.

the class TestETypeConverter method testTimestampInt96ConverterGMT.

@Test
public void testTimestampInt96ConverterGMT() {
    PrimitiveConverter converter;
    // Pin the writer time zone to GMT before building the converter.
    parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "GMT");
    converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    converter.addBinary(NanoTimeUtils.getNanoTime(ts, Calendar.getInstance(TimeZone.getTimeZone("GMT"))).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
Also used: PrimitiveConverter(org.apache.parquet.io.api.PrimitiveConverter) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Test(org.junit.Test)

Example 38 with Converter

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.

the class TestETypeConverter method testTimestampInt96ConverterLocal.

@Test
public void testTimestampInt96ConverterLocal() {
    PrimitiveConverter converter;
    // With no write-zone property set, the converter defaults to the local time zone.
    converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
    converter.addBinary(NanoTimeUtils.getNanoTime(ts, Calendar.getInstance()).toBinary());
    parent.assertWritableValue(new TimestampWritable(ts));
}
Also used: PrimitiveConverter(org.apache.parquet.io.api.PrimitiveConverter) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Test(org.junit.Test)
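
Both tests hinge on Parquet's INT96 timestamp layout: 8 bytes of nanos-of-day followed by a 4-byte Julian day, little-endian, which is what NanoTimeUtils produces (with an optional time-zone adjustment). The following is a self-contained sketch of that encoding using only the JDK; it mirrors the byte layout, not Hive's exact code path.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.time.LocalDateTime;
import java.time.temporal.JulianFields;

public class Int96TimestampSketch {

    // Encode a timestamp the way Parquet INT96 stores it:
    // 8 bytes nanos-of-day, then a 4-byte Julian day, little-endian.
    public static byte[] toInt96(LocalDateTime ts) {
        long julianDay = ts.toLocalDate().getLong(JulianFields.JULIAN_DAY);
        long nanosOfDay = ts.toLocalTime().toNanoOfDay();
        return ByteBuffer.allocate(12)
                .order(ByteOrder.LITTLE_ENDIAN)
                .putLong(nanosOfDay)
                .putInt((int) julianDay)
                .array();
    }

    public static void main(String[] args) {
        byte[] int96 = toInt96(LocalDateTime.of(2001, 1, 1, 1, 1, 1));
        System.out.println("INT96 bytes: " + int96.length); // prints 12
    }
}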

Example 39 with Converter

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project hive by apache.

the class DynamicPartitionPruner method prunePartitionSingleSource.

@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
    if (si.skipPruning.get()) {
        // in this case we've determined that there's too much data
        // to prune dynamically.
        LOG.info("Skip pruning on " + source + ", column " + si.columnName);
        return;
    }
    Set<Object> values = si.values;
    String columnName = si.columnName;
    if (LOG.isDebugEnabled()) {
        StringBuilder sb = new StringBuilder("Pruning ");
        sb.append(columnName);
        sb.append(" with ");
        for (Object value : values) {
            sb.append(value == null ? null : value.toString());
            sb.append(", ");
        }
        LOG.debug(sb.toString());
    }
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
    Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(Collections.singletonList(columnName), Collections.singletonList(oi));
    @SuppressWarnings("rawtypes") ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
    eval.initialize(soi);
    applyFilterToPartitions(converter, eval, columnName, values);
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
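
The converter built here bridges the two representations involved in pruning: partition values arrive from the metastore as Java strings, while the expression evaluator produces writables of the column's type. A minimal standalone sketch of that conversion step, with "int" as an arbitrary example column type:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class PartitionValueConversionSketch {

    public static void main(String[] args) {
        // Writable object inspector for the pruning column's type.
        ObjectInspector oi = PrimitiveObjectInspectorFactory
                .getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
        // Convert from Java String (how partition values arrive) to that writable type.
        Converter converter = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
        Object writable = converter.convert("42");
        // Prints: IntWritable = 42
        System.out.println(writable.getClass().getSimpleName() + " = " + writable);
    }
}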

Example 40 with Converter

use of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter in project SQLWindowing by hbutani.

the class Executor method executeSelectList.

/**
 * For each row in the partition:
 * 1. Evaluate the where condition, if present.
 * 2. Evaluate the value of each column in the select list.
 * 3. Forward either the writable value or the object, depending on the
 *    ForwardSink implementation.
 * @param qDef
 * @param oPart
 * @param rS
 * @throws WindowingException
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void executeSelectList(QueryDef qDef, Partition oPart, ForwardSink rS) throws WindowingException {
    ArrayList<ColumnDef> cols = qDef.getSelectList().getColumns();
    ObjectInspector selectOI = qDef.getSelectList().getOI();
    SerDe oSerDe = qDef.getOutput().getSerDe();
    Object[] output = new Object[cols.size()];
    WhereDef whDef = qDef.getWhere();
    boolean applyWhere = whDef != null;
    Converter whConverter = !applyWhere ? null : ObjectInspectorConverters.getConverter(whDef.getOI(), PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef.getExprEvaluator();
    Writable value = null;
    PartitionIterator<Object> pItr = oPart.iterator();
    RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);
    while (pItr.hasNext()) {
        int colCnt = 0;
        ArrayList selectList = new ArrayList();
        Object oRow = pItr.next();
        if (applyWhere) {
            Object whCond = null;
            try {
                whCond = whCondEval.evaluate(oRow);
                whCond = whConverter.convert(whCond);
            } catch (HiveException he) {
                throw new WindowingException(he);
            }
            if (whCond == null || !((Boolean) whCond).booleanValue()) {
                continue;
            }
        }
        for (ColumnDef cDef : cols) {
            try {
                Object newCol = cDef.getExprEvaluator().evaluate(oRow);
                output[colCnt++] = newCol;
                selectList.add(newCol);
            } catch (HiveException he) {
                throw new WindowingException(he);
            }
        }
        // Forward the object array directly if the sink accepts objects;
        // otherwise serialize the row and collect it as a key-value pair.
        if (rS.acceptObject()) {
            rS.collectOutput(output);
        } else {
            try {
                value = oSerDe.serialize(selectList, selectOI);
            } catch (SerDeException se) {
                throw new WindowingException(se);
            }
            rS.collectOutput(NullWritable.get(), value);
        }
    }
}
Also used: SerDe(org.apache.hadoop.hive.serde2.SerDe) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) WhereDef(com.sap.hadoop.windowing.query2.definition.WhereDef) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) ArrayList(java.util.ArrayList) ColumnDef(com.sap.hadoop.windowing.query2.definition.ColumnDef) NullWritable(org.apache.hadoop.io.NullWritable) Writable(org.apache.hadoop.io.Writable) WindowingException(com.sap.hadoop.windowing.WindowingException) Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)
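
The where-clause handling above relies on the same Converter mechanism: whatever object inspector the predicate expression reports, converting its output to javaBooleanObjectInspector lets the loop filter rows with a plain Boolean test. A minimal sketch of that pattern, with a writable boolean standing in for the result of whCondEval.evaluate(oRow):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;

public class WhereClauseConversionSketch {

    public static void main(String[] args) {
        // Normalize the predicate's output (writable boolean here) to a Java Boolean.
        Converter whConverter = ObjectInspectorConverters.getConverter(
                PrimitiveObjectInspectorFactory.writableBooleanObjectInspector,
                PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
        Object whCond = whConverter.convert(new BooleanWritable(true));
        // Mirrors the null/false skip test in executeSelectList.
        if (whCond == null || !((Boolean) whCond).booleanValue()) {
            System.out.println("row filtered out");
        } else {
            System.out.println("row passes the where clause");
        }
    }
}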

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 20 usages
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter): 17 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 16 usages
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 13 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 10 usages
ArrayList (java.util.ArrayList): 8 usages
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 8 usages
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 6 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 6 usages
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 5 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 5 usages
Test (org.junit.Test): 5 usages
List (java.util.List): 4 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 4 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 4 usages
ListObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector): 4 usages
PrimitiveConverter (org.apache.parquet.io.api.PrimitiveConverter): 4 usages
Properties (java.util.Properties): 3 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 3 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 3 usages