Search in sources :

Example 86 with PrimitiveTypeInfo

Usage of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in the Apache Hive project.

From the class PartExprEvalUtils, method prepareExpr:

/**
 * Prepares an evaluator for a partition-pruning expression.
 *
 * <p>Builds a standard struct ObjectInspector over the given partition columns
 * (one Java-primitive inspector per column), initializes the expression
 * evaluator against it, and returns the evaluator paired with the inspector
 * describing the expression's result.
 *
 * @param expr                the partition filter expression to evaluate
 * @param partColumnNames     partition column names, in row order
 * @param partColumnTypeInfos primitive type infos matching {@code partColumnNames}
 * @return the result's primitive inspector paired with the initialized evaluator
 * @throws HiveException if evaluator initialization fails
 */
public static synchronized ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr(ExprNodeGenericFuncDesc expr, List<String> partColumnNames, List<PrimitiveTypeInfo> partColumnTypeInfos) throws HiveException {
    // One Java-primitive inspector per partition column, in column order.
    List<ObjectInspector> columnInspectors = new ArrayList<ObjectInspector>();
    for (int idx = 0; idx < partColumnNames.size(); idx++) {
        columnInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(partColumnTypeInfos.get(idx)));
    }
    StructObjectInspector rowInspector = ObjectInspectorFactory.getStandardStructObjectInspector(partColumnNames, columnInspectors);
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr);
    ObjectInspector resultInspector = evaluator.initialize(rowInspector);
    return ObjectPair.create((PrimitiveObjectInspector) resultInspector, evaluator);
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 87 with PrimitiveTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From the class GenericUDFUtils, method deriveInType:

// Based on update() above.
// Based on update() above.
/**
 * Derives a single TypeInfo that all the given child expressions can share.
 *
 * <p>VOID (null-literal) children are skipped since they do not constrain the
 * type. For the rest, a common type is computed pairwise via
 * {@link FunctionRegistry#getCommonClass}, with decimal widening applied
 * through {@code updateCommonTypeForDecimal}.
 *
 * @param children the child expressions to unify
 * @return the derived common type, or {@code null} when some pair of child
 *         types has no common type
 */
public static TypeInfo deriveInType(List<ExprNodeDesc> children) {
    TypeInfo result = null;
    for (ExprNodeDesc child : children) {
        TypeInfo childType = child.getTypeInfo();
        boolean isVoid = childType.getCategory() == Category.PRIMITIVE && ((PrimitiveTypeInfo) childType).getPrimitiveCategory() == PrimitiveCategory.VOID;
        if (isVoid || result == childType) {
            // Null literals add no constraint; an identical type adds nothing new.
            continue;
        }
        if (result == null) {
            // First non-void child seeds the running type.
            result = childType;
            continue;
        }
        TypeInfo common = FunctionRegistry.getCommonClass(result, childType);
        if (common == null) {
            // Incompatible child types: no common type exists.
            return null;
        }
        result = updateCommonTypeForDecimal(common, childType, result);
    }
    return result;
}
Also used : ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) BaseCharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 88 with PrimitiveTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From the class TypedBytesSerDe, method deserializeField:

/**
 * Reads one typed-bytes field from {@code in} and returns it as the Hive
 * Writable matching {@code type}, reusing {@code reuse} when it is non-null.
 *
 * <p>Only primitive categories are supported; LIST/MAP/STRUCT (and any other
 * category) throw. A recorded {@code NullWritable} marker yields {@code null}.
 *
 * @param in    the typed-bytes stream to read from
 * @param type  the expected Hive type of the field
 * @param reuse a Writable of the matching type to reuse, or {@code null}
 * @return the deserialized Writable, or {@code null} for null/VOID fields
 * @throws IOException if reading from the stream fails
 */
static Object deserializeField(TypedBytesWritableInput in, TypeInfo type, Object reuse) throws IOException {
    // The writer records the concrete Writable type ahead of each value.
    Class<? extends Writable> writableType = in.readType();
    if (writableType != null && writableType.isAssignableFrom(NullWritable.class)) {
        // A NullWritable marker means the recorded value is null.
        return null;
    }
    if (type.getCategory() != Category.PRIMITIVE) {
        // Currently, deserialization of complex types is not supported.
        throw new RuntimeException("Unsupported category: " + type.getCategory());
    }
    PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
    // Each branch reuses the caller-provided Writable when possible and
    // returns whatever the reader hands back.
    switch (primitiveType.getPrimitiveCategory()) {
        case VOID:
            return null;
        case BOOLEAN: {
            BooleanWritable w = reuse == null ? new BooleanWritable() : (BooleanWritable) reuse;
            return in.readBoolean(w);
        }
        case BYTE: {
            ByteWritable w = reuse == null ? new ByteWritable() : (ByteWritable) reuse;
            return in.readByte(w);
        }
        case SHORT: {
            ShortWritable w = reuse == null ? new ShortWritable() : (ShortWritable) reuse;
            return in.readShort(w);
        }
        case INT: {
            IntWritable w = reuse == null ? new IntWritable() : (IntWritable) reuse;
            return in.readInt(w);
        }
        case LONG: {
            LongWritable w = reuse == null ? new LongWritable() : (LongWritable) reuse;
            return in.readLong(w);
        }
        case FLOAT: {
            FloatWritable w = reuse == null ? new FloatWritable() : (FloatWritable) reuse;
            return in.readFloat(w);
        }
        case DOUBLE: {
            DoubleWritable w = reuse == null ? new DoubleWritable() : (DoubleWritable) reuse;
            return in.readDouble(w);
        }
        case STRING: {
            Text w = reuse == null ? new Text() : (Text) reuse;
            return in.readText(w);
        }
        default:
            throw new RuntimeException("Unrecognized type: " + primitiveType.getPrimitiveCategory());
    }
}
Also used : FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Text(org.apache.hadoop.io.Text) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) NullWritable(org.apache.hadoop.io.NullWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) IntWritable(org.apache.hadoop.io.IntWritable)

Example 89 with PrimitiveTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From the class TestDruidSerDe, method serializeObject:

/**
 * Serializes {@code rowObject} through the DruidSerDe and asserts the result
 * matches the expected {@code DRUID_WRITABLE} value map entry-for-entry.
 *
 * <p>The row inspector is built from the table columns in {@code properties}
 * plus the Druid timestamp-granularity pseudo-column appended last.
 *
 * @param properties    table properties holding the column names and types
 * @param serDe         the SerDe under test
 * @param rowObject     the row to serialize
 * @param druidWritable unused here; kept for signature compatibility with callers
 * @throws SerDeException if serialization fails
 */
private static void serializeObject(Properties properties, DruidSerDe serDe, Object[] rowObject, DruidWritable druidWritable) throws SerDeException {
    // Column names: the declared table columns plus the granularity column.
    final List<String> columnNames = new ArrayList<>();
    columnNames.addAll(Utilities.getColumnNames(properties));
    columnNames.add(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME);
    // Matching primitive type infos, ending with the granularity column's
    // timestamp type.
    final List<PrimitiveTypeInfo> columnTypes = new ArrayList<>();
    for (String typeName : Utilities.getColumnTypes(properties)) {
        columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo(typeName));
    }
    columnTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("timestamp"));
    // One writable object inspector per column type, in the same order.
    List<ObjectInspector> inspectors = new ArrayList<>();
    for (PrimitiveTypeInfo columnType : columnTypes) {
        inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(columnType));
    }
    ObjectInspector rowInspector = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, inspectors);
    // Serialize the row and compare against the expected writable.
    DruidWritable serialized = (DruidWritable) serDe.serialize(rowObject, rowInspector);
    assertEquals(DRUID_WRITABLE.getValue().size(), serialized.getValue().size());
    for (Entry<String, Object> expected : DRUID_WRITABLE.getValue().entrySet()) {
        assertEquals(expected.getValue(), serialized.getValue().get(expected.getKey()));
    }
}
Also used : Function(com.google.common.base.Function) DruidWritable(org.apache.hadoop.hive.druid.serde.DruidWritable) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ArrayList(java.util.ArrayList) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 90 with PrimitiveTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo in project hive by apache.

From the class HBaseStorageHandler, method decomposePredicate:

/**
 * Splits {@code predicate} into the part that can be pushed down to HBase as a
 * row-key/timestamp filter and the residual part that Hive must still evaluate.
 *
 * Only simple conditions on the key (or timestamp) column are pushed: a single
 * equality, a single bound, or a pair of bounds forming a closed range. Every
 * condition group that fails the checks below is folded into the residual
 * predicate via extractResidualCondition; groups that pass are folded into the
 * pushed predicate via extractStorageHandlerCondition.
 *
 * @param jobConf   job configuration (currently unused in this body)
 * @param hBaseSerDe provides the key and timestamp column mappings
 * @param predicate the full filter expression to decompose
 * @return a DecomposedPredicate holding the pushed and residual parts (either
 *         may be null when empty)
 */
public static DecomposedPredicate decomposePredicate(JobConf jobConf, HBaseSerDe hBaseSerDe, ExprNodeDesc predicate) {
    ColumnMapping keyMapping = hBaseSerDe.getHBaseSerdeParam().getKeyColumnMapping();
    ColumnMapping tsMapping = hBaseSerDe.getHBaseSerdeParam().getTimestampColumnMapping();
    // Analyzer recognizes conditions on the key column (and timestamp column,
    // when one is mapped); everything else lands in the residual immediately.
    IndexPredicateAnalyzer analyzer = HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(keyMapping.columnName, keyMapping.isComparable(), tsMapping == null ? null : tsMapping.columnName);
    List<IndexSearchCondition> conditions = new ArrayList<IndexSearchCondition>();
    ExprNodeGenericFuncDesc pushedPredicate = null;
    ExprNodeGenericFuncDesc residualPredicate = (ExprNodeGenericFuncDesc) analyzer.analyzePredicate(predicate, conditions);
    // Conditions are grouped per column; each group is accepted or rejected
    // as a whole.
    for (List<IndexSearchCondition> searchConditions : HiveHBaseInputFormatUtil.decompose(conditions).values()) {
        int scSize = searchConditions.size();
        if (scSize < 1 || 2 < scSize) {
            // Either there was nothing which could be pushed down (size = 0),
            // or there were complex predicates which we don't support yet.
            // Currently supported are one of the form:
            // 1. key < 20                        (size = 1)
            // 2. key = 20                        (size = 1)
            // 3. key < 20 and key > 10           (size = 2)
            // Add to residual
            residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
            continue;
        }
        if (scSize == 2 && (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName()) || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqual.class.getName()))) {
            // If one of the predicates is =, then any other predicate with it is illegal.
            // Add to residual
            residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
            continue;
        }
        boolean sameType = sameTypeIndexSearchConditions(searchConditions);
        if (!sameType) {
            // If type for column and constant are different, we currently do not support pushing them
            residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
            continue;
        }
        TypeInfo typeInfo = searchConditions.get(0).getColumnDesc().getTypeInfo();
        if (typeInfo.getCategory() == Category.PRIMITIVE && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) == PrimitiveGrouping.NUMERIC_GROUP) {
            // NOTE(review): original comment truncated ("would be returned.").
            // For numeric key types only equality or a fully bounded range is
            // pushed — presumably because an open-ended scan over the binary
            // key encoding could return incorrect results; confirm against
            // HiveHBaseTableInputFormat.
            if (scSize == 2) {
                // Two conditions must form a closed range: exactly one lower
                // and one upper bound.
                boolean lowerBound = false;
                boolean upperBound = false;
                if (searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqualOrLessThan.class.getName()) || searchConditions.get(0).getComparisonOp().equals(GenericUDFOPLessThan.class.getName())) {
                    lowerBound = true;
                } else {
                    upperBound = true;
                }
                if (searchConditions.get(1).getComparisonOp().equals(GenericUDFOPEqualOrGreaterThan.class.getName()) || searchConditions.get(1).getComparisonOp().equals(GenericUDFOPGreaterThan.class.getName())) {
                    upperBound = true;
                } else {
                    lowerBound = true;
                }
                if (!upperBound || !lowerBound) {
                    // Not valid range, add to residual
                    residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
                    continue;
                }
            } else {
                // scSize == 1: a single numeric condition is only pushed when
                // it is an equality.
                if (!searchConditions.get(0).getComparisonOp().equals(GenericUDFOPEqual.class.getName())) {
                    // Not valid range, add to residual
                    residualPredicate = extractResidualCondition(analyzer, searchConditions, residualPredicate);
                    continue;
                }
            }
        }
        // This one can be pushed
        pushedPredicate = extractStorageHandlerCondition(analyzer, searchConditions, pushedPredicate);
    }
    DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
    decomposedPredicate.pushedPredicate = pushedPredicate;
    decomposedPredicate.residualPredicate = residualPredicate;
    return decomposedPredicate;
}
Also used : IndexSearchCondition(org.apache.hadoop.hive.ql.index.IndexSearchCondition) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) ColumnMapping(org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping) IndexPredicateAnalyzer(org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer)

Aggregations

PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)149 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)58 ArrayList (java.util.ArrayList)55 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)48 StructTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo)45 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)41 ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo)34 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)33 MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo)33 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)26 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)24 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)24 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)23 UnionTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)23 BytesWritable (org.apache.hadoop.io.BytesWritable)22 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)21 Text (org.apache.hadoop.io.Text)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)19 Timestamp (java.sql.Timestamp)18 List (java.util.List)18