Search in sources:

Example 66 with PrimitiveCategory

use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project SQLWindowing by hbutani.

In the class TranslateUtils, the method validateValueBoundaryExprType:

/**
 * Validates that a windowing Value Boundary expression has a supported type.
 *
 * <p>Only primitive numeric types and TIMESTAMP are usable in a value-based
 * window boundary, because boundary evaluation needs range arithmetic on the
 * expression's value.
 *
 * @param OI object inspector of the boundary expression's result
 * @throws WindowingException if the expression is non-primitive, or is a
 *         primitive type (e.g. STRING, BOOLEAN, BINARY) that does not support
 *         range arithmetic
 */
public static void validateValueBoundaryExprType(ObjectInspector OI) throws WindowingException {
    if (!OI.getCategory().equals(Category.PRIMITIVE)) {
        // Fixed typo in user-facing message: "primitve" -> "primitive".
        throw new WindowingException("Value Boundary expression must be of primitive type");
    }
    PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) OI;
    PrimitiveCategory pC = pOI.getPrimitiveCategory();
    switch(pC) {
        // Numeric primitives and TIMESTAMP are the only categories on which
        // a value range (boundary +/- amount) is well defined.
        case BYTE:
        case DOUBLE:
        case FLOAT:
        case INT:
        case LONG:
        case SHORT:
        case TIMESTAMP:
            break;
        default:
            // Fixed typo in user-facing message: "Primitve" -> "Primitive".
            throw new WindowingException(sprintf("Primitive type %s not supported in Value Boundary expression", pC));
    }
}
Also used : WindowingException(com.sap.hadoop.windowing.WindowingException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)

Example 67 with PrimitiveCategory

use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project drill by apache.

In the class HiveFieldConverter, the method create:

/**
 * Builds a HiveFieldConverter for the given Hive type.
 *
 * <p>Non-decimal primitives are resolved through the {@code primMap} registry;
 * DECIMAL is special-cased so the converter matches the precision of the value
 * (Decimal9/18/28/38). Any unsupported category is reported via
 * {@code throwUnsupportedHiveDataTypeError}.
 *
 * @param typeInfo        Hive type descriptor for the field
 * @param fragmentContext context passed to the wide-decimal converters
 * @return a converter instance, never null on a successful path
 * @throws IllegalAccessException if the registered converter class is not accessible
 * @throws InstantiationException if the registered converter class cannot be instantiated
 */
public static HiveFieldConverter create(TypeInfo typeInfo, FragmentContext fragmentContext) throws IllegalAccessException, InstantiationException {
    switch (typeInfo.getCategory()) {
        case PRIMITIVE:
            final PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
            if (primitiveCategory == PrimitiveCategory.DECIMAL) {
                // Decimal converters are selected by precision, smallest holder first.
                final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
                final int precision = decimalTypeInfo.precision();
                final int scale = decimalTypeInfo.scale();
                if (precision <= 9) {
                    return new Decimal9(precision, scale);
                }
                if (precision <= 18) {
                    return new Decimal18(precision, scale);
                }
                if (precision <= 28) {
                    return new Decimal28(precision, scale, fragmentContext);
                }
                return new Decimal38(precision, scale, fragmentContext);
            }
            final Class<? extends HiveFieldConverter> converterClass = primMap.get(primitiveCategory);
            if (converterClass != null) {
                return converterClass.newInstance();
            }
            // Primitive category with no registered converter.
            throwUnsupportedHiveDataTypeError(primitiveCategory.toString());
            break;
        case LIST:
        case MAP:
        case STRUCT:
        case UNION:
        default:
            // Complex and unknown categories are not supported.
            throwUnsupportedHiveDataTypeError(typeInfo.getCategory().toString());
    }
    return null;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)

Example 68 with PrimitiveCategory

use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project drill by apache.

In the class HiveUtilities, the method convertPartitionType:

/**
 * Converts a Hive partition value, which always arrives in string form, into a
 * Java object appropriate for the partition column's type.
 *
 * @param typeInfo              Hive type of the partition column; must be PRIMITIVE,
 *                              since Hive only allows primitive partition column types
 * @param value                 the partition value as a string
 * @param defaultPartitionValue Hive's sentinel for an absent partition value; a match yields null
 * @return the converted value; null when the value equals the default-partition
 *         sentinel or cannot be parsed (Hive semantics: unparseable partition
 *         values are treated as NULL)
 * @throws DrillRuntimeException if the column type is not primitive
 */
public static Object convertPartitionType(TypeInfo typeInfo, String value, final String defaultPartitionValue) {
    if (typeInfo.getCategory() != Category.PRIMITIVE) {
        // In Hive only primitive types are allowed as partition column types.
        throw new DrillRuntimeException("Non-Primitive types are not allowed as partition column type in Hive, " + "but received one: " + typeInfo.getCategory());
    }
    // The sentinel string means "no value" for this partition: represent as SQL NULL.
    if (defaultPartitionValue.equals(value)) {
        return null;
    }
    final PrimitiveCategory pCat = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
    try {
        switch(pCat) {
            case BINARY:
                // NOTE(review): uses the platform default charset — presumably what the
                // downstream vector writer expects; confirm UTF-8 isn't required here.
                return value.getBytes();
            case BOOLEAN:
                return Boolean.parseBoolean(value);
            case DECIMAL:
                {
                    // Re-apply the declared precision/scale so the value fits the column type.
                    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
                    return HiveDecimalUtils.enforcePrecisionScale(HiveDecimal.create(value), decimalTypeInfo.precision(), decimalTypeInfo.scale());
                }
            case DOUBLE:
                return Double.parseDouble(value);
            case FLOAT:
                return Float.parseFloat(value);
            case BYTE:
            case SHORT:
            case INT:
                // BYTE and SHORT are deliberately widened to Integer — presumably to match
                // the consumer's expected boxed type; verify against the caller before changing.
                return Integer.parseInt(value);
            case LONG:
                return Long.parseLong(value);
            case STRING:
            case VARCHAR:
                // String-like values are handed over as raw bytes (platform default charset).
                return value.getBytes();
            case CHAR:
                // CHAR is fixed-width in Hive; trailing padding is stripped before conversion.
                return value.trim().getBytes();
            case TIMESTAMP:
                return Timestamp.valueOf(value);
            case DATE:
                return Date.valueOf(value);
        }
    } catch (final Exception e) {
        // In Hive, partition values that can't be converted from string are considered to be NULL.
        logger.trace("Failed to interpret '{}' value from partition value string '{}'", pCat, value);
        return null;
    }
    // Reached only for a primitive category with no case above.
    throwUnsupportedHiveDataTypeError(pCat.toString());
    return null;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) UserException(org.apache.drill.common.exceptions.UserException) DrillRuntimeException(org.apache.drill.common.exceptions.DrillRuntimeException) ExecutionSetupException(org.apache.drill.common.exceptions.ExecutionSetupException)

Example 69 with PrimitiveCategory

use of org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory in project cdap by caskdata.

In the class ObjectInspectorFactory, the method getReflectionObjectInspectorNoCache:

/**
 * Builds an ObjectInspector for the given reflective {@link Type} without
 * consulting the cache.
 *
 * <p>Handles, in order: generic arrays, parameterized Collections and Maps,
 * Java primitives and their boxed/Writable forms, enums (inspected as STRING),
 * plain arrays, and finally arbitrary classes treated as structs via their
 * declared non-static fields.
 *
 * @param t the reflective type to inspect
 * @return an ObjectInspector for {@code t}
 * @throws RuntimeException if {@code t} is not ultimately a Class
 */
private static ObjectInspector getReflectionObjectInspectorNoCache(Type t) {
    // Generic array (T[] where T is itself generic): inspect as a list of the component type.
    if (t instanceof GenericArrayType) {
        GenericArrayType at = (GenericArrayType) t;
        return getStandardListObjectInspector(getReflectionObjectInspector(at.getGenericComponentType()));
    }
    // Maps each type variable of the raw class to its actual argument, so struct
    // fields declared with a type variable can be resolved below.
    Map<TypeVariable, Type> genericTypes = null;
    if (t instanceof ParameterizedType) {
        ParameterizedType pt = (ParameterizedType) t;
        Type rawType = pt.getRawType();
        // Collection?
        if (Collection.class.isAssignableFrom((Class<?>) rawType)) {
            return getStandardListObjectInspector(getReflectionObjectInspector(pt.getActualTypeArguments()[0]));
        }
        // Map?
        if (Map.class.isAssignableFrom((Class<?>) rawType)) {
            return getStandardMapObjectInspector(getReflectionObjectInspector(pt.getActualTypeArguments()[0]), getReflectionObjectInspector(pt.getActualTypeArguments()[1]));
        }
        // Otherwise convert t to RawType so we will fall into the following if block.
        t = rawType;
        ImmutableMap.Builder<TypeVariable, Type> builder = ImmutableMap.builder();
        for (int i = 0; i < pt.getActualTypeArguments().length; i++) {
            builder.put(((Class<?>) t).getTypeParameters()[i], pt.getActualTypeArguments()[i]);
        }
        genericTypes = builder.build();
    }
    // Must be a class.
    if (!(t instanceof Class)) {
        throw new RuntimeException(ObjectInspectorFactory.class.getName() + " internal error:" + t);
    }
    Class<?> c = (Class<?>) t;
    // Java Primitive Type?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaType(c)) {
        return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaType(c).primitiveCategory);
    }
    // Java Primitive Class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(c)) {
        return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory);
    }
    // Primitive Writable class?
    if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(c)) {
        return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory);
    }
    // Enum class?  Enums are exposed to Hive as STRING values.
    if (Enum.class.isAssignableFrom(c)) {
        return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
    }
    // Array
    if (c.isArray()) {
        return getStandardListObjectInspector(getReflectionObjectInspector(c.getComponentType()));
    }
    // Must be struct because List and Map need to be ParameterizedType
    Preconditions.checkState(!List.class.isAssignableFrom(c));
    Preconditions.checkState(!Map.class.isAssignableFrom(c));
    Preconditions.checkState(!c.isInterface(), "Cannot inspect an interface.");
    ReflectionStructObjectInspector oi = new ReflectionStructObjectInspector();
    // put it into the cache BEFORE it is initialized to make sure we can catch
    // recursive types.
    objectInspectorCache.put(t, oi);
    Field[] fields = ObjectInspectorUtils.getDeclaredNonStaticFields(c);
    List<ObjectInspector> structFieldObjectInspectors = new ArrayList<>(fields.length);
    for (Field field : fields) {
        // "this" pointer present in nested classes and that references the parent.
        if (Modifier.isTransient(field.getModifiers()) || field.isSynthetic()) {
            continue;
        }
        if (!oi.shouldIgnoreField(field.getName())) {
            Type newType = field.getGenericType();
            // A field declared with a type variable (e.g. T value) is resolved through
            // the genericTypes mapping built from the ParameterizedType above.
            if (newType instanceof TypeVariable) {
                Preconditions.checkNotNull(genericTypes, "Type was not recognized as a parameterized type.");
                Preconditions.checkNotNull(genericTypes.get(newType), "Generic type " + newType + " not a parameter of class " + c);
                newType = genericTypes.get(newType);
            }
            structFieldObjectInspectors.add(getReflectionObjectInspector(newType));
        }
    }
    // init runs AFTER the cache insertion above; the struct inspector may already
    // have been handed out for a recursive reference by this point.
    oi.init(c, structFieldObjectInspectors);
    return oi;
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ArrayList(java.util.ArrayList) GenericArrayType(java.lang.reflect.GenericArrayType) ImmutableMap(com.google.common.collect.ImmutableMap) ParameterizedType(java.lang.reflect.ParameterizedType) Field(java.lang.reflect.Field) GenericArrayType(java.lang.reflect.GenericArrayType) ParameterizedType(java.lang.reflect.ParameterizedType) Type(java.lang.reflect.Type) TypeVariable(java.lang.reflect.TypeVariable) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory)

Aggregations

PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)58 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)34 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)22 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)13 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)10 UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)10 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)10 ArrayList (java.util.ArrayList)9 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)9 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)9 DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable)8 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)8 Text (org.apache.hadoop.io.Text)8 ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)7 BytesWritable (org.apache.hadoop.io.BytesWritable)7 UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)6 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)6 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)6 TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable)6 Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category)6