Search in sources :

Example 31 with HiveVarchar

use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.

The following example is from the class TestGenericUDFCastFormat, method testStringTypesToDateWithFormat.

@Test
public void testStringTypesToDateWithFormat() throws HiveException {
    // Plain java.lang.String input: the full value is visible to the cast.
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    testCast(DATE, stringOI, "1969-07-30 13:00", "yyyy-MM-dd hh24:mi", "1969-07-30");
    testCast(DATE, stringOI, "307-2009", "ddmm-yyyy", "2009-07-30");
    testCast(DATE, stringOI, "307-2009", "ddd-yyyy", "2009-11-03");
    // HiveChar input. NOTE(review): the declared lengths (15, 7) are shorter than
    // the literals, so the value is presumably truncated before parsing — which
    // would explain why the expected dates differ from the String cases above;
    // confirm against HiveChar semantics.
    ObjectInspector charOI = PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector;
    testCast(DATE, charOI, new HiveChar("1969-07-30 13:00", 15), "yyyy-MM-dd hh24:mi", "1969-07-30");
    testCast(DATE, charOI, new HiveChar("307-2009", 7), "ddmm-yyyy", "2200-07-30");
    testCast(DATE, charOI, new HiveChar("307-2009", 7), "ddd-yyyy", "2200-11-03");
    // HiveVarchar input with the same truncating lengths as the HiveChar cases.
    ObjectInspector varcharOI = PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector;
    testCast(DATE, varcharOI, new HiveVarchar("1969-07-30 13:00", 15), "yyyy-MM-dd hh24:mi", "1969-07-30");
    testCast(DATE, varcharOI, new HiveVarchar("307-2009", 7), "ddmm-yyyy", "2200-07-30");
    testCast(DATE, varcharOI, new HiveVarchar("307-2009", 7), "ddd-yyyy", "2200-11-03");
}
Also used : ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) Test(org.junit.Test)

Example 32 with HiveVarchar

use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.

The following example is from the class TestGenericUDFCeil, method testVarchar.

@Test
public void testVarchar() throws HiveException {
    // Build a varchar(12) argument holding a decimal string.
    HiveVarcharWritable argValue = new HiveVarcharWritable(new HiveVarchar("32300.004747", 12));
    VarcharTypeInfo argType = TypeInfoFactory.getVarcharTypeInfo(12);
    ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(argType) };
    DeferredObject[] argObjects = { new DeferredJavaObject(argValue) };

    GenericUDFCeil udf = new GenericUDFCeil();
    PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
    // ceil() of a varchar input resolves to a bigint result type.
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, resultOI.getTypeInfo());
    LongWritable result = (LongWritable) udf.evaluate(argObjects);
    Assert.assertEquals(32301L, result.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)

Example 33 with HiveVarchar

use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.

The following example is from the class HiveJsonReader, method visitLeafNode.

/**
 * Converts a JSON leaf node into the Java primitive object that the given
 * ObjectInspector describes.
 *
 * @param leafNode The node pointing at the JSON value
 * @param oi The ObjectInspector describing the target type (must be a
 *          PrimitiveObjectInspector)
 * @return A Java primitive Object for the node's value
 * @throws SerDeException The SerDe is not configured correctly
 */
private Object visitLeafNode(final JsonNode leafNode, final ObjectInspector oi) throws SerDeException {
    final PrimitiveObjectInspector primitiveOI = (PrimitiveObjectInspector) oi;
    final PrimitiveTypeInfo typeInfo = primitiveOI.getTypeInfo();
    // Complex JSON values (objects/arrays) are only tolerated for STRING
    // targets, where they may be stringified below.
    if (typeInfo.getPrimitiveCategory() != PrimitiveCategory.STRING) {
        Preconditions.checkArgument(leafNode.getNodeType() != JsonNodeType.OBJECT);
        Preconditions.checkArgument(leafNode.getNodeType() != JsonNodeType.ARRAY);
    }
    switch(typeInfo.getPrimitiveCategory()) {
        case INT:
            return Integer.valueOf(leafNode.asInt());
        case BYTE:
            return Byte.valueOf((byte) leafNode.asInt());
        case SHORT:
            return Short.valueOf((short) leafNode.asInt());
        case LONG:
            return Long.valueOf(leafNode.asLong());
        case BOOLEAN:
            return Boolean.valueOf(leafNode.asBoolean());
        case FLOAT:
            return Float.valueOf((float) leafNode.asDouble());
        case DOUBLE:
            return Double.valueOf(leafNode.asDouble());
        case STRING:
            // Scalar values map directly to their text form.
            if (leafNode.isValueNode()) {
                return leafNode.asText();
            }
            // Complex value under a string column: serialize it back to JSON
            // text only if the table opted in via the feature flag.
            if (isEnabled(Feature.STRINGIFY_COMPLEX_FIELDS)) {
                return leafNode.toString();
            }
            throw new SerDeException("Complex field found in JSON does not match table definition: " + typeInfo.getTypeName() + ", please consider enabling `" + JsonSerDe.STRINGIFY_COMPLEX + "` table property");
        case BINARY:
            return getByteValue(leafNode);
        case DATE:
            return Date.valueOf(leafNode.asText());
        case TIMESTAMP:
            return tsParser.parseTimestamp(leafNode.asText());
        case DECIMAL:
            return HiveDecimal.create(leafNode.asText());
        case TIMESTAMPLOCALTZ: {
            // Parse as a plain timestamp first, then attach the zone declared
            // by the column's type info.
            final Timestamp parsed = tsParser.parseTimestamp(leafNode.asText());
            final ZoneId zone = ((TimestampLocalTZTypeInfo) typeInfo).timeZone();
            final TimestampTZ zoned = new TimestampTZ();
            zoned.set(parsed.toEpochSecond(), parsed.getNanos(), zone);
            return zoned;
        }
        case VARCHAR:
            // Length comes from the declared column type, not from the data.
            return new HiveVarchar(leafNode.asText(), ((BaseCharTypeInfo) typeInfo).getLength());
        case CHAR:
            return new HiveChar(leafNode.asText(), ((BaseCharTypeInfo) typeInfo).getLength());
        default:
            throw new SerDeException("Could not convert from string to type: " + typeInfo.getTypeName());
    }
}
Also used : TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ) ZoneId(java.time.ZoneId) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) TimestampLocalTZTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Example 34 with HiveVarchar

use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.

The following example is from the class PrimitiveObjectInspectorUtils, method getHiveVarchar.

/**
 * Extracts a HiveVarchar from the given object using its ObjectInspector.
 * Non-varchar inputs are converted via their string representation.
 *
 * @param o the value to convert; may be null
 * @param oi the inspector describing {@code o}
 * @return the varchar value, or null when {@code o} is null
 */
public static HiveVarchar getHiveVarchar(Object o, PrimitiveObjectInspector oi) {
    if (o == null) {
        return null;
    }
    switch(oi.getPrimitiveCategory()) {
        case VARCHAR:
            // Already a varchar: hand back the inspector's Java object directly.
            return ((HiveVarcharObjectInspector) oi).getPrimitiveJavaObject(o);
        default:
            // Is there a way to provide char length here?
            // It might actually be ok as long as there is an object inspector (with char length)
            // receiving this value.
            HiveVarchar converted = new HiveVarchar();
            converted.setValue(getString(o, oi));
            return converted;
    }
}
Also used : HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar)

Example 35 with HiveVarchar

use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.

The following example is from the class TestGenericUDFLikeAny, method testTrue.

@Test
public void testTrue() throws HiveException {
    udf = new GenericUDFLikeAny();
    // Three string-typed arguments: the value plus two LIKE patterns.
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector[] arguments = { stringOI, stringOI, stringOI };
    udf.initialize(arguments);

    DeferredJavaObject value = new DeferredJavaObject(new Text("abc"));
    DeferredJavaObject patternOne = new DeferredJavaObject(new Text("%b%"));
    // Second pattern arrives as a varchar writable; "a%" also matches "abc".
    HiveVarchar patternTwoValue = new HiveVarchar();
    patternTwoValue.setValue("a%");
    GenericUDF.DeferredJavaObject patternTwo = new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(patternTwoValue));

    GenericUDF.DeferredJavaObject[] args = { value, patternOne, patternTwo };
    BooleanWritable output = (BooleanWritable) udf.evaluate(args);
    assertEquals(true, output.get());
}
Also used : DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) Test(org.junit.Test)

Aggregations

HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)95 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)61 Test (org.junit.Test)35 Text (org.apache.hadoop.io.Text)31 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)28 HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable)27 VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo)26 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)23 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)21 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)21 ArrayList (java.util.ArrayList)20 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)20 CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)20 LongWritable (org.apache.hadoop.io.LongWritable)19 Date (org.apache.hadoop.hive.common.type.Date)18 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)18 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)17 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)17 BooleanWritable (org.apache.hadoop.io.BooleanWritable)17 FloatWritable (org.apache.hadoop.io.FloatWritable)17