Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
The class TestGenericUDFCastFormat, method testStringTypesToDateWithFormat:
@Test
public void testStringTypesToDateWithFormat() throws HiveException {
  ObjectInspector inputOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  testCast(DATE, inputOI, "1969-07-30 13:00", "yyyy-MM-dd hh24:mi", "1969-07-30");
  testCast(DATE, inputOI, "307-2009", "ddmm-yyyy", "2009-07-30");
  testCast(DATE, inputOI, "307-2009", "ddd-yyyy", "2009-11-03");

  inputOI = PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector;
  testCast(DATE, inputOI, new HiveChar("1969-07-30 13:00", 15), "yyyy-MM-dd hh24:mi", "1969-07-30");
  testCast(DATE, inputOI, new HiveChar("307-2009", 7), "ddmm-yyyy", "2200-07-30");
  testCast(DATE, inputOI, new HiveChar("307-2009", 7), "ddd-yyyy", "2200-11-03");

  inputOI = PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector;
  testCast(DATE, inputOI, new HiveVarchar("1969-07-30 13:00", 15), "yyyy-MM-dd hh24:mi", "1969-07-30");
  testCast(DATE, inputOI, new HiveVarchar("307-2009", 7), "ddmm-yyyy", "2200-07-30");
  testCast(DATE, inputOI, new HiveVarchar("307-2009", 7), "ddd-yyyy", "2200-11-03");
}
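In Hive's SQL:2016 datetime patterns, ddd is day-of-year, which is why "307-2009" with ddd-yyyy parses to November 3rd. Note also that the char(7) and varchar(7) inputs first truncate "307-2009" to "307-200"; the three-digit year then appears to be completed from the leading digits of the current year, which would explain the 2200 expectations. A standalone java.time sketch (illustrative only, not part of the Hive test) confirms the day-of-year arithmetic:

import java.time.LocalDate;

public class DayOfYearCheck {
  public static void main(String[] args) {
    // Day 307 of a non-leap year falls on November 3rd.
    System.out.println(LocalDate.ofYearDay(2009, 307)); // 2009-11-03
    System.out.println(LocalDate.ofYearDay(2200, 307)); // 2200-11-03
  }
}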
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
The class TestGenericUDFCeil, method testVarchar:
@Test
public void testVarchar() throws HiveException {
  GenericUDFCeil udf = new GenericUDFCeil();
  HiveVarchar vc = new HiveVarchar("32300.004747", 12);
  HiveVarcharWritable input = new HiveVarcharWritable(vc);
  VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
  LongWritable res = (LongWritable) udf.evaluate(args);
  Assert.assertEquals(32301L, res.get());
}
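As the assertions show, the varchar input is evaluated numerically and ceil returns a long, rounding toward positive infinity. The equivalent BigDecimal arithmetic (a standalone sketch, not Hive code) shows where 32301 comes from:

import java.math.BigDecimal;
import java.math.RoundingMode;

public class CeilCheck {
  public static void main(String[] args) {
    BigDecimal v = new BigDecimal("32300.004747");
    // CEILING rounds toward positive infinity, so any fractional part rounds up.
    System.out.println(v.setScale(0, RoundingMode.CEILING).longValueExact()); // 32301
  }
}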
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
The class HiveJsonReader, method visitLeafNode:
/**
* Visit a node if it is expected to be a primitive value (JSON leaf node).
*
* @param leafNode The node pointing at the JSON object
* @param oi The ObjectInspector to parse the value (must be a
* PrimitiveObjectInspector)
* @return A Java primitive Object
 * @throws SerDeException if the SerDe is not configured correctly
*/
private Object visitLeafNode(final JsonNode leafNode, final ObjectInspector oi) throws SerDeException {
  final PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
  final PrimitiveTypeInfo typeInfo = poi.getTypeInfo();
  if (typeInfo.getPrimitiveCategory() != PrimitiveCategory.STRING) {
    Preconditions.checkArgument(leafNode.getNodeType() != JsonNodeType.OBJECT);
    Preconditions.checkArgument(leafNode.getNodeType() != JsonNodeType.ARRAY);
  }
  switch (typeInfo.getPrimitiveCategory()) {
    case INT:
      return Integer.valueOf(leafNode.asInt());
    case BYTE:
      return Byte.valueOf((byte) leafNode.asInt());
    case SHORT:
      return Short.valueOf((short) leafNode.asInt());
    case LONG:
      return Long.valueOf(leafNode.asLong());
    case BOOLEAN:
      return Boolean.valueOf(leafNode.asBoolean());
    case FLOAT:
      return Float.valueOf((float) leafNode.asDouble());
    case DOUBLE:
      return Double.valueOf(leafNode.asDouble());
    case STRING:
      if (leafNode.isValueNode()) {
        return leafNode.asText();
      } else {
        if (isEnabled(Feature.STRINGIFY_COMPLEX_FIELDS)) {
          return leafNode.toString();
        } else {
          throw new SerDeException("Complex field found in JSON does not match table definition: "
              + typeInfo.getTypeName() + ", please consider enabling `"
              + JsonSerDe.STRINGIFY_COMPLEX + "` table property");
        }
      }
    case BINARY:
      return getByteValue(leafNode);
    case DATE:
      return Date.valueOf(leafNode.asText());
    case TIMESTAMP:
      return tsParser.parseTimestamp(leafNode.asText());
    case DECIMAL:
      return HiveDecimal.create(leafNode.asText());
    case TIMESTAMPLOCALTZ:
      final Timestamp ts = tsParser.parseTimestamp(leafNode.asText());
      final ZoneId zid = ((TimestampLocalTZTypeInfo) typeInfo).timeZone();
      final TimestampTZ tstz = new TimestampTZ();
      tstz.set(ts.toEpochSecond(), ts.getNanos(), zid);
      return tstz;
    case VARCHAR:
      return new HiveVarchar(leafNode.asText(), ((BaseCharTypeInfo) typeInfo).getLength());
    case CHAR:
      return new HiveChar(leafNode.asText(), ((BaseCharTypeInfo) typeInfo).getLength());
    default:
      throw new SerDeException("Could not convert from string to type: " + typeInfo.getTypeName());
  }
}
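Jackson's isValueNode() is what separates the plain scalar path from the stringify-or-fail path in the STRING case above. A standalone Jackson sketch (outside Hive) shows both kinds of node:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LeafNodeCheck {
  public static void main(String[] args) throws Exception {
    JsonNode root = new ObjectMapper().readTree("{\"name\":\"alice\",\"tags\":[\"a\",\"b\"]}");
    // A scalar field is a value node: a STRING column simply receives its text.
    System.out.println(root.get("name").isValueNode()); // true
    // An array is not a value node: it is serialized back to JSON text when
    // STRINGIFY_COMPLEX_FIELDS is enabled, and rejected with a SerDeException otherwise.
    System.out.println(root.get("tags").isValueNode()); // false
  }
}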
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
The class PrimitiveObjectInspectorUtils, method getHiveVarchar:
public static HiveVarchar getHiveVarchar(Object o, PrimitiveObjectInspector oi) {
  if (o == null) {
    return null;
  }
  HiveVarchar result = null;
  switch (oi.getPrimitiveCategory()) {
    case VARCHAR:
      result = ((HiveVarcharObjectInspector) oi).getPrimitiveJavaObject(o);
      break;
    default:
      // Is there a way to provide char length here?
      // It might actually be ok as long as there is an object inspector (with char length)
      // receiving this value.
      result = new HiveVarchar();
      result.setValue(getString(o, oi));
      break;
  }
  return result;
}
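The default branch stringifies any non-varchar primitive and wraps it in a HiveVarchar without a declared length. A minimal sketch of calling it with an int inspector (illustrative only, assuming Hive's serde2 classes are on the classpath):

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class GetHiveVarcharCheck {
  public static void main(String[] args) {
    // An int inspector takes the default branch: the value is stringified first.
    PrimitiveObjectInspector intOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
    HiveVarchar vc = PrimitiveObjectInspectorUtils.getHiveVarchar(42, intOI);
    System.out.println(vc.getValue()); // "42"
  }
}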
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
The class TestGenericUDFLikeAny, method testTrue:
@Test
public void testTrue() throws HiveException {
  udf = new GenericUDFLikeAny();
  ObjectInspector valueOIOne = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector valueOITwo = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector valueOIThree = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector[] arguments = { valueOIOne, valueOITwo, valueOIThree };
  udf.initialize(arguments);
  DeferredJavaObject valueObjOne = new DeferredJavaObject(new Text("abc"));
  DeferredJavaObject valueObjTwo = new DeferredJavaObject(new Text("%b%"));
  HiveVarchar vc = new HiveVarchar();
  vc.setValue("a%");
  GenericUDF.DeferredJavaObject[] args = { valueObjOne, valueObjTwo,
      new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(vc)) };
  BooleanWritable output = (BooleanWritable) udf.evaluate(args);
  assertEquals(true, output.get());
}
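LIKE ANY returns true when the value matches at least one of the patterns, and as the test shows, string and varchar pattern arguments can be mixed. In plain Java terms (an illustrative translation of the SQL LIKE patterns into regexes):

public class LikeAnyCheck {
  public static void main(String[] args) {
    String value = "abc";
    // SQL '%' corresponds to the regex ".*": '%b%' -> ".*b.*", 'a%' -> "a.*".
    boolean matchesAny = value.matches(".*b.*") || value.matches("a.*");
    System.out.println(matchesAny); // true: "abc" matches both patterns
  }
}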