Use of org.apache.hadoop.hive.common.type.HiveVarchar in project Hive by Apache.
The class KafkaJsonSerDe, method parseAsPrimitive.
private Object parseAsPrimitive(JsonNode value, TypeInfo typeInfo) throws SerDeException {
  switch (TypeInfoFactory.getPrimitiveTypeInfo(typeInfo.getTypeName()).getPrimitiveCategory()) {
  case TIMESTAMP:
    TimestampWritable timestampWritable = new TimestampWritable();
    timestampWritable.setTime(TS_PARSER.get().parseMillis(value.textValue()));
    return timestampWritable;
  case TIMESTAMPLOCALTZ:
    final long numberOfMillis = TS_PARSER.get().parseMillis(value.textValue());
    return new TimestampLocalTZWritable(new TimestampTZ(
        ZonedDateTime.ofInstant(Instant.ofEpochMilli(numberOfMillis),
            ((TimestampLocalTZTypeInfo) typeInfo).timeZone())));
  case BYTE:
    return new ByteWritable((byte) value.intValue());
  case SHORT:
    return new ShortWritable(value.shortValue());
  case INT:
    return new IntWritable(value.intValue());
  case LONG:
    return new LongWritable(value.longValue());
  case FLOAT:
    return new FloatWritable(value.floatValue());
  case DOUBLE:
    return new DoubleWritable(value.doubleValue());
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create(value.decimalValue()));
  case CHAR:
    return new HiveCharWritable(new HiveChar(value.textValue(), ((CharTypeInfo) typeInfo).getLength()));
  case VARCHAR:
    // VarcharTypeInfo, not CharTypeInfo, is the TypeInfo subclass for VARCHAR columns;
    // casting to CharTypeInfo (a sibling class) would throw ClassCastException.
    return new HiveVarcharWritable(new HiveVarchar(value.textValue(), ((VarcharTypeInfo) typeInfo).getLength()));
  case STRING:
    return new Text(value.textValue());
  case BOOLEAN:
    return new BooleanWritable(value.isBoolean() ? value.booleanValue() : Boolean.valueOf(value.textValue()));
  default:
    throw new SerDeException("Unknown type: " + typeInfo.getTypeName());
  }
}
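The method maps each Hive primitive category onto the matching Writable, pulling the value out of the Jackson JsonNode with the type-appropriate accessor. A minimal, self-contained sketch of the same idea for the INT and STRING branches (hypothetical example code, not part of Hive):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class JsonToWritableSketch {
  public static void main(String[] args) throws Exception {
    JsonNode node = new ObjectMapper().readTree("{\"id\": 42, \"name\": \"kafka\"}");
    // INT branch: JsonNode.intValue() -> IntWritable
    IntWritable id = new IntWritable(node.get("id").intValue());
    // STRING branch: JsonNode.textValue() -> Text
    Text name = new Text(node.get("name").textValue());
    System.out.println(id + " / " + name);   // 42 / kafka
  }
}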
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project Hive by Apache.
The class TestVectorStringExpressions, method testVarCharScalarCompareStringCol.
@Test
// Test VARCHAR literal to string column comparison
public void testVarCharScalarCompareStringCol() throws HiveException {
  VectorizedRowBatch batch = makeStringBatch();
  VectorExpression expr;

  expr = new FilterVarCharScalarEqualStringGroupColumn(
      new HiveVarchar(new String(red2), 8).getValue().getBytes(), 0);
  expr.evaluate(batch);
  // only red qualifies, and it's in entry 0
  Assert.assertTrue(batch.size == 1);
  Assert.assertTrue(batch.selected[0] == 0);

  batch = makeStringBatch();
  expr = new FilterVarCharScalarGreaterStringGroupColumn(
      new HiveVarchar(new String(red2), 8).getValue().getBytes(), 0);
  expr.evaluate(batch);
  // only green qualifies, and it's in entry 1
  Assert.assertTrue(batch.size == 1);
  Assert.assertTrue(batch.selected[0] == 1);

  batch = makeStringBatch();
  expr = new FilterVarCharScalarLessEqualStringGroupColumn(
      new HiveVarchar(new String(green), 10).getValue().getBytes(), 0);
  expr.evaluate(batch);
  // green and red qualify
  Assert.assertTrue(batch.size == 2);
  Assert.assertTrue(batch.selected[0] == 0);
  Assert.assertTrue(batch.selected[1] == 1);
}
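Each filter expression receives the VARCHAR scalar as raw bytes (HiveVarchar.getValue().getBytes()) plus the input column index; evaluate() then shrinks the batch in place so batch.selected lists only the qualifying rows. A plain-Java analogue of the equality filter (a hypothetical sketch, not the Hive kernel):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class FilterSketch {
  public static void main(String[] args) {
    byte[][] column = { "red".getBytes(StandardCharsets.UTF_8),
                        "green".getBytes(StandardCharsets.UTF_8) };
    byte[] scalar = "red".getBytes(StandardCharsets.UTF_8);
    int[] selected = new int[column.length];
    int size = 0;
    for (int i = 0; i < column.length; i++) {
      if (Arrays.equals(column[i], scalar)) {
        selected[size++] = i;   // row i qualifies, keep its index
      }
    }
    System.out.println(size + " row(s), first = " + selected[0]);   // 1 row(s), first = 0
  }
}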
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project Hive by Apache.
The class TestVectorStringExpressions, method testVarCharScalarCompareStringColProjection.
@Test
public void testVarCharScalarCompareStringColProjection() throws HiveException {
  VectorizedRowBatch batch = makeStringBatch();
  VectorExpression expr;

  expr = new VarCharScalarEqualStringGroupColumn(
      new HiveVarchar(new String(red2), 8).getValue().getBytes(), 0, 2);
  expr.evaluate(batch);
  Assert.assertEquals(3, batch.size);
  LongColumnVector outVector = (LongColumnVector) batch.cols[2];
  Assert.assertEquals(1, outVector.vector[0]);
  Assert.assertEquals(0, outVector.vector[1]);
  Assert.assertEquals(0, outVector.vector[2]);

  batch = makeStringBatch();
  expr = new VarCharScalarEqualStringGroupColumn(
      new HiveVarchar(new String(green), 10).getValue().getBytes(), 0, 2);
  expr.evaluate(batch);
  Assert.assertEquals(3, batch.size);
  outVector = (LongColumnVector) batch.cols[2];
  Assert.assertEquals(0, outVector.vector[0]);
  Assert.assertEquals(1, outVector.vector[1]);
  Assert.assertEquals(0, outVector.vector[2]);
}
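Unlike the filter variant, the projection expression takes a third constructor argument, the output column index: it leaves the batch size alone and writes 1 or 0 per row into that LongColumnVector. A plain-Java analogue mirroring the test's expected 1/0/0 output (a hypothetical sketch, not the Hive kernel):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class ProjectionSketch {
  public static void main(String[] args) {
    byte[][] column = { "red".getBytes(StandardCharsets.UTF_8),
                        "green".getBytes(StandardCharsets.UTF_8),
                        "".getBytes(StandardCharsets.UTF_8) };
    byte[] scalar = "red".getBytes(StandardCharsets.UTF_8);
    long[] out = new long[column.length];   // stands in for LongColumnVector.vector
    for (int i = 0; i < column.length; i++) {
      out[i] = Arrays.equals(column[i], scalar) ? 1L : 0L;   // 1 = match, 0 = no match
    }
    System.out.println(Arrays.toString(out));   // [1, 0, 0]
  }
}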
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project Hive by Apache.
The class TestGenericUDFCastFormat, method testStringTypesToTimestampWithFormat.
@Test
public void testStringTypesToTimestampWithFormat() throws HiveException {
  ObjectInspector inputOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
  testCast(TIMESTAMP, inputOI, "2009-07-30 01:02:03", "yyyy-MM-dd HH24:mi:ss", "2009-07-30 01:02:03");
  testCast(TIMESTAMP, inputOI, "07/30/2009 11:0200", "MM/dd/yyyy hh24:miss", "2009-07-30 11:02:00");
  testCast(TIMESTAMP, inputOI, "969.07.30.", "yyy.MM.dd.", "2969-07-30 00:00:00");

  inputOI = PrimitiveObjectInspectorFactory.javaHiveCharObjectInspector;
  testCast(TIMESTAMP, 13, inputOI, new HiveChar("2009-07-30 01:02:03", 13), "yyyy-MM-dd HH24", "2009-07-30 01:00:00");
  testCast(TIMESTAMP, 18, inputOI, new HiveChar("07/30/2009 11:0200", 18), "MM/dd/yyyy hh24:miss", "2009-07-30 11:02:00");
  testCast(TIMESTAMP, 10, inputOI, new HiveChar("969.07.30.12:00", 10), "yyy.MM.dd.", "2969-07-30 00:00:00");

  inputOI = PrimitiveObjectInspectorFactory.javaHiveVarcharObjectInspector;
  testCast(TIMESTAMP, 13, inputOI, new HiveVarchar("2009-07-30 01:02:03", 13), "yyyy-MM-dd HH24", "2009-07-30 01:00:00");
  testCast(TIMESTAMP, 18, inputOI, new HiveVarchar("07/30/2009 11:0200", 18), "MM/dd/yyyy hh24:miss", "2009-07-30 11:02:00");
  testCast(TIMESTAMP, 10, inputOI, new HiveVarchar("969.07.30.12:00", 10), "yyy.MM.dd.", "2969-07-30 00:00:00");
}
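Note how the declared CHAR/VARCHAR length truncates the input before parsing: HiveChar("2009-07-30 01:02:03", 13) keeps only "2009-07-30 01", so the "yyyy-MM-dd HH24" pattern yields 01:00:00. A java.time sketch of the same truncate-then-parse behavior (HH is the java.time analogue of the SQL:2016 HH24 token; hypothetical code, not the Hive formatter):

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.temporal.ChronoField;

public class TruncateThenParseSketch {
  public static void main(String[] args) {
    String full = "2009-07-30 01:02:03";
    String truncated = full.substring(0, 13);   // what HiveChar(..., 13) keeps: "2009-07-30 01"
    DateTimeFormatter f = new DateTimeFormatterBuilder()
        .appendPattern("yyyy-MM-dd HH")
        .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0)    // fields absent from the
        .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0)  // pattern default to zero
        .toFormatter();
    System.out.println(LocalDateTime.parse(truncated, f)); // 2009-07-30T01:00
  }
}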
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project Hive by Apache.
The class GenericUDFCastFormat, method convert.
private Object convert(Object o) throws HiveException {
  Object input;
  switch (inputOI.getPrimitiveCategory()) {
  case STRING:
    input = ((StringObjectInspector) inputOI).getPrimitiveJavaObject(o);
    break;
  case CHAR:
    input = ((HiveCharObjectInspector) inputOI).getPrimitiveJavaObject(o).getStrippedValue();
    break;
  case VARCHAR:
    input = ((HiveVarcharObjectInspector) inputOI).getPrimitiveJavaObject(o).toString();
    break;
  case TIMESTAMP:
    input = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
    break;
  case DATE:
    input = ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get();
    break;
  default:
    throw new HiveException("Input type " + inputOI.getPrimitiveCategory() + " not valid");
  }

  // format here
  Object formattedOutput = null;
  if (inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE
      || inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
    if (inputOI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.DATE) {
      try {
        formattedOutput = formatter.format((Date) input);
      } catch (IllegalArgumentException e) {
        return null;
      }
    } else {
      try {
        formattedOutput = formatter.format((Timestamp) input);
      } catch (IllegalArgumentException e) {
        return null;
      }
    }
    if (formattedOutput == null) {
      return null;
    }
  }

  // parse and create Writables
  switch (outputOI.getPrimitiveCategory()) {
  case STRING:
    return new Text((String) formattedOutput);
  case CHAR:
    return ((SettableHiveCharObjectInspector) outputOI).create(new HiveChar((String) formattedOutput, -1));
  case VARCHAR:
    return ((SettableHiveVarcharObjectInspector) outputOI).create(new HiveVarchar((String) formattedOutput, -1));
  case TIMESTAMP:
    try {
      Timestamp t = formatter.parseTimestamp((String) input);
      if (t == null) {
        return null;
      }
      return ((SettableTimestampObjectInspector) outputOI).create(t);
    } catch (IllegalArgumentException e) {
      return null;
    }
  case DATE:
    try {
      Date d = formatter.parseDate((String) input);
      if (d == null) {
        return null;
      }
      return ((SettableDateObjectInspector) outputOI).create(d);
    } catch (IllegalArgumentException e) {
      return null;
    }
  default:
    throw new HiveException("Output type " + outputOI.getPrimitiveCategory() + " not valid");
  }
}
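In outline, convert() runs in two phases: a datetime input (DATE/TIMESTAMP) is formatted into a String for the string-family output types, while a string input is handed to formatter.parseTimestamp/parseDate for the datetime output types, returning null for anything unparseable. A simplified, hypothetical sketch of that shape using java.time (not the Hive formatter):

import java.time.DateTimeException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class CastFormatSketch {
  // toStringOutput = true : datetime -> formatted String (STRING/CHAR/VARCHAR output)
  // toStringOutput = false: String -> parsed datetime (TIMESTAMP output)
  static Object castWithFormat(Object input, DateTimeFormatter fmt, boolean toStringOutput) {
    try {
      return toStringOutput
          ? fmt.format((LocalDateTime) input)
          : LocalDateTime.parse((String) input, fmt);
    } catch (DateTimeException e) {
      return null;   // convert() likewise returns null for unparseable values
    }
  }

  public static void main(String[] args) {
    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    System.out.println(castWithFormat(LocalDateTime.of(2009, 7, 30, 1, 2, 3), fmt, true));
    System.out.println(castWithFormat("not a timestamp", fmt, false));   // null
  }
}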