Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestGenericUDFFloor, method testChar:
@Test
public void testChar() throws HiveException {
  GenericUDFFloor udf = new GenericUDFFloor();
  // CHAR(12) input holding a decimal string; FLOOR coerces it and returns a long.
  HiveChar vc = new HiveChar("32300.004747", 12);
  HiveCharWritable input = new HiveCharWritable(vc);
  CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
  LongWritable res = (LongWritable) udf.evaluate(args);
  Assert.assertEquals(32300L, res.get());
}
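For context, the test works because HiveChar normalizes its input to the declared CHAR length. Below is a minimal standalone sketch of that behavior; the class name HiveCharSemanticsSketch is hypothetical, and the printed values assume Hive's documented space-padding and truncation semantics:

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharSemanticsSketch {
  public static void main(String[] args) {
    HiveChar c = new HiveChar("red", 8);
    System.out.println("[" + c.getValue() + "]");         // "[red     ]": space-padded out to CHAR(8)
    System.out.println("[" + c.getStrippedValue() + "]"); // "[red]": trailing pad spaces removed
    HiveChar t = new HiveChar("32300.004747", 5);
    System.out.println(t.getValue());                     // "32300": truncated to CHAR(5)
  }
}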
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class KafkaJsonSerDe, method parseAsPrimitive:
private Object parseAsPrimitive(JsonNode value, TypeInfo typeInfo) throws SerDeException {
  switch (TypeInfoFactory.getPrimitiveTypeInfo(typeInfo.getTypeName()).getPrimitiveCategory()) {
    case TIMESTAMP:
      TimestampWritable timestampWritable = new TimestampWritable();
      timestampWritable.setTime(TS_PARSER.get().parseMillis(value.textValue()));
      return timestampWritable;
    case TIMESTAMPLOCALTZ:
      final long numberOfMillis = TS_PARSER.get().parseMillis(value.textValue());
      return new TimestampLocalTZWritable(new TimestampTZ(
          ZonedDateTime.ofInstant(Instant.ofEpochMilli(numberOfMillis),
              ((TimestampLocalTZTypeInfo) typeInfo).timeZone())));
    case BYTE:
      return new ByteWritable((byte) value.intValue());
    case SHORT:
      return new ShortWritable(value.shortValue());
    case INT:
      return new IntWritable(value.intValue());
    case LONG:
      return new LongWritable(value.longValue());
    case FLOAT:
      return new FloatWritable(value.floatValue());
    case DOUBLE:
      return new DoubleWritable(value.doubleValue());
    case DECIMAL:
      return new HiveDecimalWritable(HiveDecimal.create(value.decimalValue()));
    case CHAR:
      return new HiveCharWritable(new HiveChar(value.textValue(), ((CharTypeInfo) typeInfo).getLength()));
    case VARCHAR:
      // Cast to the shared parent BaseCharTypeInfo: a VARCHAR column carries a
      // VarcharTypeInfo, which is a sibling of CharTypeInfo, so the original
      // ((CharTypeInfo) typeInfo) cast would throw ClassCastException here.
      return new HiveVarcharWritable(new HiveVarchar(value.textValue(), ((BaseCharTypeInfo) typeInfo).getLength()));
    case STRING:
      return new Text(value.textValue());
    case BOOLEAN:
      return new BooleanWritable(value.isBoolean() ? value.booleanValue() : Boolean.valueOf(value.textValue()));
    default:
      throw new SerDeException("Unknown type: " + typeInfo.getTypeName());
  }
}
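Since parseAsPrimitive is private, callers reach it through the serde's deserialize path; as an illustration, the CHAR branch can be reproduced standalone with Jackson. This is a hedged sketch: the class name CharParseSketch and the literal input are made up for the example, while the Hive and Jackson calls are the same ones used above:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CharParseSketch {
  public static void main(String[] args) throws Exception {
    JsonNode value = new ObjectMapper().readTree("\"red\""); // a JSON string field
    CharTypeInfo charType = TypeInfoFactory.getCharTypeInfo(8);
    // Same construction as the CHAR branch above: pad/truncate to the declared length.
    HiveCharWritable w = new HiveCharWritable(new HiveChar(value.textValue(), charType.getLength()));
    System.out.println("[" + w.getHiveChar().getValue() + "]"); // "[red     ]"
  }
}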
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestVectorStringExpressions, method testStringColCompareCharScalarProjection:
@Test
public void testStringColCompareCharScalarProjection() throws HiveException {
  VectorizedRowBatch batch = makeStringBatch();
  VectorExpression expr;

  // CHAR scalar "red" (declared length 8): only row 0 matches.
  expr = new StringGroupColEqualCharScalar(0,
      new HiveChar(new String(red2), 8).getStrippedValue().getBytes(), 2);
  expr.evaluate(batch);
  Assert.assertEquals(3, batch.size);
  LongColumnVector outVector = (LongColumnVector) batch.cols[2];
  Assert.assertEquals(1, outVector.vector[0]);
  Assert.assertEquals(0, outVector.vector[1]);
  Assert.assertEquals(0, outVector.vector[2]);

  // CHAR scalar "green" (declared length 10): only row 1 matches.
  batch = makeStringBatch();
  expr = new StringGroupColEqualCharScalar(0,
      new HiveChar(new String(green), 10).getStrippedValue().getBytes(), 2);
  expr.evaluate(batch);
  Assert.assertEquals(3, batch.size);
  outVector = (LongColumnVector) batch.cols[2];
  Assert.assertEquals(0, outVector.vector[0]);
  Assert.assertEquals(1, outVector.vector[1]);
  Assert.assertEquals(0, outVector.vector[2]);
}
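The recurring new HiveChar(...).getStrippedValue().getBytes() idiom strips the CHAR padding before handing raw bytes to the scalar expression, which is why declared lengths of 8 and 10 behave identically in this test. A small sketch of just that idiom (the class name is hypothetical; HiveChar is the real class):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.hadoop.hive.common.type.HiveChar;

public class StrippedScalarSketch {
  public static void main(String[] args) {
    byte[] len8 = new HiveChar("red", 8).getStrippedValue().getBytes(StandardCharsets.UTF_8);
    byte[] len10 = new HiveChar("red", 10).getStrippedValue().getBytes(StandardCharsets.UTF_8);
    // Once the pad is stripped, the declared CHAR length no longer matters:
    System.out.println(Arrays.equals(len8, len10));                                  // true
    System.out.println(Arrays.equals(len8, "red".getBytes(StandardCharsets.UTF_8))); // true
  }
}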
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestVectorStringExpressions, method testCharScalarConcatCol:
@Test
public void testCharScalarConcatCol() throws HiveException {
  // has nulls, not repeating
  VectorizedRowBatch batch = makeStringBatch();
  StringScalarConcatStringGroupCol expr = new StringScalarConcatStringGroupCol(
      new HiveChar(new String(red), 6).getStrippedValue().getBytes(), 0, 1);
  expr.evaluate(batch);
  BytesColumnVector outCol = (BytesColumnVector) batch.cols[1];
  int cmp = StringExpr.compare(redred, 0, redred.length, outCol.vector[0], outCol.start[0], outCol.length[0]);
  Assert.assertEquals(0, cmp);
  Assert.assertTrue(outCol.isNull[2]);
  int cmp2 = StringExpr.compare(redgreen, 0, redgreen.length, outCol.vector[1], outCol.start[1], outCol.length[1]);
  Assert.assertEquals(0, cmp2);
  Assert.assertFalse(outCol.noNulls);
  Assert.assertFalse(outCol.isRepeating);

  // no nulls, not repeating
  batch = makeStringBatch();
  batch.cols[0].noNulls = true;
  expr.evaluate(batch);
  outCol = (BytesColumnVector) batch.cols[1];
  cmp = StringExpr.compare(redred, 0, redred.length, outCol.vector[0], outCol.start[0], outCol.length[0]);
  Assert.assertEquals(0, cmp);
  cmp2 = StringExpr.compare(redgreen, 0, redgreen.length, outCol.vector[1], outCol.start[1], outCol.length[1]);
  Assert.assertEquals(0, cmp2);
  int cmp3 = StringExpr.compare(red, 0, red.length, outCol.vector[2], outCol.start[2], outCol.length[2]);
  Assert.assertEquals(0, cmp3);
  Assert.assertTrue(outCol.noNulls);
  Assert.assertFalse(outCol.isRepeating);

  // has nulls, is repeating
  batch = makeStringBatch();
  batch.cols[0].isRepeating = true;
  expr.evaluate(batch);
  outCol = (BytesColumnVector) batch.cols[1];
  cmp = StringExpr.compare(redred, 0, redred.length, outCol.vector[0], outCol.start[0], outCol.length[0]);
  Assert.assertEquals(0, cmp);
  Assert.assertTrue(outCol.isRepeating);

  // no nulls, is repeating
  batch = makeStringBatch();
  batch.cols[0].isRepeating = true;
  batch.cols[0].noNulls = true;
  expr.evaluate(batch);
  outCol = (BytesColumnVector) batch.cols[1];
  cmp = StringExpr.compare(redred, 0, redred.length, outCol.vector[0], outCol.start[0], outCol.length[0]);
  Assert.assertEquals(0, cmp);
  Assert.assertTrue(outCol.isRepeating);
  Assert.assertTrue(outCol.noNulls);
}
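The four scenarios above exercise every combination of the noNulls and isRepeating flags. As a hedged sketch of what those flags mean to a consumer (the class and method names here are hypothetical; the BytesColumnVector fields are the real public ones used in the test):

import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;

public class ColumnVectorReadSketch {
  // Read entry i as a String, or null. A repeating vector only populates entry 0,
  // and isNull is only meaningful when noNulls is false.
  static String readEntry(BytesColumnVector col, int i) {
    int row = col.isRepeating ? 0 : i;
    if (!col.noNulls && col.isNull[row]) {
      return null;
    }
    return new String(col.vector[row], col.start[row], col.length[row]);
  }
}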
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestVectorStringExpressions, method testStringColCompareCharScalarFilter:
// Test string column to CHAR literal comparison.
@Test
public void testStringColCompareCharScalarFilter() throws HiveException {
  VectorizedRowBatch batch = makeStringBatch();
  VectorExpression expr;

  expr = new FilterStringGroupColEqualCharScalar(0,
      new HiveChar(new String(red2), 10).getStrippedValue().getBytes());
  expr.evaluate(batch);
  // only red qualifies, and it's in entry 0
  Assert.assertTrue(batch.size == 1);
  Assert.assertTrue(batch.selected[0] == 0);

  batch = makeStringBatch();
  expr = new FilterStringGroupColLessCharScalar(0,
      new HiveChar(new String(red2), 8).getStrippedValue().getBytes());
  expr.evaluate(batch);
  // only green qualifies, and it's in entry 1
  Assert.assertTrue(batch.size == 1);
  Assert.assertTrue(batch.selected[0] == 1);

  batch = makeStringBatch();
  expr = new FilterStringGroupColGreaterEqualCharScalar(0,
      new HiveChar(new String(green), 12).getStrippedValue().getBytes());
  expr.evaluate(batch);
  // green and red qualify
  Assert.assertTrue(batch.size == 2);
  Assert.assertTrue(batch.selected[0] == 0);
  Assert.assertTrue(batch.selected[1] == 1);
}
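Unlike the projection test earlier, which writes match results into an output column, a filter expression mutates the batch in place: it shrinks batch.size and records the surviving row indices in batch.selected, which is what the assertions above check. A hedged sketch of how downstream code iterates the surviving rows (the helper class is hypothetical; size, selected, and selectedInUse are the real public VectorizedRowBatch fields):

import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class FilterResultSketch {
  // After a filter runs, the surviving rows are batch.selected[0..batch.size)
  // when selectedInUse is set; otherwise rows 0..batch.size are all live.
  static void printSurvivors(VectorizedRowBatch batch) {
    for (int i = 0; i < batch.size; i++) {
      int row = batch.selectedInUse ? batch.selected[i] : i;
      System.out.println("row " + row + " passed the filter");
    }
  }
}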