Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
The class TestGenericUDFInternalInterval, method testNullBypass.
@Test
public void testNullBypass() throws Exception {
    try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
        ObjectInspector[] inputOIs = {
                PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                        TypeInfoFactory.intTypeInfo,
                        new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
                PrimitiveObjectInspectorFactory.writableStringObjectInspector };
        // The second argument is null, so evaluate() should bypass parsing and return null.
        DeferredObject[] args = {
                new DeferredJavaObject(new ByteWritable((byte) 4)),
                new DeferredJavaObject(null) };
        PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
        Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo());
        HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
        Assert.assertNull(res);
    }
}
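For contrast, a minimal sketch of the non-null path under the same initialization. The method name, the argument value "3", and the expected interval rendering are illustrative assumptions, not lines taken from the Hive test suite:

@Test
public void testDayIntervalSketch() throws Exception {
    try (GenericUDFInternalInterval udf = new GenericUDFInternalInterval()) {
        ObjectInspector[] inputOIs = {
                PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                        TypeInfoFactory.intTypeInfo,
                        new IntWritable(HiveParser.TOK_INTERVAL_DAY_LITERAL)),
                PrimitiveObjectInspectorFactory.writableStringObjectInspector };
        // Non-null second argument: the UDF should parse "3" as a 3-day interval.
        DeferredObject[] args = {
                new DeferredJavaObject(new ByteWritable((byte) 4)),
                new DeferredJavaObject(new Text("3")) };
        udf.initialize(inputOIs);
        HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args);
        // Assumed rendering of a 3-day interval; the exact toString() format
        // of HiveIntervalDayTime may differ across Hive versions.
        Assert.assertEquals("3 00:00:00.000000000", res.getHiveIntervalDayTime().toString());
    }
}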
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
The class TestGenericUDFOPMultiply, method testByteTimesShort.
@Test
public void testByteTimesShort() throws HiveException {
    GenericUDFOPMultiply udf = new GenericUDFOPMultiply();
    ByteWritable left = new ByteWritable((byte) 4);
    ShortWritable right = new ShortWritable((short) 6);
    ObjectInspector[] inputOIs = {
            PrimitiveObjectInspectorFactory.writableByteObjectInspector,
            PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    // byte * short widens to the common type, short.
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.shortTypeInfo, oi.getTypeInfo());
    ShortWritable res = (ShortWritable) udf.evaluate(args);
    Assert.assertEquals(24, res.get());
}
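The ByteWritable and ShortWritable arguments above are mutable Hadoop-style boxes over Java primitives. A standalone sketch of the get/set and serialization round trip these wrappers support (the class name ByteWritableRoundTrip is illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.hive.serde2.io.ByteWritable;

public class ByteWritableRoundTrip {
    public static void main(String[] args) throws Exception {
        ByteWritable original = new ByteWritable((byte) 4);
        // Serialize through the Hadoop Writable contract.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));
        // Deserialize into a fresh instance and compare the boxed values.
        ByteWritable copy = new ByteWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(original.get() == copy.get()); // true
    }
}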
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
The class TestGenericUDFNullif, method testByteNeq.
@Test
public void testByteNeq() throws HiveException {
    GenericUDFNullif udf = new GenericUDFNullif();
    ObjectInspector[] inputOIs = {
            PrimitiveObjectInspectorFactory.writableByteObjectInspector,
            PrimitiveObjectInspectorFactory.writableByteObjectInspector };
    // The two arguments differ, so NULLIF returns the first one.
    DeferredObject[] args = {
            new DeferredJavaObject(new ByteWritable((byte) 4)),
            new DeferredJavaObject(new ByteWritable((byte) 1)) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.byteTypeInfo, oi.getTypeInfo());
    ByteWritable res = (ByteWritable) udf.evaluate(args);
    Assert.assertEquals(4, res.get());
}
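The complementary branch of standard SQL NULLIF semantics is that equal arguments yield NULL. A minimal sketch of that case under the same setup (the method name is hypothetical, not from the Hive test suite):

@Test
public void testByteEqSketch() throws HiveException {
    GenericUDFNullif udf = new GenericUDFNullif();
    ObjectInspector[] inputOIs = {
            PrimitiveObjectInspectorFactory.writableByteObjectInspector,
            PrimitiveObjectInspectorFactory.writableByteObjectInspector };
    // Equal arguments: NULLIF(4, 4) should yield NULL.
    DeferredObject[] args = {
            new DeferredJavaObject(new ByteWritable((byte) 4)),
            new DeferredJavaObject(new ByteWritable((byte) 4)) };
    udf.initialize(inputOIs);
    Assert.assertNull(udf.evaluate(args));
}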
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project presto by prestodb.
The class RcFileTester, method decodeRecordReaderValue.
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    // Unwrap lazily-decoded values to their underlying Writable first.
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // The writable carries its own scale, so rescale the unscaled value to the Presto type's scale.
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1_000_000L), UTC_KEY);
    } else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
    } else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
    } else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
    } else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
    } else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, (List<?>) actualValue);
    }
    return actualValue;
}
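The DECIMAL branch depends on a rescale helper defined elsewhere in the tester. A hypothetical reimplementation, assuming it only ever scales up (as the comment in the branch suggests); this is a sketch, not the actual Presto helper:

// Adjusts an unscaled java.math.BigInteger from the writable's scale to the
// Presto type's scale, e.g. 123 at scale 1 -> 1230 at scale 2.
private static BigInteger rescale(BigInteger value, int fromScale, int toScale) {
    if (fromScale == toScale) {
        return value;
    }
    if (fromScale < toScale) {
        // Scaling up: append zeros by multiplying by a power of ten.
        return value.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    // Scaling down would lose precision; assumed unsupported here.
    throw new IllegalArgumentException("cannot reduce scale from " + fromScale + " to " + toScale);
}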
Use of org.apache.hadoop.hive.serde2.io.ByteWritable in project hive by apache.
The class DruidSerDe, method deserialize.
@Override
public Object deserialize(Writable writable) throws SerDeException {
    final DruidWritable input = (DruidWritable) writable;
    final List<Object> output = Lists.newArrayListWithExpectedSize(columns.length);
    for (int i = 0; i < columns.length; i++) {
        final Object value = input.getValue().get(columns[i]);
        if (value == null) {
            output.add(null);
            continue;
        }
        // Coerce the Druid value to the Writable matching the declared Hive column type.
        switch (types[i].getPrimitiveCategory()) {
            case TIMESTAMP:
                output.add(new TimestampWritable(Timestamp.valueOf(
                        ZonedDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()), tsTZTypeInfo.timeZone())
                                .format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")))));
                break;
            case TIMESTAMPLOCALTZ:
                output.add(new TimestampLocalTZWritable(new TimestampTZ(
                        ZonedDateTime.ofInstant(Instant.ofEpochMilli(((Number) value).longValue()),
                                ((TimestampLocalTZTypeInfo) types[i]).timeZone()))));
                break;
            case BYTE:
                output.add(new ByteWritable(((Number) value).byteValue()));
                break;
            case SHORT:
                output.add(new ShortWritable(((Number) value).shortValue()));
                break;
            case INT:
                output.add(new IntWritable(((Number) value).intValue()));
                break;
            case LONG:
                output.add(new LongWritable(((Number) value).longValue()));
                break;
            case FLOAT:
                output.add(new FloatWritable(((Number) value).floatValue()));
                break;
            case DOUBLE:
                output.add(new DoubleWritable(((Number) value).doubleValue()));
                break;
            case DECIMAL:
                output.add(new HiveDecimalWritable(HiveDecimal.create(((Number) value).doubleValue())));
                break;
            case CHAR:
                output.add(new HiveCharWritable(new HiveChar(value.toString(), ((CharTypeInfo) types[i]).getLength())));
                break;
            case VARCHAR:
                output.add(new HiveVarcharWritable(new HiveVarchar(value.toString(), ((VarcharTypeInfo) types[i]).getLength())));
                break;
            case STRING:
                output.add(new Text(value.toString()));
                break;
            case BOOLEAN:
                output.add(new BooleanWritable(Boolean.valueOf(value.toString())));
                break;
            default:
                throw new SerDeException("Unknown type: " + types[i].getPrimitiveCategory());
        }
    }
    return output;
}
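The numeric branches above all rely on java.lang.Number's narrowing methods, since Druid may hand back an Integer, Long, or Double regardless of the Hive column's declared width. A standalone illustration of that pattern (class name is illustrative):

public class DruidNumberCoercion {
    public static void main(String[] args) {
        // A value as Druid might return it for a numeric column.
        Object value = 42L;
        // The serde narrows through Number, as in the switch above.
        byte asByte = ((Number) value).byteValue();       // BYTE branch
        short asShort = ((Number) value).shortValue();    // SHORT branch
        double asDouble = ((Number) value).doubleValue(); // DOUBLE branch
        System.out.printf("byte=%d short=%d double=%s%n", asByte, asShort, asDouble);
    }
}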