Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project nifi by apache.
The class NiFiOrcUtils, method convertToORCObject:
public static Object convertToORCObject(TypeInfo typeInfo, Object o) {
    if (o != null) {
        if (typeInfo instanceof UnionTypeInfo) {
            OrcUnion union = new OrcUnion();
            // Need to find which of the union types correspond to the primitive object
            TypeInfo objectTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(ObjectInspectorFactory.getReflectionObjectInspector(o.getClass(), ObjectInspectorFactory.ObjectInspectorOptions.JAVA));
            List<TypeInfo> unionTypeInfos = ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos();
            int index = 0;
            while (index < unionTypeInfos.size() && !unionTypeInfos.get(index).equals(objectTypeInfo)) {
                index++;
            }
            if (index < unionTypeInfos.size()) {
                union.set((byte) index, convertToORCObject(objectTypeInfo, o));
            } else {
                throw new IllegalArgumentException("Object Type for class " + o.getClass().getName() + " not in Union declaration");
            }
            return union;
        }
        if (o instanceof Integer) {
            return new IntWritable((int) o);
        }
        if (o instanceof Boolean) {
            return new BooleanWritable((boolean) o);
        }
        if (o instanceof Long) {
            return new LongWritable((long) o);
        }
        if (o instanceof Float) {
            return new FloatWritable((float) o);
        }
        if (o instanceof Double) {
            return new DoubleWritable((double) o);
        }
        if (o instanceof String || o instanceof Utf8 || o instanceof GenericData.EnumSymbol) {
            return new Text(o.toString());
        }
        if (o instanceof ByteBuffer) {
            return new BytesWritable(((ByteBuffer) o).array());
        }
        if (o instanceof int[]) {
            int[] intArray = (int[]) o;
            return Arrays.stream(intArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("int"), element)).collect(Collectors.toList());
        }
        if (o instanceof long[]) {
            long[] longArray = (long[]) o;
            return Arrays.stream(longArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("bigint"), element)).collect(Collectors.toList());
        }
        if (o instanceof float[]) {
            float[] floatArray = (float[]) o;
            return IntStream.range(0, floatArray.length).mapToDouble(i -> floatArray[i]).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("float"), (float) element)).collect(Collectors.toList());
        }
        if (o instanceof double[]) {
            double[] doubleArray = (double[]) o;
            return Arrays.stream(doubleArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("double"), element)).collect(Collectors.toList());
        }
        if (o instanceof boolean[]) {
            boolean[] booleanArray = (boolean[]) o;
            return IntStream.range(0, booleanArray.length).map(i -> booleanArray[i] ? 1 : 0).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("boolean"), element == 1)).collect(Collectors.toList());
        }
        if (o instanceof GenericData.Array) {
            GenericData.Array array = ((GenericData.Array) o);
            // The type information in this case is interpreted as a List
            TypeInfo listTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
            return array.stream().map((element) -> convertToORCObject(listTypeInfo, element)).collect(Collectors.toList());
        }
        if (o instanceof List) {
            return o;
        }
        if (o instanceof Map) {
            Map map = new HashMap();
            TypeInfo keyInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
            TypeInfo valueInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
            // Unions are not allowed as key/value types, so if we convert the key and value objects,
            // they should return Writable objects
            ((Map) o).forEach((key, value) -> {
                Object keyObject = convertToORCObject(keyInfo, key);
                Object valueObject = convertToORCObject(valueInfo, value);
                if (keyObject == null) {
                    throw new IllegalArgumentException("Maps' key cannot be null");
                }
                map.put(keyObject, valueObject);
            });
            return map;
        }
        if (o instanceof GenericData.Record) {
            GenericData.Record record = (GenericData.Record) o;
            TypeInfo recordSchema = NiFiOrcUtils.getOrcField(record.getSchema());
            List<Schema.Field> recordFields = record.getSchema().getFields();
            if (recordFields != null) {
                Object[] fieldObjects = new Object[recordFields.size()];
                for (int i = 0; i < recordFields.size(); i++) {
                    Schema.Field field = recordFields.get(i);
                    Schema fieldSchema = field.schema();
                    Object fieldObject = record.get(field.name());
                    fieldObjects[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldSchema), fieldObject);
                }
                return NiFiOrcUtils.createOrcStruct(recordSchema, fieldObjects);
            }
        }
        throw new IllegalArgumentException("Error converting object of type " + o.getClass().getName() + " to ORC type " + typeInfo.getTypeName());
    } else {
        return null;
    }
}
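A minimal usage sketch of the method above, assuming NiFiOrcUtils is on the classpath (its import is omitted because the package differs across NiFi versions); the values and printed checks are illustrative only, and the expected Writable types follow from the Double and Map branches shown above.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ConvertToOrcSketch {
    public static void main(String[] args) {
        // A Java Double hits the Double branch and becomes a Hive DoubleWritable.
        Object orcDouble = NiFiOrcUtils.convertToORCObject(
                TypeInfoFactory.getPrimitiveTypeInfo("double"), 3.14d);
        System.out.println(orcDouble instanceof DoubleWritable); // expected: true

        // A Java Map goes through the Map branch; String keys become Text,
        // Double values become DoubleWritable.
        Map<String, Double> source = new HashMap<>();
        source.put("pi", 3.14159d);
        Object orcMap = NiFiOrcUtils.convertToORCObject(
                TypeInfoUtils.getTypeInfoFromTypeString("map<string,double>"), source);
    }
}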
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project parquet-mr by apache.
The class TestParquetSerDe, method testParquetHiveSerDe:
public void testParquetHiveSerDe() throws Throwable {
    try {
        // Create the SerDe
        System.out.println("test: testParquetHiveSerDe");
        final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
        final Configuration conf = new Configuration();
        final Properties tbl = createProperties();
        serDe.initialize(conf, tbl);
        // Data
        final Writable[] arr = new Writable[8];
        arr[0] = new ByteWritable((byte) 123);
        arr[1] = new ShortWritable((short) 456);
        arr[2] = new IntWritable(789);
        arr[3] = new LongWritable(1000l);
        arr[4] = new DoubleWritable((double) 5.3);
        arr[5] = new BinaryWritable(Binary.fromString("hive and hadoop and parquet. Big family."));
        final Writable[] mapContainer = new Writable[1];
        final Writable[] map = new Writable[3];
        for (int i = 0; i < 3; ++i) {
            final Writable[] pair = new Writable[2];
            pair[0] = new BinaryWritable(Binary.fromString("key_" + i));
            pair[1] = new IntWritable(i);
            map[i] = new ArrayWritable(Writable.class, pair);
        }
        mapContainer[0] = new ArrayWritable(Writable.class, map);
        arr[6] = new ArrayWritable(Writable.class, mapContainer);
        final Writable[] arrayContainer = new Writable[1];
        final Writable[] array = new Writable[5];
        for (int i = 0; i < 5; ++i) {
            array[i] = new BinaryWritable(Binary.fromString("elem_" + i));
        }
        arrayContainer[0] = new ArrayWritable(Writable.class, array);
        arr[7] = new ArrayWritable(Writable.class, arrayContainer);
        final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
        // Test
        deserializeAndSerializeLazySimple(serDe, arrWritable);
        System.out.println("test: testParquetHiveSerDe - OK");
    } catch (final Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
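The createProperties() helper is referenced but not shown above. A hypothetical sketch of what it likely supplies, assuming the standard Hive SerDe table properties and a type list mirroring the eight Writables built in the test; the column names here are invented for illustration.

import java.util.Properties;
import org.apache.hadoop.hive.serde.serdeConstants;

final class TestTableProperties {
    // Hypothetical reconstruction; only the type list follows from the data array above.
    static Properties createProperties() {
        final Properties tbl = new Properties();
        tbl.setProperty(serdeConstants.LIST_COLUMNS,
                "abyte,ashort,aint,along,adouble,astring,amap,alist"); // assumed names
        tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
                "tinyint:smallint:int:bigint:double:string:map<string,int>:array<string>");
        return tbl;
    }
}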
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project presto by prestodb.
The class TestObjectEncoders, method testPrimitiveObjectEncoders:
@Test
public void testPrimitiveObjectEncoders() {
    ObjectInspector inspector;
    ObjectEncoder encoder;
    inspector = writableLongObjectInspector;
    encoder = createEncoder(BIGINT, inspector);
    assertTrue(encoder.encode(new LongWritable(123456L)) instanceof Long);
    inspector = writableIntObjectInspector;
    encoder = createEncoder(INTEGER, inspector);
    assertTrue(encoder.encode(new IntWritable(12345)) instanceof Long);
    inspector = writableShortObjectInspector;
    encoder = createEncoder(SMALLINT, inspector);
    assertTrue(encoder.encode(new ShortWritable((short) 1234)) instanceof Long);
    inspector = writableByteObjectInspector;
    encoder = createEncoder(TINYINT, inspector);
    assertTrue(encoder.encode(new ByteWritable((byte) 123)) instanceof Long);
    inspector = writableBooleanObjectInspector;
    encoder = createEncoder(BOOLEAN, inspector);
    assertTrue(encoder.encode(new BooleanWritable(true)) instanceof Boolean);
    inspector = writableDoubleObjectInspector;
    encoder = createEncoder(DOUBLE, inspector);
    assertTrue(encoder.encode(new DoubleWritable(0.1)) instanceof Double);
    inspector = writableDateObjectInspector;
    encoder = createEncoder(DATE, inspector);
    assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);
    inspector = writableHiveDecimalObjectInspector;
    encoder = createEncoder(createDecimalType(11, 10), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.2345678910")) instanceof Long);
    encoder = createEncoder(createDecimalType(34, 33), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.281734081274028174012432412423134")) instanceof Slice);
}
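The last two assertions hinge on the width of the decimal's unscaled value: the precision-11 decimal fits in a signed 64-bit long, while the precision-34 one does not, so the encoder falls back to a Slice. A standalone check of that arithmetic using only the JDK; the 18-digit short-decimal boundary is stated here as an assumption about Presto's decimal representation.

import java.math.BigDecimal;

public class DecimalWidthCheck {
    public static void main(String[] args) {
        // 1.2345678910 at scale 10 has an 11-digit unscaled value: comfortably within 63 bits.
        BigDecimal shortDecimal = new BigDecimal("1.2345678910");
        System.out.println(shortDecimal.unscaledValue().bitLength()); // about 34 bits

        // The 34-digit unscaled value overflows a signed 64-bit long.
        BigDecimal longDecimal = new BigDecimal("1.281734081274028174012432412423134");
        System.out.println(longDecimal.unscaledValue().bitLength() > 63); // true
    }
}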
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project presto by prestodb.
The class RcFileTester, method decodeRecordReaderValue:
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), UTC_KEY);
        } else {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L));
        }
    } else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
    } else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
    } else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
    } else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
    } else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    }
    return actualValue;
}
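The TimestampWritable branch rebuilds epoch millis from whole seconds plus the millisecond portion of the nanos field. A small JDK-only illustration of that arithmetic, with java.sql.Timestamp standing in for the Hive writable.

import java.sql.Timestamp;

public class TimestampMillisCheck {
    public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2020-05-28 12:34:56.789123456");
        long seconds = ts.getTime() / 1000;          // whole epoch seconds
        int nanos = ts.getNanos();                   // 789123456
        long millis = (seconds * 1000) + (nanos / 1000000L);
        System.out.println(millis == ts.getTime());  // true: same epoch millis
    }
}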
Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project flink by apache.
The class HiveShimV100, method javaToWritable:
Optional<Writable> javaToWritable(@Nonnull Object value) {
    Writable writable = null;
    // in case value is already a Writable
    if (value instanceof Writable) {
        writable = (Writable) value;
    } else if (value instanceof Boolean) {
        writable = new BooleanWritable((Boolean) value);
    } else if (value instanceof Byte) {
        writable = new ByteWritable((Byte) value);
    } else if (value instanceof Short) {
        writable = new ShortWritable((Short) value);
    } else if (value instanceof Integer) {
        writable = new IntWritable((Integer) value);
    } else if (value instanceof Long) {
        writable = new LongWritable((Long) value);
    } else if (value instanceof Float) {
        writable = new FloatWritable((Float) value);
    } else if (value instanceof Double) {
        writable = new DoubleWritable((Double) value);
    } else if (value instanceof String) {
        writable = new Text((String) value);
    } else if (value instanceof HiveChar) {
        writable = new HiveCharWritable((HiveChar) value);
    } else if (value instanceof HiveVarchar) {
        writable = new HiveVarcharWritable((HiveVarchar) value);
    } else if (value instanceof HiveDecimal) {
        writable = new HiveDecimalWritable((HiveDecimal) value);
    } else if (value instanceof Date) {
        writable = new DateWritable((Date) value);
    } else if (value instanceof Timestamp) {
        writable = new TimestampWritable((Timestamp) value);
    } else if (value instanceof BigDecimal) {
        HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) value);
        writable = new HiveDecimalWritable(hiveDecimal);
    } else if (value instanceof byte[]) {
        writable = new BytesWritable((byte[]) value);
    }
    return Optional.ofNullable(writable);
}
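A brief, hypothetical caller for this helper; it assumes same-package access to the package-private javaToWritable method and a HiveShimV100 instance passed in as shim, and the error message is invented for illustration.

import java.util.Optional;
import org.apache.hadoop.io.Writable;

final class WritableConversionSketch {
    // For a Java Double such as 42.5d this yields an org.apache.hadoop.hive.serde2.io.DoubleWritable.
    static Writable toWritableOrFail(HiveShimV100 shim, Object value) {
        Optional<Writable> writable = shim.javaToWritable(value);
        return writable.orElseThrow(() -> new IllegalArgumentException(
                "No Writable mapping for " + value.getClass().getName()));
    }
}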