Example usage of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.
From the class ArrowUtilsTest, method init().
// One-time test setup: builds the catalogue of
// (field name, Flink logical type, expected Arrow type, expected writer class,
// expected Arrow column-vector class) tuples covering each supported type
// conversion, then derives the composite RowType and creates the Arrow
// buffer allocator shared by the test methods.
@BeforeClass
public static void init() {
testFields = new ArrayList<>();
// Integral types: ArrowType.Int width is in bits (8 * byte width), signed.
testFields.add(Tuple5.of("f1", new TinyIntType(), new ArrowType.Int(8, true), TinyIntWriter.TinyIntWriterForRow.class, ArrowTinyIntColumnVector.class));
testFields.add(Tuple5.of("f2", new SmallIntType(), new ArrowType.Int(8 * 2, true), SmallIntWriter.SmallIntWriterForRow.class, ArrowSmallIntColumnVector.class));
testFields.add(Tuple5.of("f3", new IntType(), new ArrowType.Int(8 * 4, true), IntWriter.IntWriterForRow.class, ArrowIntColumnVector.class));
testFields.add(Tuple5.of("f4", new BigIntType(), new ArrowType.Int(8 * 8, true), BigIntWriter.BigIntWriterForRow.class, ArrowBigIntColumnVector.class));
testFields.add(Tuple5.of("f5", new BooleanType(), new ArrowType.Bool(), BooleanWriter.BooleanWriterForRow.class, ArrowBooleanColumnVector.class));
testFields.add(Tuple5.of("f6", new FloatType(), new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE), FloatWriter.FloatWriterForRow.class, ArrowFloatColumnVector.class));
testFields.add(Tuple5.of("f7", new DoubleType(), new ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE), DoubleWriter.DoubleWriterForRow.class, ArrowDoubleColumnVector.class));
testFields.add(Tuple5.of("f8", new VarCharType(), ArrowType.Utf8.INSTANCE, VarCharWriter.VarCharWriterForRow.class, ArrowVarCharColumnVector.class));
testFields.add(Tuple5.of("f9", new VarBinaryType(), ArrowType.Binary.INSTANCE, VarBinaryWriter.VarBinaryWriterForRow.class, ArrowVarBinaryColumnVector.class));
testFields.add(Tuple5.of("f10", new DecimalType(10, 3), new ArrowType.Decimal(10, 3), DecimalWriter.DecimalWriterForRow.class, ArrowDecimalColumnVector.class));
testFields.add(Tuple5.of("f11", new DateType(), new ArrowType.Date(DateUnit.DAY), DateWriter.DateWriterForRow.class, ArrowDateColumnVector.class));
// NOTE(review): field names jump from "f11" to "f13" — presumably an "f12"
// entry was removed at some point; confirm the gap is intentional.
// TimeType precision maps to Arrow TimeUnit + bit width:
// 0 -> SECOND/32, 1-3 -> MILLISECOND/32, 4-6 -> MICROSECOND/64, 7-9 -> NANOSECOND/64.
testFields.add(Tuple5.of("f13", new TimeType(0), new ArrowType.Time(TimeUnit.SECOND, 32), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class));
testFields.add(Tuple5.of("f14", new TimeType(2), new ArrowType.Time(TimeUnit.MILLISECOND, 32), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class));
testFields.add(Tuple5.of("f15", new TimeType(4), new ArrowType.Time(TimeUnit.MICROSECOND, 64), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class));
testFields.add(Tuple5.of("f16", new TimeType(8), new ArrowType.Time(TimeUnit.NANOSECOND, 64), TimeWriter.TimeWriterForRow.class, ArrowTimeColumnVector.class));
// Timestamp types (with and without local time zone) use the same
// precision-to-TimeUnit bucketing; the Arrow timezone field is left null.
testFields.add(Tuple5.of("f17", new LocalZonedTimestampType(0), new ArrowType.Timestamp(TimeUnit.SECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f18", new LocalZonedTimestampType(2), new ArrowType.Timestamp(TimeUnit.MILLISECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f19", new LocalZonedTimestampType(4), new ArrowType.Timestamp(TimeUnit.MICROSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f20", new LocalZonedTimestampType(8), new ArrowType.Timestamp(TimeUnit.NANOSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f21", new TimestampType(0), new ArrowType.Timestamp(TimeUnit.SECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f22", new TimestampType(2), new ArrowType.Timestamp(TimeUnit.MILLISECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f23", new TimestampType(4), new ArrowType.Timestamp(TimeUnit.MICROSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
testFields.add(Tuple5.of("f24", new TimestampType(8), new ArrowType.Timestamp(TimeUnit.NANOSECOND, null), TimestampWriter.TimestampWriterForRow.class, ArrowTimestampColumnVector.class));
// Nested/complex types: arrays map to Arrow List, rows to Arrow Struct.
testFields.add(Tuple5.of("f25", new ArrayType(new VarCharType()), ArrowType.List.INSTANCE, ArrayWriter.ArrayWriterForRow.class, ArrowArrayColumnVector.class));
RowType rowFieldType = new RowType(Arrays.asList(new RowType.RowField("a", new IntType()), new RowType.RowField("b", new VarCharType()), new RowType.RowField("c", new ArrayType(new VarCharType())), new RowType.RowField("d", new TimestampType(2)), new RowType.RowField("e", new RowType((Arrays.asList(new RowType.RowField("e1", new IntType()), new RowType.RowField("e2", new VarCharType())))))));
testFields.add(Tuple5.of("f26", rowFieldType, ArrowType.Struct.INSTANCE, RowWriter.RowWriterForRow.class, ArrowRowColumnVector.class));
// Assemble a single RowType mirroring testFields so whole-schema
// conversions can be exercised in one call.
List<RowType.RowField> rowFields = new ArrayList<>();
for (Tuple5<String, LogicalType, ArrowType, Class<?>, Class<?>> field : testFields) {
rowFields.add(new RowType.RowField(field.f0, field.f1));
}
rowType = new RowType(rowFields);
// Child allocator with no reservation and an unbounded limit; "stdout" is
// only a label used for allocator bookkeeping.
allocator = ArrowUtils.getRootAllocator().newChildAllocator("stdout", 0, Long.MAX_VALUE);
}
Example usage of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.
From the class CsvToRowDataConverters, method createArrayConverter().
/**
 * Creates a runtime converter that turns a CSV array node into Flink's
 * internal {@link GenericArrayData}.
 *
 * <p>Element conversion is delegated to a nullable converter for the array's
 * element type; the backing array is allocated with the element's internal
 * conversion class so the resulting data matches Flink's internal format.
 *
 * @param arrayType the logical array type being converted
 * @return a converter mapping an {@link ArrayNode} to {@link GenericArrayData}
 */
private CsvToRowDataConverter createArrayConverter(ArrayType arrayType) {
    final CsvToRowDataConverter convertElement =
            createNullableConverter(arrayType.getElementType());
    final Class<?> componentClass =
            LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
    return jsonNode -> {
        final ArrayNode arrayNode = (ArrayNode) jsonNode;
        final int length = arrayNode.size();
        // Reflectively allocate a correctly-typed array for the element class.
        final Object[] elements = (Object[]) Array.newInstance(componentClass, length);
        for (int pos = 0; pos < length; pos++) {
            elements[pos] = convertElement.convert(arrayNode.get(pos));
        }
        return new GenericArrayData(elements);
    };
}
Example usage of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.
From the class RowDataToCsvConverters, method createArrayRowFieldConverter().
/**
 * Creates a converter that serializes an array-typed row field into a CSV
 * {@link ArrayNode}.
 *
 * <p>Each element is converted through a nullable element converter for the
 * array's element type and appended to a freshly created array node.
 *
 * @param type the logical array type of the row field
 * @return a converter producing an {@link ArrayNode} for the field at {@code pos}
 */
private static RowFieldConverter createArrayRowFieldConverter(ArrayType type) {
    final ArrayElementConverter convertElement =
            createNullableArrayElementConverter(type.getElementType());
    return (csvMapper, container, row, pos) -> {
        final ArrayNode result = csvMapper.createArrayNode();
        final ArrayData elements = row.getArray(pos);
        final int count = elements.size();
        for (int idx = 0; idx < count; idx++) {
            result.add(convertElement.convert(csvMapper, result, elements, idx));
        }
        return result;
    };
}
Example usage of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.
From the class PythonBridgeUtils, method getPickledBytesFromJavaObject().
// Serializes a Java object into Python-pickle bytes according to its Flink
// logical type so it can be handed to a Python worker.
//
// Return shape is NOT uniform: most branches return pickled byte[]; null is
// encoded as an empty byte array; RowType and MapType values return Lists
// whose elements are themselves recursively encoded (RowType prepends the
// RowKind byte as the first list element).
private static Object getPickledBytesFromJavaObject(Object obj, LogicalType dataType) throws IOException {
Pickler pickler = new Pickler();
// NOTE(review): a fresh Pickler is created and initialize() re-run on every
// call, including recursive ones — presumably cheap/idempotent, but confirm.
initialize();
if (obj == null) {
// Null marker: empty byte array, distinguishable from any pickled payload.
return new byte[0];
} else {
if (dataType instanceof DateType) {
// Dates are pickled as epoch-day counts; accepts either java.time.LocalDate
// or java.sql.Date.
long time;
if (obj instanceof LocalDate) {
time = ((LocalDate) (obj)).toEpochDay();
} else {
time = ((Date) obj).toLocalDate().toEpochDay();
}
return pickler.dumps(time);
} else if (dataType instanceof TimeType) {
// Times are pickled as microseconds-of-day (nanos divided by 1000).
long time;
if (obj instanceof LocalTime) {
time = ((LocalTime) obj).toNanoOfDay();
} else {
time = ((Time) obj).toLocalTime().toNanoOfDay();
}
time = time / 1000;
return pickler.dumps(time);
} else if (dataType instanceof TimestampType) {
// LocalDateTime is normalized to java.sql.Timestamp before pickling;
// other representations are pickled as-is.
if (obj instanceof LocalDateTime) {
return pickler.dumps(Timestamp.valueOf((LocalDateTime) obj));
} else {
return pickler.dumps(obj);
}
} else if (dataType instanceof RowType) {
// Rows become a List: element 0 is the RowKind byte, followed by each
// field recursively encoded against its declared logical type.
Row tmpRow = (Row) obj;
LogicalType[] tmpRowFieldTypes = ((RowType) dataType).getChildren().toArray(new LogicalType[0]);
List<Object> rowFieldBytes = new ArrayList<>(tmpRow.getArity() + 1);
rowFieldBytes.add(new byte[] { tmpRow.getKind().toByteValue() });
for (int i = 0; i < tmpRow.getArity(); i++) {
rowFieldBytes.add(getPickledBytesFromJavaObject(tmpRow.getField(i), tmpRowFieldTypes[i]));
}
return rowFieldBytes;
} else if (dataType instanceof MapType) {
// Maps are pickled as a 2-element list [keys, values] with entries kept
// in matching positions (iteration order of the underlying Map).
List<List<Object>> serializedMapKV = new ArrayList<>(2);
MapType mapType = (MapType) dataType;
Map<Object, Object> mapObj = (Map) obj;
List<Object> keyBytesList = new ArrayList<>(mapObj.size());
List<Object> valueBytesList = new ArrayList<>(mapObj.size());
for (Map.Entry entry : mapObj.entrySet()) {
keyBytesList.add(getPickledBytesFromJavaObject(entry.getKey(), mapType.getKeyType()));
valueBytesList.add(getPickledBytesFromJavaObject(entry.getValue(), mapType.getValueType()));
}
serializedMapKV.add(keyBytesList);
serializedMapKV.add(valueBytesList);
return pickler.dumps(serializedMapKV);
} else if (dataType instanceof ArrayType) {
// Arrays: each element recursively encoded against the element type,
// then the resulting list is pickled as a whole.
Object[] objects = (Object[]) obj;
List<Object> serializedElements = new ArrayList<>(objects.length);
ArrayType arrayType = (ArrayType) dataType;
LogicalType elementType = arrayType.getElementType();
for (Object object : objects) {
serializedElements.add(getPickledBytesFromJavaObject(object, elementType));
}
return pickler.dumps(serializedElements);
}
// Fall-through for all remaining logical types.
if (dataType instanceof FloatType) {
// Floats are pickled via their String form — presumably to avoid
// float->double widening artifacts on the Python side; confirm.
return pickler.dumps(String.valueOf(obj));
} else {
// Default: let the pickler handle the object directly.
return pickler.dumps(obj);
}
}
}
Example usage of org.apache.flink.table.types.logical.ArrayType in the Apache Flink project.
From the class InternalDataUtils, method toGenericArray().
/**
 * Copies an {@link ArrayData} into a {@link GenericArrayData}, converting each
 * non-null element to its generic internal representation.
 *
 * @param arrayData the source array
 * @param logicalType the array's logical type (must be an {@link ArrayType})
 * @return a new {@code GenericArrayData} holding the converted elements
 */
static GenericArrayData toGenericArray(ArrayData arrayData, LogicalType logicalType) {
    final LogicalType elementType = ((ArrayType) logicalType).getElementType();
    final ArrayData.ElementGetter getter = ArrayData.createElementGetter(elementType);
    final int size = arrayData.size();
    final Object[] copied = new Object[size];
    for (int i = 0; i < size; i++) {
        copied[i] =
                arrayData.isNullAt(i)
                        ? null
                        : toGenericInternalData(getter.getElementOrNull(arrayData, i), elementType);
    }
    return new GenericArrayData(copied);
}
Aggregations