Use of org.apache.flink.table.types.logical.ArrayType in project flink by apache.
The class AvroToRowDataConverters, method createArrayConverter:
private static AvroToRowDataConverter createArrayConverter(ArrayType arrayType) {
    final AvroToRowDataConverter elementConverter =
            createNullableConverter(arrayType.getElementType());
    final Class<?> elementClass =
            LogicalTypeUtils.toInternalConversionClass(arrayType.getElementType());
    return avroObject -> {
        final List<?> list = (List<?>) avroObject;
        final int length = list.size();
        final Object[] array = (Object[]) Array.newInstance(elementClass, length);
        for (int i = 0; i < length; ++i) {
            array[i] = elementConverter.convert(list.get(i));
        }
        return new GenericArrayData(array);
    };
}
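To make the data flow concrete, here is a small hand-rolled sketch that mirrors what the returned lambda does for an ARRAY&lt;BIGINT&gt; with non-nullable elements. It does not call the private factory above; the sample values are assumptions for illustration, and it only needs java.lang.reflect.Array, java.util.Arrays/List and org.apache.flink.table.data.GenericArrayData:

// Hand-rolled sketch (not Flink code): ARRAY<BIGINT NOT NULL> maps to an internal Long[] array.
List<?> avroList = Arrays.asList(1L, 2L, 3L); // what Avro hands over for an array field
Class<?> elementClass = Long.class;           // what toInternalConversionClass(BIGINT) would return
Object[] elements = (Object[]) Array.newInstance(elementClass, avroList.size());
for (int i = 0; i < avroList.size(); i++) {
    elements[i] = avroList.get(i);            // BIGINT elements need no further conversion
}
GenericArrayData arrayData = new GenericArrayData(elements);
long first = arrayData.getLong(0);            // reads back 1L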
Use of org.apache.flink.table.types.logical.ArrayType in project flink by apache.
The class RowDataToAvroConverters, method createConverter:
// --------------------------------------------------------------------------------
// IMPORTANT! We use anonymous classes instead of lambdas for a reason here. It is
// necessary because the maven shade plugin cannot relocate classes in
// SerializedLambdas (MSHADE-260). On the other hand we want to relocate Avro for
// sql-client uber jars.
// --------------------------------------------------------------------------------
/**
 * Creates a runtime converter according to the given logical type that converts objects of
 * Flink Table & SQL internal data structures to corresponding Avro data structures.
 */
public static RowDataToAvroConverter createConverter(LogicalType type) {
    final RowDataToAvroConverter converter;
    switch (type.getTypeRoot()) {
        case NULL:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return null;
                }
            };
            break;
        case TINYINT:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Byte) object).intValue();
                }
            };
            break;
        case SMALLINT:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((Short) object).intValue();
                }
            };
            break;
        case BOOLEAN: // boolean
        case INTEGER: // int
        case INTERVAL_YEAR_MONTH: // long
        case BIGINT: // long
        case INTERVAL_DAY_TIME: // long
        case FLOAT: // float
        case DOUBLE: // double
        case TIME_WITHOUT_TIME_ZONE: // int
        case DATE: // int
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return object;
                }
            };
            break;
        case CHAR:
        case VARCHAR:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return new Utf8(object.toString());
                }
            };
            break;
        case BINARY:
        case VARBINARY:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap((byte[]) object);
                }
            };
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ((TimestampData) object).toInstant().toEpochMilli();
                }
            };
            break;
        case DECIMAL:
            converter = new RowDataToAvroConverter() {
                private static final long serialVersionUID = 1L;

                @Override
                public Object convert(Schema schema, Object object) {
                    return ByteBuffer.wrap(((DecimalData) object).toUnscaledBytes());
                }
            };
            break;
        case ARRAY:
            converter = createArrayConverter((ArrayType) type);
            break;
        case ROW:
            converter = createRowConverter((RowType) type);
            break;
        case MAP:
        case MULTISET:
            converter = createMapConverter(type);
            break;
        case RAW:
        default:
            throw new UnsupportedOperationException("Unsupported type: " + type);
    }

    // wrap into nullable converter
    return new RowDataToAvroConverter() {
        private static final long serialVersionUID = 1L;

        @Override
        public Object convert(Schema schema, Object object) {
            if (object == null) {
                return null;
            }
            // get actual schema if it is a nullable schema
            Schema actualSchema;
            if (schema.getType() == Schema.Type.UNION) {
                List<Schema> types = schema.getTypes();
                int size = types.size();
                if (size == 2 && types.get(1).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(0);
                } else if (size == 2 && types.get(0).getType() == Schema.Type.NULL) {
                    actualSchema = types.get(1);
                } else {
                    throw new IllegalArgumentException(
                            "The Avro schema is not a nullable type: " + schema.toString());
                }
            } else {
                actualSchema = schema;
            }
            return converter.convert(actualSchema, object);
        }
    };
}
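A rough usage sketch for the ARRAY branch follows; the use of AvroSchemaConverter.convertToSchema to derive a matching Avro schema and the sample values are assumptions for illustration, not taken from the Flink tests. It also exercises the nullable-union unwrapping done by the wrapper converter above, since a plain ArrayType is nullable by default:

// Hypothetical sketch: serialize an internal ARRAY<INT> value to its Avro representation.
ArrayType arrayType = new ArrayType(new IntType());
Schema avroSchema = AvroSchemaConverter.convertToSchema(arrayType); // a nullable union containing array<int>
RowDataToAvroConverters.RowDataToAvroConverter converter =
        RowDataToAvroConverters.createConverter(arrayType);
Object avroValue = converter.convert(avroSchema, new GenericArrayData(new Integer[] {1, 2, 3}));
// avroValue is a java.util.List of the converted elements; a null input would return null.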
Use of org.apache.flink.table.types.logical.ArrayType in project flink by apache.
The class RowDataVectorizer, method setColumn:
private static void setColumn(
        int rowId, ListColumnVector listColumnVector, LogicalType type, RowData row, int columnId) {
    ArrayData arrayData = row.getArray(columnId);
    ArrayType arrayType = (ArrayType) type;
    // record where this row's list starts in the flattened child vector and how long it is
    listColumnVector.lengths[rowId] = arrayData.size();
    listColumnVector.offsets[rowId] = listColumnVector.childCount;
    listColumnVector.childCount += listColumnVector.lengths[rowId];
    listColumnVector.child.ensureSize(
            listColumnVector.childCount, listColumnVector.offsets[rowId] != 0);
    RowData convertedRowData = convert(arrayData, arrayType.getElementType());
    // write each element at its position in the child vector
    for (int i = 0; i < arrayData.size(); i++) {
        setColumn(
                (int) listColumnVector.offsets[rowId] + i,
                listColumnVector.child,
                arrayType.getElementType(),
                convertedRowData,
                i);
    }
}
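The lengths/offsets bookkeeping is the subtle part: every row's list is appended to one flattened child vector. A minimal stand-alone sketch of the same bookkeeping with plain Java arrays and assumed sample values (not Flink or ORC code):

// Writing row 0 = {1, 2, 3} and row 1 = {4, 5} into a flattened "child" array.
int[][] rows = {{1, 2, 3}, {4, 5}};
long[] offsets = new long[rows.length];
long[] lengths = new long[rows.length];
int childCount = 0;
int[] child = new int[5];
for (int rowId = 0; rowId < rows.length; rowId++) {
    lengths[rowId] = rows[rowId].length;
    offsets[rowId] = childCount;              // this row's list starts where the previous one ended
    childCount += rows[rowId].length;
    for (int i = 0; i < rows[rowId].length; i++) {
        child[(int) offsets[rowId] + i] = rows[rowId][i];
    }
}
// Result: offsets = {0, 3}, lengths = {3, 2}, child = {1, 2, 3, 4, 5}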
Use of org.apache.flink.table.types.logical.ArrayType in project flink by apache.
The class HiveInspectors, method getConversion:
/**
 * Get conversion for converting Flink object to Hive object from an ObjectInspector and the
 * corresponding Flink DataType.
 */
public static HiveObjectConversion getConversion(
        ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector
                || inspector instanceof StringObjectInspector
                || inspector instanceof ByteObjectInspector
                || inspector instanceof ShortObjectInspector
                || inspector instanceof IntObjectInspector
                || inspector instanceof LongObjectInspector
                || inspector instanceof FloatObjectInspector
                || inspector instanceof DoubleObjectInspector
                || inspector instanceof BinaryObjectInspector
                || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion =
                    o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion =
                    o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException(
                    "Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert =
                getConversion(
                        ((ListObjectInspector) inspector).getListElementObjectInspector(),
                        ((ArrayType) dataType).getElementType(),
                        hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion =
                getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion =
                getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(
                        keyConversion.toHiveObject(entry.getKey()),
                        valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] =
                    getConversion(
                            structFields.get(i).getFieldObjectInspector(),
                            rowFields.get(i).getType(),
                            hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(
            String.format("Flink doesn't support convert object conversion for %s yet", inspector));
}
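As a usage sketch for the ListObjectInspector branch, the snippet below builds a conversion for ARRAY&lt;INT&gt; and feeds it a Flink array value passed as an Object[]. The standard Hive inspector factories and the HiveShimLoader call are assumptions made for illustration:

// Hypothetical sketch: build a conversion for ARRAY<INT> backed by a standard Java list inspector.
ObjectInspector listInspector =
        ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector);
LogicalType arrayType = new ArrayType(new IntType());
HiveShim hiveShim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
HiveObjectConversion conversion = HiveInspectors.getConversion(listInspector, arrayType, hiveShim);
Object hiveList = conversion.toHiveObject(new Integer[] {1, 2, 3}); // -> a java.util.List [1, 2, 3]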
Use of org.apache.flink.table.types.logical.ArrayType in project flink by apache.
The class HiveFunctionUtil, method isPrimitiveArray:
private static boolean isPrimitiveArray(DataType dataType) {
    if (isArrayType(dataType)) {
        ArrayType arrayType = (ArrayType) dataType.getLogicalType();
        LogicalType elementType = arrayType.getElementType();
        // a "primitive array" has non-nullable elements of a primitive element type
        return !(elementType.isNullable() || !isPrimitive(elementType));
    } else {
        return false;
    }
}
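For intuition, the predicate accepts only arrays whose element type is both non-nullable and primitive. A hypothetical illustration follows; the method is private, the DataTypes factory calls are assumptions, and it is further assumed that isPrimitive only accepts types backed by Java primitives:

// Hypothetical expected results (illustration only):
isPrimitiveArray(DataTypes.ARRAY(DataTypes.INT().notNull()));    // true: non-null primitive elements
isPrimitiveArray(DataTypes.ARRAY(DataTypes.INT()));              // false: elements are nullable
isPrimitiveArray(DataTypes.ARRAY(DataTypes.STRING().notNull())); // false: STRING is not a primitive type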