Use of org.apache.flink.api.java.typeutils.RowTypeInfo in the Apache Flink project:
class DataFormatConvertersTest, method testTypes.
// Verifies DataFormatConverters round-trip conversion (internal <-> external)
// for a broad set of TypeInformations and sample values, including nulls.
@Test
public void testTypes() {
// simpleTypes[i] is paired with the matching sample value simpleValues[i].
for (int i = 0; i < simpleTypes.length; i++) {
test(simpleTypes[i], simpleValues[i]);
}
// Row conversions: an empty row, and a row carrying an explicit RowKind (DELETE).
test(new RowTypeInfo(simpleTypes), new Row(simpleTypes.length));
test(new RowTypeInfo(simpleTypes), Row.ofKind(RowKind.DELETE, simpleValues));
// Internal row data, with and without null fields.
test(InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()), GenericRowData.of(StringData.fromString("hehe"), 111));
test(InternalTypeInfo.ofFields(VarCharType.STRING_TYPE, new IntType()), GenericRowData.of(null, null));
// Decimals: a null value and a concrete DecimalData.
test(new DecimalDataTypeInfo(10, 5), null);
test(new DecimalDataTypeInfo(10, 5), DecimalDataUtils.castFrom(5.555, 10, 5));
test(Types.BIG_DEC, null);
{
// Round-trip a high-precision decimal (precision 19, scale 18) through the
// converter directly and compare via deepEquals.
DataFormatConverter converter = getConverter(Types.BIG_DEC);
Assert.assertTrue(Arrays.deepEquals(new Object[] { converter.toInternal(converter.toExternal(DecimalDataUtils.castFrom(5, 19, 18))) }, new Object[] { DecimalDataUtils.castFrom(5, 19, 18) }));
}
// Lists and arrays (boxed doubles and object arrays of strings, with nulls).
test(new ListTypeInfo<>(Types.STRING), null);
test(new ListTypeInfo<>(Types.STRING), Arrays.asList("ahah", "xx"));
test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] { 1D, 5D });
test(BasicArrayTypeInfo.DOUBLE_ARRAY_TYPE_INFO, new Double[] { null, null });
test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { null, null });
test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { "haha", "hehe" });
test(ObjectArrayTypeInfo.getInfoFor(Types.STRING), new String[] { "haha", "hehe" }, new String[] { "aa", "bb" });
// Maps, including an entry with both key and value null.
test(new MapTypeInfo<>(Types.STRING, Types.INT), null);
HashMap<String, Integer> map = new HashMap<>();
map.put("haha", 1);
map.put("hah1", 5);
map.put(null, null);
test(new MapTypeInfo<>(Types.STRING, Types.INT), map);
// Tuples and a POJO whose type info is extracted reflectively.
Tuple2 tuple2 = new Tuple2<>(5, 10);
TupleTypeInfo tupleTypeInfo = new TupleTypeInfo<>(tuple2.getClass(), Types.INT, Types.INT);
test(tupleTypeInfo, tuple2);
test(TypeExtractor.createTypeInfo(MyPojo.class), new MyPojo(1, 3));
}
Use of org.apache.flink.api.java.typeutils.RowTypeInfo in the Apache Flink project:
class KafkaTableSinkTestBase, method testConfiguration.
// Verifies that configure(...) yields a new sink instance that reports exactly
// the field names, field types, and row output type it was configured with.
@Test
public void testConfiguration() {
final KafkaTableSink original = createTableSink();
final KafkaTableSink configured = original.configure(FIELD_NAMES, FIELD_TYPES);
// configure(...) must not mutate and return the receiver.
assertNotSame(original, configured);
// The configured sink must expose the schema it was given.
assertArrayEquals(FIELD_NAMES, configured.getFieldNames());
assertArrayEquals(FIELD_TYPES, configured.getFieldTypes());
assertEquals(new RowTypeInfo(FIELD_TYPES), configured.getOutputType());
}
Use of org.apache.flink.api.java.typeutils.RowTypeInfo in the Apache Flink project:
class HBaseTableSource, method getReturnType.
// Builds the produced row type: one top-level field per HBase column family,
// where each family field is itself a row of that family's qualifiers.
@Override
public TypeInformation<Row> getReturnType() {
String[] familyNames = schema.getFamilyNames();
TypeInformation<?>[] familyTypes = new TypeInformation[familyNames.length];
for (int i = 0; i < familyNames.length; i++) {
String family = familyNames[i];
// Nested row: the family's qualifier types, labeled with qualifier names.
familyTypes[i] = new RowTypeInfo(schema.getQualifierTypes(family), schema.getQualifierNames(family));
}
return new RowTypeInfo(familyTypes, familyNames);
}
Use of org.apache.flink.api.java.typeutils.RowTypeInfo in the Apache Flink project:
class PythonTableUtils, method converter.
/**
 * Creates a converter that turns a raw object coming from the Python side into
 * the Java value expected for {@code dataType}.
 *
 * <p>The typed branches map objects of an unexpected class to {@code null};
 * the integral branches accept any of Byte/Short/Integer/Long and convert to
 * the target width. Composite types (arrays, maps, rows, tuples) convert their
 * elements recursively. The final fallback deserializes {@code byte[]} payloads
 * with the type's own serializer and passes everything else through untouched.
 *
 * @param dataType target type of the converted value
 * @param config execution config, used to create a serializer in the fallback path
 * @return a function mapping a raw object to the expected Java representation
 */
private static Function<Object, Object> converter(final TypeInformation<?> dataType, final ExecutionConfig config) {
if (dataType.equals(Types.BOOLEAN())) {
return b -> b instanceof Boolean ? b : null;
}
// Integral branches: accept Byte/Short/Integer/Long and narrow/widen to the
// target width; any other class converts to null.
if (dataType.equals(Types.BYTE())) {
return c -> {
if (c instanceof Byte) {
return c;
}
if (c instanceof Short) {
return ((Short) c).byteValue();
}
if (c instanceof Integer) {
return ((Integer) c).byteValue();
}
if (c instanceof Long) {
return ((Long) c).byteValue();
}
return null;
};
}
if (dataType.equals(Types.SHORT())) {
return c -> {
if (c instanceof Byte) {
return ((Byte) c).shortValue();
}
if (c instanceof Short) {
return c;
}
if (c instanceof Integer) {
return ((Integer) c).shortValue();
}
if (c instanceof Long) {
return ((Long) c).shortValue();
}
return null;
};
}
if (dataType.equals(Types.INT())) {
return c -> {
if (c instanceof Byte) {
return ((Byte) c).intValue();
}
if (c instanceof Short) {
return ((Short) c).intValue();
}
if (c instanceof Integer) {
return c;
}
if (c instanceof Long) {
return ((Long) c).intValue();
}
return null;
};
}
if (dataType.equals(Types.LONG())) {
return c -> {
if (c instanceof Byte) {
return ((Byte) c).longValue();
}
if (c instanceof Short) {
return ((Short) c).longValue();
}
if (c instanceof Integer) {
return ((Integer) c).longValue();
}
if (c instanceof Long) {
return c;
}
return null;
};
}
// Floating-point branches: accept Float or Double only.
if (dataType.equals(Types.FLOAT())) {
return c -> {
if (c instanceof Float) {
return c;
}
if (c instanceof Double) {
return ((Double) c).floatValue();
}
return null;
};
}
if (dataType.equals(Types.DOUBLE())) {
return c -> {
if (c instanceof Float) {
return ((Float) c).doubleValue();
}
if (c instanceof Double) {
return c;
}
return null;
};
}
if (dataType.equals(Types.DECIMAL())) {
return c -> c instanceof BigDecimal ? c : null;
}
// SQL_DATE: the incoming Integer is a day count; multiply to epoch millis and
// shift by the local offset (see getOffsetFromLocalMillis) — presumably so
// java.sql.Date renders the intended local calendar day. TODO confirm.
if (dataType.equals(Types.SQL_DATE())) {
return c -> {
if (c instanceof Integer) {
long millisLocal = ((Integer) c).longValue() * 86400000;
long millisUtc = millisLocal - PythonTableUtils.getOffsetFromLocalMillis(millisLocal);
return new Date(millisUtc);
}
return null;
};
}
// Temporal branches below divide the incoming count by 1000 —
// NOTE(review): presumably microseconds -> milliseconds from the Python
// serializer; confirm against the sending side.
if (dataType.equals(Types.SQL_TIME())) {
return c -> c instanceof Integer || c instanceof Long ? new Time(((Number) c).longValue() / 1000) : null;
}
if (dataType.equals(Types.SQL_TIMESTAMP())) {
return c -> c instanceof Integer || c instanceof Long ? new Timestamp(((Number) c).longValue() / 1000) : null;
}
if (dataType.equals(org.apache.flink.api.common.typeinfo.Types.INSTANT)) {
return c -> c instanceof Integer || c instanceof Long ? Instant.ofEpochMilli(((Number) c).longValue() / 1000) : null;
}
if (dataType.equals(Types.INTERVAL_MILLIS())) {
return c -> c instanceof Integer || c instanceof Long ? ((Number) c).longValue() / 1000 : null;
}
if (dataType.equals(Types.STRING())) {
return c -> c != null ? c.toString() : null;
}
// byte[] target: pass raw bytes through, or UTF-8 encode an incoming String.
if (dataType.equals(PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)) {
return c -> {
if (c instanceof String) {
return ((String) c).getBytes(StandardCharsets.UTF_8);
}
if (c instanceof byte[]) {
return c;
}
return null;
};
}
// Array types: convert each element recursively, then rebuild an array of the
// appropriate component type (primitive or boxed/object).
if (dataType instanceof PrimitiveArrayTypeInfo || dataType instanceof BasicArrayTypeInfo || dataType instanceof ObjectArrayTypeInfo) {
TypeInformation<?> elementType = dataType instanceof PrimitiveArrayTypeInfo ? ((PrimitiveArrayTypeInfo<?>) dataType).getComponentType() : dataType instanceof BasicArrayTypeInfo ? ((BasicArrayTypeInfo<?, ?>) dataType).getComponentInfo() : ((ObjectArrayTypeInfo<?, ?>) dataType).getComponentInfo();
boolean primitive = dataType instanceof PrimitiveArrayTypeInfo;
Function<Object, Object> elementConverter = converter(elementType, config);
BiFunction<Integer, Function<Integer, Object>, Object> arrayConstructor = arrayConstructor(elementType, primitive);
return c -> {
int length = -1;
Function<Integer, Object> elementGetter = null;
// Input may arrive either as a java.util.List or as a Java array.
if (c instanceof List) {
length = ((List<?>) c).size();
elementGetter = i -> elementConverter.apply(((List<?>) c).get(i));
}
if (c != null && c.getClass().isArray()) {
length = Array.getLength(c);
elementGetter = i -> elementConverter.apply(Array.get(c, i));
}
if (elementGetter != null) {
return arrayConstructor.apply(length, elementGetter);
}
return null;
};
}
// Map types: convert keys and values entry-wise.
// NOTE(review): Collectors.toMap throws NullPointerException if a converted
// value is null — confirm null map values cannot occur on this path.
if (dataType instanceof MapTypeInfo) {
Function<Object, Object> keyConverter = converter(((MapTypeInfo<?, ?>) dataType).getKeyTypeInfo(), config);
Function<Object, Object> valueConverter = converter(((MapTypeInfo<?, ?>) dataType).getValueTypeInfo(), config);
return c -> c instanceof Map ? ((Map<?, ?>) c).entrySet().stream().collect(Collectors.toMap(e -> keyConverter.apply(e.getKey()), e -> valueConverter.apply(e.getValue()))) : null;
}
// Row types: input is an array whose element 0 is the RowKind byte, followed
// by one value per field — hence the (length - 1) arity checks below.
if (dataType instanceof RowTypeInfo) {
TypeInformation<?>[] fieldTypes = ((RowTypeInfo) dataType).getFieldTypes();
List<Function<Object, Object>> fieldConverters = Arrays.stream(fieldTypes).map(x -> PythonTableUtils.converter(x, config)).collect(Collectors.toList());
return c -> {
if (c != null && c.getClass().isArray()) {
int length = Array.getLength(c);
if (length - 1 != fieldTypes.length) {
throw new IllegalStateException("Input row doesn't have expected number of values required by the schema. " + fieldTypes.length + " fields are required while " + (length - 1) + " values are provided.");
}
Row row = new Row(length - 1);
row.setKind(RowKind.fromByteValue(((Number) Array.get(c, 0)).byteValue()));
for (int i = 0; i < row.getArity(); i++) {
row.setField(i, fieldConverters.get(i).apply(Array.get(c, i + 1)));
}
return row;
}
return null;
};
}
// Tuple types: like rows, but without a leading kind byte — the array length
// must equal the field count exactly.
if (dataType instanceof TupleTypeInfo) {
TypeInformation<?>[] fieldTypes = ((TupleTypeInfo<?>) dataType).getFieldTypes();
List<Function<Object, Object>> fieldConverters = Arrays.stream(fieldTypes).map(x -> PythonTableUtils.converter(x, config)).collect(Collectors.toList());
return c -> {
if (c != null && c.getClass().isArray()) {
int length = Array.getLength(c);
if (length != fieldTypes.length) {
throw new IllegalStateException("Input tuple doesn't have expected number of values required by the schema. " + fieldTypes.length + " fields are required while " + length + " values are provided.");
}
Tuple tuple = Tuple.newInstance(length);
for (int i = 0; i < tuple.getArity(); i++) {
tuple.setField(fieldConverters.get(i).apply(Array.get(c, i)), i);
}
return tuple;
}
return null;
};
}
// Fallback: non-byte[] objects and pickled byte arrays pass through untouched.
// NOTE(review): c.getClass() will NPE if c is null here — the typed branches
// above return null for null input; confirm callers never pass null for types
// reaching this branch.
return c -> {
if (c.getClass() != byte[].class || dataType instanceof PickledByteArrayTypeInfo) {
return c;
}
// other typeinfos will use the corresponding serializer to deserialize data.
byte[] b = (byte[]) c;
TypeSerializer<?> dataSerializer = dataType.createSerializer(config);
ByteArrayInputStreamWithPos bais = new ByteArrayInputStreamWithPos();
DataInputViewStreamWrapper baisWrapper = new DataInputViewStreamWrapper(bais);
bais.setBuffer(b, 0, b.length);
try {
return dataSerializer.deserialize(baisWrapper);
} catch (IOException e) {
throw new IllegalStateException("Failed to deserialize the object with datatype " + dataType, e);
}
};
}
Use of org.apache.flink.api.java.typeutils.RowTypeInfo in the Apache Flink project:
class MinByOperatorTest, method testMinByRowTypeInfoKeyFieldsForUnsortedGrouping.
/**
 * Validates that calling {@code minBy} on an unsorted grouping over a Row-typed
 * data set is rejected eagerly with an {@code InvalidProgramException} instead of
 * failing later with a ClassCastException (regression test for FLINK-8255).
 */
@Test(expected = InvalidProgramException.class)
public void testMinByRowTypeInfoKeyFieldsForUnsortedGrouping() {
final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
// Use parameterized types instead of raw TypeInformation/UnsortedGrouping.
TypeInformation<?>[] types = new TypeInformation[] { Types.INT, Types.INT };
String[] fieldNames = new String[] { "id", "value" };
RowTypeInfo rowTypeInfo = new RowTypeInfo(types, fieldNames);
UnsortedGrouping<Row> groupDs = env.fromCollection(Collections.singleton(new Row(2)), rowTypeInfo).groupBy(0);
// Expected to throw: Row types are not supported as minBy aggregation inputs.
groupDs.minBy(1);
}
Aggregations