Use of org.apache.flink.table.api.DataTypes.Field in the Apache Flink project.
From the class DataStreamJavaITCase, the method testFromAndToChangelogStreamEventTime:
@Test
public void testFromAndToChangelogStreamEventTime() throws Exception {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

    final DataStream<Tuple3<Long, Integer, String>> dataStream = getWatermarkedDataStream();

    final DataStream<Row> changelogStream =
            dataStream
                    .map(t -> Row.ofKind(RowKind.INSERT, t.f1, t.f2))
                    .returns(Types.ROW(Types.INT, Types.STRING));

    // derive physical columns and add a rowtime
    final Table table =
            tableEnv.fromChangelogStream(
                    changelogStream,
                    Schema.newBuilder()
                            .columnByMetadata("rowtime", TIMESTAMP_LTZ(3))
                            .columnByExpression("computed", $("f1").upperCase())
                            .watermark("rowtime", sourceWatermark())
                            .build());
    tableEnv.createTemporaryView("t", table);

    // access and reorder columns; the computed column is dropped
    final Table reordered = tableEnv.sqlQuery("SELECT f1, rowtime, f0 FROM t");

    // write out the rowtime column with fully declared schema
    final DataStream<Row> result =
            tableEnv.toChangelogStream(
                    reordered,
                    Schema.newBuilder()
                            .column("f1", STRING())
                            .columnByMetadata("rowtime", TIMESTAMP_LTZ(3))
                            .columnByExpression("ignored", $("f1").upperCase())
                            .column("f0", INT())
                            .build());

    // test event time window and field access
    testResult(
            result.keyBy(k -> k.<String>getFieldAs("f1"))
                    .window(TumblingEventTimeWindows.of(Time.milliseconds(5)))
                    .<Row>apply(
                            (key, window, input, out) -> {
                                int sum = 0;
                                for (Row row : input) {
                                    sum += row.<Integer>getFieldAs("f0");
                                }
                                out.collect(Row.of(key, sum));
                            })
                    .returns(Types.ROW(Types.STRING, Types.INT)),
            Row.of("A", 47),
            Row.of("C", 1000),
            Row.of("C", 1000));
}
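
Neither schema in the test above references DataTypes.Field directly; the same target row type can instead be declared as a ROW of DataTypes.FIELD entries when converting a table back to a DataStream. The following is a minimal, self-contained sketch of that alternative, not part of the test itself; the class name ToDataStreamWithFields and the inline sample rows are illustrative assumptions:

import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.STRING;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class ToDataStreamWithFields {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // a small inline table with the same column names as the test above
        final Table table =
                tableEnv.fromValues(
                        ROW(FIELD("f0", INT()), FIELD("f1", STRING())),
                        Row.of(42, "A"),
                        Row.of(1000, "C"));

        // declare the target type as a ROW of DataTypes.Field entries
        // instead of building a Schema
        final DataStream<Row> result =
                tableEnv.toDataStream(table, ROW(FIELD("f0", INT()), FIELD("f1", STRING())));

        result.print();
        env.execute();
    }
}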
Use of org.apache.flink.table.api.DataTypes.Field in the Apache Flink project.
From the class DataTypeJsonSerializer, the method serializeClass:
private static void serializeClass(DataType dataType, JsonGenerator jsonGenerator)
        throws IOException {
    // skip the conversion class if only nested types contain custom conversion classes
    if (!isDefaultClass(dataType)) {
        jsonGenerator.writeStringField(
                FIELD_NAME_CONVERSION_CLASS, dataType.getConversionClass().getName());
    }

    // internal classes only contain nested internal classes
    if (isInternal(dataType, false)) {
        return;
    }

    switch (dataType.getLogicalType().getTypeRoot()) {
        case ARRAY:
        case MULTISET:
            final CollectionDataType collectionDataType = (CollectionDataType) dataType;
            serializeFieldIfNotDefaultClass(
                    collectionDataType.getElementDataType(), FIELD_NAME_ELEMENT_CLASS, jsonGenerator);
            break;
        case MAP:
            final KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            serializeFieldIfNotDefaultClass(
                    keyValueDataType.getKeyDataType(), FIELD_NAME_KEY_CLASS, jsonGenerator);
            serializeFieldIfNotDefaultClass(
                    keyValueDataType.getValueDataType(), FIELD_NAME_VALUE_CLASS, jsonGenerator);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final List<Field> nonDefaultFields =
                    DataType.getFields(dataType).stream()
                            .filter(field -> !isDefaultClassNested(field.getDataType()))
                            .collect(Collectors.toList());
            if (nonDefaultFields.isEmpty()) {
                break;
            }
            jsonGenerator.writeFieldName(FIELD_NAME_FIELDS);
            jsonGenerator.writeStartArray();
            for (Field nonDefaultField : nonDefaultFields) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeStringField(FIELD_NAME_FIELD_NAME, nonDefaultField.getName());
                serializeClass(nonDefaultField.getDataType(), jsonGenerator);
                jsonGenerator.writeEndObject();
            }
            jsonGenerator.writeEndArray();
            break;
        case DISTINCT_TYPE:
            final DataType sourceDataType = dataType.getChildren().get(0);
            if (!isDefaultClassNested(sourceDataType)) {
                serializeClass(sourceDataType, jsonGenerator);
            }
            break;
        default:
            // all other type roots carry no nested conversion classes
    }
}
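
The ROW and STRUCTURED_TYPE branch above relies on DataType.getFields, which exposes the top-level children of a composite type as DataTypes.Field instances. The following is a minimal sketch of that helper in isolation; the class name GetFieldsExample and the sample row type are illustrative assumptions:

import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.STRING;

import java.util.List;
import org.apache.flink.table.api.DataTypes.Field;
import org.apache.flink.table.types.DataType;

public class GetFieldsExample {

    public static void main(String[] args) {
        final DataType rowType = ROW(FIELD("id", INT()), FIELD("name", STRING()));

        // DataType.getFields returns the top-level fields of a ROW or structured type
        final List<Field> fields = DataType.getFields(rowType);
        for (Field field : fields) {
            // getName and getDataType are the same accessors used by serializeClass above
            System.out.println(field.getName() + ": " + field.getDataType());
        }
    }
}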