Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder in project flink by apache.
From the class RexWindowBoundSerdeTest, method testSerde.
@Test
public void testSerde() throws IOException {
    SerdeContext serdeCtx =
            new SerdeContext(
                    null,
                    new FlinkContextImpl(
                            false,
                            TableConfig.getDefault(),
                            new ModuleManager(),
                            null,
                            CatalogManagerMocks.createEmptyCatalogManager(),
                            null),
                    Thread.currentThread().getContextClassLoader(),
                    FlinkTypeFactory.INSTANCE(),
                    FlinkSqlOperatorTable.instance());
    ObjectReader objectReader = JsonSerdeUtil.createObjectReader(serdeCtx);
    ObjectWriter objectWriter = JsonSerdeUtil.createObjectWriter(serdeCtx);

    // The three constant bounds must survive a JSON round trip.
    assertEquals(
            RexWindowBounds.CURRENT_ROW,
            objectReader.readValue(
                    objectWriter.writeValueAsString(RexWindowBounds.CURRENT_ROW),
                    RexWindowBound.class));
    assertEquals(
            RexWindowBounds.UNBOUNDED_FOLLOWING,
            objectReader.readValue(
                    objectWriter.writeValueAsString(RexWindowBounds.UNBOUNDED_FOLLOWING),
                    RexWindowBound.class));
    assertEquals(
            RexWindowBounds.UNBOUNDED_PRECEDING,
            objectReader.readValue(
                    objectWriter.writeValueAsString(RexWindowBounds.UNBOUNDED_PRECEDING),
                    RexWindowBound.class));

    // Bounded FOLLOWING/PRECEDING offsets built from a literal must round-trip too.
    RexBuilder builder = new RexBuilder(FlinkTypeFactory.INSTANCE());
    RexWindowBound windowBound = RexWindowBounds.following(builder.makeLiteral("test"));
    assertEquals(
            windowBound,
            objectReader.readValue(
                    objectWriter.writeValueAsString(windowBound), RexWindowBound.class));
    windowBound = RexWindowBounds.preceding(builder.makeLiteral("test"));
    assertEquals(
            windowBound,
            objectReader.readValue(
                    objectWriter.writeValueAsString(windowBound), RexWindowBound.class));
}
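Every assertion above repeats the same write-then-read round trip. A hypothetical helper like the following (not part of the Flink test; it assumes JUnit's assertEquals and the objectReader/objectWriter created above) captures the pattern in one place:

private static void assertRoundTrip(
        ObjectWriter objectWriter, ObjectReader objectReader, RexWindowBound bound)
        throws IOException {
    // Serialize the bound to JSON, read it back, and require structural equality.
    String json = objectWriter.writeValueAsString(bound);
    RexWindowBound restored = objectReader.readValue(json, RexWindowBound.class);
    assertEquals(bound, restored);
}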
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder in project flink by apache.
From the class ChangelogModeJsonDeserializer, method deserialize.
@Override
public ChangelogMode deserialize(
        JsonParser jsonParser, DeserializationContext deserializationContext)
        throws IOException {
    ChangelogMode.Builder builder = ChangelogMode.newBuilder();
    // The payload is a JSON array of RowKind names; names are matched case-insensitively.
    JsonNode rowKindsNode = jsonParser.readValueAsTree();
    for (JsonNode rowKindNode : rowKindsNode) {
        RowKind rowKind = RowKind.valueOf(rowKindNode.asText().toUpperCase());
        builder.addContainedKind(rowKind);
    }
    return builder.build();
}
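For orientation, a minimal sketch of wiring this deserializer into a plain ObjectMapper (illustrative only; Flink registers it internally, the ObjectMapper/SimpleModule here are the shaded Jackson classes, and a directly accessible constructor is assumed):

ObjectMapper mapper = new ObjectMapper();
SimpleModule module = new SimpleModule();
module.addDeserializer(ChangelogMode.class, new ChangelogModeJsonDeserializer());
mapper.registerModule(module);

// The deserializer expects a JSON array of RowKind names:
ChangelogMode mode =
        mapper.readValue("[\"INSERT\", \"UPDATE_AFTER\", \"DELETE\"]", ChangelogMode.class);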
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder in project flink by apache.
From the class CsvRowSchemaConverter, method convert.
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
    final Builder builder = new CsvSchema.Builder();
    final String[] fields = rowType.getFieldNames();
    final TypeInformation<?>[] types = rowType.getFieldTypes();
    for (int i = 0; i < rowType.getArity(); i++) {
        builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
    }
    return builder.build();
}
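A brief usage sketch, assuming a simple two-field row type (the field names and types are illustrative, using org.apache.flink.api.common.typeinfo.Types):

RowTypeInfo rowType =
        new RowTypeInfo(
                new TypeInformation<?>[] {Types.STRING, Types.INT},
                new String[] {"name", "count"});
// The resulting schema has one CsvSchema column per row field, in declaration order.
CsvSchema schema = CsvRowSchemaConverter.convert(rowType);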
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder in project flink by apache.
From the class CsvRowSchemaConverter, method convert.
/**
 * Convert {@link RowType} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowType rowType) {
    Builder builder = new CsvSchema.Builder();
    List<RowType.RowField> fields = rowType.getFields();
    for (int i = 0; i < rowType.getFieldCount(); i++) {
        String fieldName = fields.get(i).getName();
        LogicalType fieldType = fields.get(i).getType();
        builder.addColumn(new Column(i, fieldName, convertType(fieldName, fieldType)));
    }
    return builder.build();
}
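The same pattern applies to the logical-type overload; a sketch with an illustrative two-field RowType (logical types from org.apache.flink.table.types.logical):

RowType rowType =
        RowType.of(
                new LogicalType[] {new VarCharType(VarCharType.MAX_LENGTH), new IntType()},
                new String[] {"name", "count"});
CsvSchema schema = CsvRowSchemaConverter.convert(rowType);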
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder in project flink by apache.
From the class SinkIntoKinesis, method main.
public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(10_000);

    // Generate a bounded sequence and wrap each element as a small JSON document.
    DataStream<String> fromGen =
            env.fromSequence(1, 10_000_000L)
                    .map(Object::toString)
                    .returns(String.class)
                    .map(data -> mapper.writeValueAsString(ImmutableMap.of("data", data)));

    Properties sinkProperties = new Properties();
    sinkProperties.put(AWSConfigConstants.AWS_REGION, "your-region-here");

    KinesisDataStreamsSink<String> kdsSink =
            KinesisDataStreamsSink.<String>builder()
                    .setSerializationSchema(new SimpleStringSchema())
                    .setPartitionKeyGenerator(element -> String.valueOf(element.hashCode()))
                    .setStreamName("your-stream-name")
                    .setMaxBatchSize(20)
                    .setKinesisClientProperties(sinkProperties)
                    .build();

    fromGen.sinkTo(kdsSink);
    env.execute("KDS Async Sink Example Program");
}
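The example only sets the region, so credentials are resolved from the default AWS provider chain. If explicit credentials are required, the same client properties can carry them; a sketch, assuming the credential keys exposed by AWSConfigConstants (values below are placeholders):

Properties sinkProperties = new Properties();
sinkProperties.put(AWSConfigConstants.AWS_REGION, "your-region-here");
// Placeholder credentials; prefer the default provider chain where possible.
sinkProperties.put(AWSConfigConstants.AWS_ACCESS_KEY_ID, "your-access-key-id");
sinkProperties.put(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "your-secret-access-key");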