Usage examples of org.apache.flink.batch.connectors.cassandra.CassandraPojoInputFormat in the Apache Flink project.
Example 1: class BatchPojoExample, method main.
/**
 * Writes a batch of annotated POJOs to Cassandra and reads them back
 * via {@code CassandraPojoInputFormat}.
 *
 * <p>Expects a Cassandra node reachable at 127.0.0.1.
 */
public static void main(String[] args) throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(1);

    // Build 20 sample POJOs, each keyed by a random UUID.
    final List<Pojo> annotatedPojos =
            IntStream.range(0, 20)
                    .mapToObj(i -> new Pojo(UUID.randomUUID().toString(), i, 0))
                    .collect(Collectors.toList());

    final DataSet<Pojo> pojoDataSet = env.fromCollection(annotatedPojos);

    final ClusterBuilder clusterBuilder =
            new ClusterBuilder() {
                private static final long serialVersionUID = -1754532803757154795L;

                @Override
                protected Cluster buildCluster(Cluster.Builder builder) {
                    return builder.addContactPoints("127.0.0.1").build();
                }
            };

    // Persist the POJOs; saveNullFields(true) keeps null columns in the write.
    pojoDataSet.output(
            new CassandraPojoOutputFormat<>(
                    clusterBuilder,
                    Pojo.class,
                    () -> new Mapper.Option[] {Mapper.Option.saveNullFields(true)}));
    env.execute("Write");

    /*
     * This is for the purpose of showing an example of creating a DataSet using
     * CassandraPojoInputFormat.
     */
    final DataSet<Pojo> inputDataSet =
            env.createInput(
                    new CassandraPojoInputFormat<>(
                            SELECT_QUERY,
                            clusterBuilder,
                            Pojo.class,
                            () ->
                                    new Mapper.Option[] {
                                        Mapper.Option.consistencyLevel(ConsistencyLevel.ANY)
                                    }));
    inputDataSet.print();
}
Usage examples of org.apache.flink.batch.connectors.cassandra.CassandraPojoInputFormat in the Apache Flink project.
Example 2: class CassandraConnectorITCase, method readPojosWithInputFormat.
/**
 * Reads every row produced by the data-select query and maps each one onto an
 * instance of the given annotated POJO class using {@code CassandraPojoInputFormat}.
 *
 * @param annotatedPojoClass POJO type carrying the Cassandra mapper annotations
 * @param <T> the POJO type
 * @return all records returned by the query, in read order
 */
private <T> List<T> readPojosWithInputFormat(Class<T> annotatedPojoClass) {
    final CassandraPojoInputFormat<T> source =
            new CassandraPojoInputFormat<>(
                    injectTableName(SELECT_DATA_QUERY), builderForReading, annotatedPojoClass);
    final List<T> pojos = new ArrayList<>();
    try {
        source.configure(new Configuration());
        source.open(null);
        // Drain the input format one record at a time until it reports exhaustion.
        while (!source.reachedEnd()) {
            pojos.add(source.nextRecord(null));
        }
    } finally {
        // Always release the underlying Cassandra session/cluster resources.
        source.close();
    }
    return pojos;
}
Aggregations