Usage example of org.apache.flink.storm.util.BoltFileSink from the project flink by apache:
class WordCountTopology, method buildTopology.
/**
 * Assembles the word-count topology: a source spout, a tokenizer bolt, a
 * counter bolt, and a result sink, wired via shuffle/fields groupings.
 *
 * @param indexOrName if {@code true}, bolts access tuple fields by index
 *        ({@link BoltTokenizer}/{@link BoltCounter}); otherwise by name
 *        ({@link BoltTokenizerByName}/{@link BoltCounterByName})
 * @return the fully wired {@link TopologyBuilder}
 */
public static TopologyBuilder buildTopology(boolean indexOrName) {
	final TopologyBuilder builder = new TopologyBuilder();

	// Source: either a file spout (wrapped in NullTerminatingSpout purely to
	// stabilize the integration test) or an in-memory spout.
	if (fileInputOutput) {
		builder.setSpout(spoutId,
				new NullTerminatingSpout(new WordCountFileSpout(stripSchemePrefix(textPath))));
	} else {
		builder.setSpout(spoutId, new WordCountInMemorySpout());
	}

	// Tokenize lines into (word, 1) pairs, then count per word. The two
	// branches differ only in whether tuple fields are addressed by index or
	// by name.
	if (indexOrName) {
		builder.setBolt(tokenierzerId, new BoltTokenizer(), 4).shuffleGrouping(spoutId);
		builder.setBolt(counterId, new BoltCounter(), 4)
				.fieldsGrouping(tokenierzerId, new Fields(BoltTokenizer.ATTRIBUTE_WORD));
	} else {
		builder.setBolt(tokenierzerId, new BoltTokenizerByName(), 4).shuffleGrouping(spoutId);
		builder.setBolt(counterId, new BoltCounterByName(), 4)
				.fieldsGrouping(tokenierzerId, new Fields(BoltTokenizerByName.ATTRIBUTE_WORD));
	}

	// Sink: write counts to a file or print them to stdout.
	if (fileInputOutput) {
		builder.setBolt(sinkId, new BoltFileSink(stripSchemePrefix(outputPath), formatter))
				.shuffleGrouping(counterId);
	} else {
		builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(counterId);
	}

	return builder;
}

/**
 * Strips a URI-style scheme prefix (e.g. {@code file:}) from a path by
 * keeping only the text after the last {@code ':'}; returns the input
 * unchanged if no colon is present.
 */
private static String stripSchemePrefix(final String path) {
	final String[] parts = path.split(":");
	return parts[parts.length - 1];
}
Usage example of org.apache.flink.storm.util.BoltFileSink from the project flink by apache:
class StormUnionITCase, method testProgram.
@Override
protected void testProgram() throws Exception {
	final TopologyBuilder builder = new TopologyBuilder();

	// Three finite random sources with distinct seeds and element counts,
	// all merged into a single bolt via shuffle groupings on stream 0.
	builder.setSpout(spoutId1, new FiniteRandomSpout(0, 10));
	builder.setSpout(spoutId2, new FiniteRandomSpout(1, 8));
	builder.setSpout(spoutId3, new FiniteRandomSpout(2, 13));
	builder.setBolt(boltId, new MergerBolt())
			.shuffleGrouping(spoutId1, FiniteRandomSpout.STREAM_PREFIX + 0)
			.shuffleGrouping(spoutId2, FiniteRandomSpout.STREAM_PREFIX + 0)
			.shuffleGrouping(spoutId3, FiniteRandomSpout.STREAM_PREFIX + 0);

	// Strip the URI scheme prefix (e.g. "file:") from the result path by
	// keeping only the text after the last ':'.
	final String[] tokens = this.resultPath.split(":");
	final String outputFile = tokens[tokens.length - 1];
	builder.setBolt(sinkId, new BoltFileSink(outputFile)).shuffleGrouping(boltId);

	// Execute the topology on a local cluster.
	final FlinkLocalCluster cluster = FlinkLocalCluster.getLocalCluster();
	Config conf = new Config();
	// Blocking submission is only required to stabilize the integration test.
	conf.put(FlinkLocalCluster.SUBMIT_BLOCKING, true);
	try {
		cluster.submitTopology(topologyId, conf, FlinkTopology.createTopology(builder));
	} finally {
		// Shut the cluster down even if submission fails, so a failed run
		// does not leak the local cluster into subsequent tests.
		cluster.shutdown();
	}
}
Aggregations