Use of org.apache.flink.storm.util.FiniteInMemorySpout in project flink by apache: class ExclamationTopology, method buildTopology.
public static TopologyBuilder buildTopology() {
    final TopologyBuilder builder = new TopologyBuilder();

    // get input data
    if (fileInputOutput) {
        // read the text file from the given input path
        final String[] tokens = textPath.split(":");
        final String inputFile = tokens[tokens.length - 1];
        builder.setSpout(spoutId, new FiniteFileSpout(inputFile));
    } else {
        builder.setSpout(spoutId, new FiniteInMemorySpout(WordCountData.WORDS));
    }

    builder.setBolt(firstBoltId, new ExclamationBolt(), 3).shuffleGrouping(spoutId);
    builder.setBolt(secondBoltId, new ExclamationBolt(), 2).shuffleGrouping(firstBoltId);

    // emit result
    if (fileInputOutput) {
        // write the result to the given output path
        final String[] tokens = outputPath.split(":");
        final String outputFile = tokens[tokens.length - 1];
        builder.setBolt(sinkId, new BoltFileSink(outputFile, formatter)).shuffleGrouping(secondBoltId);
    } else {
        builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(secondBoltId);
    }

    return builder;
}
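For context, a minimal sketch of how a topology built this way could be run through the flink-storm compatibility layer. It assumes the FlinkTopology.createTopology(...) and FlinkLocalCluster API of the flink-storm module in the same Flink release (older releases used FlinkTopologyBuilder instead); the topology name and the empty configuration map are placeholders, not taken from the example.

import java.util.HashMap;

import org.apache.flink.storm.api.FlinkLocalCluster;
import org.apache.flink.storm.api.FlinkTopology;

public class ExclamationLocalSketch {

    public static void main(final String[] args) throws Exception {
        // translate the Storm TopologyBuilder from buildTopology() into a Flink job
        final FlinkTopology topology = FlinkTopology.createTopology(ExclamationTopology.buildTopology());

        // run it on an embedded local cluster; topology name and config map are placeholders
        FlinkLocalCluster.getLocalCluster()
                .submitTopology("exclamation-example", new HashMap<String, Object>(), topology);
    }
}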
Use of org.apache.flink.storm.util.FiniteInMemorySpout in project flink by apache: class ExclamationWithSpout, method getTextDataStream.
private static DataStream<String> getTextDataStream(final StreamExecutionEnvironment env) {
    if (fileOutput) {
        final String[] tokens = textPath.split(":");
        final String inputFile = tokens[tokens.length - 1];

        // set Storm configuration
        StormConfig config = new StormConfig();
        config.put(FiniteFileSpout.INPUT_FILE_PATH, inputFile);
        env.getConfig().setGlobalJobParameters(config);

        return env.addSource(
                new SpoutWrapper<String>(new FiniteFileSpout(), new String[] { Utils.DEFAULT_STREAM_ID }),
                TypeExtractor.getForClass(String.class)).setParallelism(1);
    }

    return env.addSource(
            new SpoutWrapper<String>(new FiniteInMemorySpout(WordCountData.WORDS), new String[] { Utils.DEFAULT_STREAM_ID }),
            TypeExtractor.getForClass(String.class)).setParallelism(1);
}
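To show how the returned stream is typically consumed, here is a minimal sketch of a driver. It assumes getTextDataStream(...) were accessible from this class (in the example it is a private helper of ExclamationWithSpout), and plain DataStream map stages stand in for the Storm exclamation bolts; the job name is a placeholder.

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ExclamationWithSpoutSketch {

    // appends "!!!" to every record, standing in for one exclamation bolt stage
    private static final class Exclaim implements MapFunction<String, String> {
        @Override
        public String map(final String value) {
            return value + "!!!";
        }
    }

    public static void main(final String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // spout-backed source as built in getTextDataStream(...)
        final DataStream<String> text = ExclamationWithSpout.getTextDataStream(env);

        // two map stages mirror the two exclamation bolts of the topology variant
        text.map(new Exclaim())
            .map(new Exclaim())
            .print();

        env.execute("Streaming Exclamation with Storm spout");
    }
}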