Use of org.apache.flink.storm.util.BoltPrintSink in project flink by apache.
The class SplitBoltTopology, method buildTopology:
public static TopologyBuilder buildTopology() {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(spoutId, new RandomSpout(false, seed));
    builder.setBolt(boltId, new SplitBolt()).shuffleGrouping(spoutId);
    builder.setBolt(evenVerifierId, new VerifyAndEnrichBolt(true)).shuffleGrouping(boltId, SplitBolt.EVEN_STREAM);
    builder.setBolt(oddVerifierId, new VerifyAndEnrichBolt(false)).shuffleGrouping(boltId, SplitBolt.ODD_STREAM);
    // emit result
    if (outputPath != null) {
        // strip a possible scheme prefix (e.g. "file:") from the output path
        final String[] tokens = outputPath.split(":");
        final String outputFile = tokens[tokens.length - 1];
        builder.setBolt(sinkId, new BoltFileSink(outputFile, formatter)).shuffleGrouping(evenVerifierId).shuffleGrouping(oddVerifierId);
    } else {
        builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(evenVerifierId).shuffleGrouping(oddVerifierId);
    }
    return builder;
}
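BoltPrintSink is the console sink used in the else branch above: it prints every incoming tuple using the supplied OutputFormatter instead of writing to a file. As a rough illustration of that pattern, here is a minimal print-sink bolt written as a sketch against the plain Storm bolt API; the class name SimplePrintSink, the org.apache.storm package names, and the direct System.out call are assumptions, not the actual flink-storm implementation.

import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

// Illustrative print sink: prints each tuple and acknowledges it.
public class SimplePrintSink extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // print the raw tuple values; the real BoltPrintSink delegates this step to an OutputFormatter
        System.out.println(input.getValues());
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // a pure sink declares no output fields
    }
}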
Use of org.apache.flink.storm.util.BoltPrintSink in project flink by apache.
The class SplitSpoutTopology, method buildTopology:
public static TopologyBuilder buildTopology() {
    final TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout(spoutId, new RandomSpout(true, seed));
    builder.setBolt(evenVerifierId, new VerifyAndEnrichBolt(true)).shuffleGrouping(spoutId, RandomSpout.EVEN_STREAM);
    builder.setBolt(oddVerifierId, new VerifyAndEnrichBolt(false)).shuffleGrouping(spoutId, RandomSpout.ODD_STREAM);
    // emit result
    if (outputPath != null) {
        // strip a possible scheme prefix (e.g. "file:") from the output path
        final String[] tokens = outputPath.split(":");
        final String outputFile = tokens[tokens.length - 1];
        builder.setBolt(sinkId, new BoltFileSink(outputFile, formatter)).shuffleGrouping(evenVerifierId).shuffleGrouping(oddVerifierId);
    } else {
        builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(evenVerifierId).shuffleGrouping(oddVerifierId);
    }
    return builder;
}
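A builder returned by these buildTopology() methods still has to be turned into a topology and submitted. The following is a minimal sketch of a local test run using the plain Storm LocalCluster API; the Flink Storm compatibility layer ships its own local-cluster replacement, but its exact class names differ between Flink versions, so the sketch sticks to the Storm classes. The topology name "split-spout-example" and the fixed sleep are illustrative.

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.topology.TopologyBuilder;

public class RunSplitSpoutLocally {

    public static void main(String[] args) throws Exception {
        // build the topology exactly as shown above (SplitSpoutTopology comes from the Flink Storm examples)
        final TopologyBuilder builder = SplitSpoutTopology.buildTopology();

        // submit to an in-process cluster for testing
        final LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("split-spout-example", new Config(), builder.createTopology());

        // let it run for a while, then tear everything down
        Thread.sleep(10000);
        cluster.killTopology("split-spout-example");
        cluster.shutdown();
    }
}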
Use of org.apache.flink.storm.util.BoltPrintSink in project flink by apache.
The class ExclamationTopology, method buildTopology:
public static TopologyBuilder buildTopology() {
    final TopologyBuilder builder = new TopologyBuilder();
    // get input data
    if (fileInputOutput) {
        // strip a possible scheme prefix (e.g. "file:") from the input path
        final String[] tokens = textPath.split(":");
        final String inputFile = tokens[tokens.length - 1];
        builder.setSpout(spoutId, new FiniteFileSpout(inputFile));
    } else {
        builder.setSpout(spoutId, new FiniteInMemorySpout(WordCountData.WORDS));
    }
    builder.setBolt(firstBoltId, new ExclamationBolt(), 3).shuffleGrouping(spoutId);
    builder.setBolt(secondBoltId, new ExclamationBolt(), 2).shuffleGrouping(firstBoltId);
    // emit result
    if (fileInputOutput) {
        // strip a possible scheme prefix (e.g. "file:") from the output path
        final String[] tokens = outputPath.split(":");
        final String outputFile = tokens[tokens.length - 1];
        builder.setBolt(sinkId, new BoltFileSink(outputFile, formatter)).shuffleGrouping(secondBoltId);
    } else {
        builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(secondBoltId);
    }
    return builder;
}
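Both exclamation bolts in this chain do the same thing: append exclamation marks to each incoming word and forward it downstream until the result reaches the print or file sink. A minimal bolt with that behaviour, written as a sketch against the plain Storm API, could look like the following; the class name SimpleExclamationBolt and the output field name "word" are assumptions, and the real ExclamationBolt in the Flink examples may differ in details.

import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

// Illustrative bolt: appends "!!!" to the first field of each tuple and re-emits it.
public class SimpleExclamationBolt extends BaseRichBolt {

    private OutputCollector collector;

    @Override
    public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // anchor the new tuple on the input and forward the exclaimed word
        collector.emit(input, new Values(input.getString(0) + "!!!"));
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}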
Use of org.apache.flink.storm.util.BoltPrintSink in project flink by apache.
The class WordCountTopology, method buildTopology:
public static TopologyBuilder buildTopology(boolean indexOrName) {
    final TopologyBuilder builder = new TopologyBuilder();
    // get input data
    if (fileInputOutput) {
        // strip a possible scheme prefix (e.g. "file:") from the input path
        final String[] tokens = textPath.split(":");
        final String inputFile = tokens[tokens.length - 1];
        // inserting NullTerminatingSpout is only required to stabilize the integration test
        builder.setSpout(spoutId, new NullTerminatingSpout(new WordCountFileSpout(inputFile)));
    } else {
        builder.setSpout(spoutId, new WordCountInMemorySpout());
    }
    if (indexOrName) {
        // split the lines into pairs (2-tuples) of the form (word, 1); fields are accessed by index
        builder.setBolt(tokenierzerId, new BoltTokenizer(), 4).shuffleGrouping(spoutId);
        // group by the word attribute and sum up the counts
        builder.setBolt(counterId, new BoltCounter(), 4).fieldsGrouping(tokenierzerId, new Fields(BoltTokenizer.ATTRIBUTE_WORD));
    } else {
        // same pipeline, but the bolts access tuple fields by name instead of by index
        builder.setBolt(tokenierzerId, new BoltTokenizerByName(), 4).shuffleGrouping(spoutId);
        builder.setBolt(counterId, new BoltCounterByName(), 4).fieldsGrouping(tokenierzerId, new Fields(BoltTokenizerByName.ATTRIBUTE_WORD));
    }
    // emit result
    if (fileInputOutput) {
        // strip a possible scheme prefix (e.g. "file:") from the output path
        final String[] tokens = outputPath.split(":");
        final String outputFile = tokens[tokens.length - 1];
        builder.setBolt(sinkId, new BoltFileSink(outputFile, formatter)).shuffleGrouping(counterId);
    } else {
        builder.setBolt(sinkId, new BoltPrintSink(formatter), 4).shuffleGrouping(counterId);
    }
    return builder;
}
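The fieldsGrouping on the word attribute is what makes the counting correct: every tuple carrying the same word is routed to the same counter instance, so a per-instance map holds a consistent total. The sketch below shows a minimal counter bolt following that pattern; it uses the plain Storm API, and the class name SimpleCountBolt and the field names "word" and "count" are assumptions rather than the actual BoltCounter implementation.

import java.util.HashMap;
import java.util.Map;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;

// Illustrative counter: keeps a running count per word and emits (word, count) on every update.
public class SimpleCountBolt extends BaseRichBolt {

    private final Map<String, Integer> counts = new HashMap<>();
    private OutputCollector collector;

    @Override
    public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
    }

    @Override
    public void execute(Tuple input) {
        // fieldsGrouping on the word attribute routes all tuples with the same word
        // to the same bolt instance, so this local map is a consistent count
        final String word = input.getStringByField("word");
        final int count = counts.merge(word, 1, Integer::sum);
        collector.emit(input, new Values(word, count));
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word", "count"));
    }
}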