Use of org.apache.flink.storm.util.StormConfig in project flink by apache.
The class BoltWrapper, method open():
@Override
public void open() throws Exception {
    super.open();
    this.flinkCollector = new TimestampedCollector<>(this.output);

    // Translate Flink's global job parameters into a Storm configuration:
    // reuse it directly if it already is a StormConfig, otherwise copy its entries.
    GlobalJobParameters config = getExecutionConfig().getGlobalJobParameters();
    StormConfig stormConfig = new StormConfig();
    if (config != null) {
        if (config instanceof StormConfig) {
            stormConfig = (StormConfig) config;
        } else {
            stormConfig.putAll(config.toMap());
        }
    }

    this.topologyContext = WrapperSetupHelper.createTopologyContext(
            getRuntimeContext(), this.bolt, this.name, this.stormTopology, stormConfig);

    final OutputCollector stormCollector = new OutputCollector(
            new BoltCollector<OUT>(this.numberOfAttributes, this.topologyContext.getThisTaskId(), this.flinkCollector));

    if (this.stormTopology != null) {
        // Record the component id, stream id, and output schema for every input task.
        Map<GlobalStreamId, Grouping> inputs = this.topologyContext.getThisSources();
        for (GlobalStreamId inputStream : inputs.keySet()) {
            for (Integer tid : this.topologyContext.getComponentTasks(inputStream.get_componentId())) {
                this.inputComponentIds.put(tid, inputStream.get_componentId());
                this.inputStreamIds.put(tid, inputStream.get_streamId());
                this.inputSchemas.put(tid, this.topologyContext.getComponentOutputFields(inputStream));
            }
        }
    }

    this.bolt.prepare(stormConfig, this.topologyContext, stormCollector);
}
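The wrapper only ever sees a StormConfig when one has been registered as the job's global parameters before submission. A minimal sketch of that registration, assuming the flink-storm compatibility layer is on the classpath; the config key, job name, and class name are illustrative, not taken from the code above:

    import org.apache.flink.storm.util.StormConfig;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

    public class StormConfigSetup {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // StormConfig is both a java.util.Map and a GlobalJobParameters,
            // so it can be registered directly as Flink's global job parameters.
            StormConfig stormConfig = new StormConfig();
            stormConfig.put("topology.debug", true);
            env.getConfig().setGlobalJobParameters(stormConfig);

            // ... build the topology; every BoltWrapper/SpoutWrapper in this job
            // now receives 'stormConfig' in Bolt.prepare() / Spout.open().

            env.execute("storm-compatibility-job");
        }
    }

With this in place, the instanceof check in open() takes the StormConfig branch and passes the map through unchanged; a plain Flink Configuration would instead be copied entry by entry via putAll(config.toMap()).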
Use of org.apache.flink.storm.util.StormConfig in project flink by apache.
The class SpoutWrapper, method run():
@Override
public final void run(final SourceContext<OUT> ctx) throws Exception {
    // Translate Flink's global job parameters into a Storm configuration.
    final GlobalJobParameters config = super.getRuntimeContext().getExecutionConfig().getGlobalJobParameters();
    StormConfig stormConfig = new StormConfig();
    if (config != null) {
        if (config instanceof StormConfig) {
            stormConfig = (StormConfig) config;
        } else {
            stormConfig.putAll(config.toMap());
        }
    }

    final TopologyContext stormTopologyContext = WrapperSetupHelper.createTopologyContext(
            (StreamingRuntimeContext) super.getRuntimeContext(), this.spout, this.name, this.stormTopology, stormConfig);

    SpoutCollector<OUT> collector = new SpoutCollector<OUT>(
            this.numberOfAttributes, stormTopologyContext.getThisTaskId(), ctx);

    this.spout.open(stormConfig, stormTopologyContext, new SpoutOutputCollector(collector));
    this.spout.activate();

    if (numberOfInvocations == null) {
        if (this.spout instanceof FiniteSpout) {
            // Run a finite spout until it signals that it has no more data.
            final FiniteSpout finiteSpout = (FiniteSpout) this.spout;
            while (this.isRunning && !finiteSpout.reachedEnd()) {
                finiteSpout.nextTuple();
            }
        } else {
            // Run a regular spout until the source is canceled.
            while (this.isRunning) {
                this.spout.nextTuple();
            }
        }
    } else {
        int counter = this.numberOfInvocations;
        if (counter >= 0) {
            // Non-negative count: call nextTuple() a fixed number of times.
            while ((--counter >= 0) && this.isRunning) {
                this.spout.nextTuple();
            }
        } else {
            // Negative count: keep calling nextTuple() as long as it still emits data.
            do {
                collector.tupleEmitted = false;
                this.spout.nextTuple();
            } while (collector.tupleEmitted && this.isRunning);
        }
    }
}
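The FiniteSpout branch above applies only to spouts that implement org.apache.flink.storm.util.FiniteSpout, which adds a single reachedEnd() method on top of the regular spout interface. A minimal sketch of such a spout; the class name, the emitted values, and the org.apache.storm import packages (older flink-storm releases use backtype.storm instead) are assumptions, not taken from the code above:

    import java.util.Map;

    import org.apache.flink.storm.util.FiniteSpout;
    import org.apache.storm.spout.SpoutOutputCollector;
    import org.apache.storm.task.TopologyContext;
    import org.apache.storm.topology.OutputFieldsDeclarer;
    import org.apache.storm.topology.base.BaseRichSpout;
    import org.apache.storm.tuple.Fields;
    import org.apache.storm.tuple.Values;

    // Emits the numbers 0..9 and then reports that it reached its end,
    // so SpoutWrapper.run() returns instead of looping forever.
    public class CountingFiniteSpout extends BaseRichSpout implements FiniteSpout {

        private SpoutOutputCollector collector;
        private int counter = 0;

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void nextTuple() {
            if (!reachedEnd()) {
                this.collector.emit(new Values(this.counter++));
            }
        }

        @Override
        public boolean reachedEnd() {
            return this.counter >= 10;
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("number"));
        }
    }

SpoutWrapper.run() checks reachedEnd() before every nextTuple() call, so the Flink source function terminates cleanly once the counter hits its limit.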
Use of org.apache.flink.storm.util.StormConfig in project flink by apache.
The class BoltWrapperTest, method testOpenSink():
@SuppressWarnings("unchecked")
@Test
public void testOpenSink() throws Exception {
final StormConfig stormConfig = new StormConfig();
final Configuration flinkConfig = new Configuration();
final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig).thenReturn(flinkConfig);
final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
when(taskContext.getTaskName()).thenReturn("name");
when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());
final IRichBolt bolt = mock(IRichBolt.class);
BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
wrapper.setup(createMockStreamTask(), new StreamConfig(new Configuration()), mock(Output.class));
wrapper.open();
verify(bolt).prepare(any(Map.class), any(TopologyContext.class), isNotNull(OutputCollector.class));
}
Use of org.apache.flink.storm.util.StormConfig in project flink by apache.
The class BoltWrapperTest, method testOpen():
@SuppressWarnings("unchecked")
@Test
public void testOpen() throws Exception {
// utility mocks
final StormConfig stormConfig = new StormConfig();
final Configuration flinkConfig = new Configuration();
final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig).thenReturn(flinkConfig);
final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
when(taskContext.getTaskName()).thenReturn("name");
when(taskContext.getMetricGroup()).thenReturn(new UnregisteredMetricsGroup());
final SetupOutputFieldsDeclarer declarer = new SetupOutputFieldsDeclarer();
declarer.declare(new Fields("dummy"));
PowerMockito.whenNew(SetupOutputFieldsDeclarer.class).withNoArguments().thenReturn(declarer);
// (1) open with no configuration
{
ExecutionConfig execConfig = mock(ExecutionConfig.class);
when(execConfig.getGlobalJobParameters()).thenReturn(null);
final IRichBolt bolt = mock(IRichBolt.class);
BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
wrapper.open();
verify(bolt).prepare(any(Map.class), any(TopologyContext.class), any(OutputCollector.class));
}
// (2) open with a storm specific configuration
{
ExecutionConfig execConfig = mock(ExecutionConfig.class);
when(execConfig.getGlobalJobParameters()).thenReturn(stormConfig);
final IRichBolt bolt = mock(IRichBolt.class);
BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(bolt);
wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
wrapper.open();
verify(bolt).prepare(same(stormConfig), any(TopologyContext.class), any(OutputCollector.class));
}
// (3) open with a flink config
{
final Configuration cfg = new Configuration();
cfg.setString("foo", "bar");
cfg.setInteger("the end (the int)", Integer.MAX_VALUE);
ExecutionConfig execConfig = mock(ExecutionConfig.class);
when(execConfig.getGlobalJobParameters()).thenReturn(new UnmodifiableConfiguration(cfg));
TestDummyBolt testBolt = new TestDummyBolt();
BoltWrapper<Object, Object> wrapper = new BoltWrapper<Object, Object>(testBolt);
wrapper.setup(createMockStreamTask(execConfig), new StreamConfig(new Configuration()), mock(Output.class));
wrapper.open();
for (Entry<String, String> entry : cfg.toMap().entrySet()) {
Assert.assertEquals(entry.getValue(), testBolt.config.get(entry.getKey()));
}
}
}
Use of org.apache.flink.storm.util.StormConfig in project flink by apache.
The class SpoutWrapperTest, method testRunPrepare():
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testRunPrepare() throws Exception {
    final StormConfig stormConfig = new StormConfig();
    stormConfig.put(this.r.nextInt(), this.r.nextInt());

    final Configuration flinkConfig = new Configuration();
    flinkConfig.setInteger("testKey", this.r.nextInt());

    final ExecutionConfig taskConfig = mock(ExecutionConfig.class);
    when(taskConfig.getGlobalJobParameters()).thenReturn(null).thenReturn(stormConfig).thenReturn(flinkConfig);

    final StreamingRuntimeContext taskContext = mock(StreamingRuntimeContext.class);
    when(taskContext.getExecutionConfig()).thenReturn(taskConfig);
    when(taskContext.getTaskName()).thenReturn("name");

    final IRichSpout spout = mock(IRichSpout.class);
    SpoutWrapper spoutWrapper = new SpoutWrapper(spout);
    spoutWrapper.setRuntimeContext(taskContext);
    spoutWrapper.cancel();

    // test without configuration
    spoutWrapper.run(mock(SourceContext.class));
    verify(spout).open(any(Map.class), any(TopologyContext.class), any(SpoutOutputCollector.class));

    // test with StormConfig
    spoutWrapper.run(mock(SourceContext.class));
    verify(spout).open(eq(stormConfig), any(TopologyContext.class), any(SpoutOutputCollector.class));

    // test with Configuration
    final TestDummySpout testSpout = new TestDummySpout();
    spoutWrapper = new SpoutWrapper(testSpout);
    spoutWrapper.setRuntimeContext(taskContext);
    spoutWrapper.cancel();

    spoutWrapper.run(mock(SourceContext.class));
    for (Entry<String, String> entry : flinkConfig.toMap().entrySet()) {
        Assert.assertEquals(entry.getValue(), testSpout.config.get(entry.getKey()));
    }
}