Example use of org.apache.flink.streaming.api.operators.Output in the Apache Flink project:
class BoltCollectorTest, method testBoltStormCollector.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testBoltStormCollector() throws InstantiationException, IllegalAccessException {
// Iterate over the "raw" case (numberOfAttributes == -1: the single value is
// forwarded without Tuple wrapping) and attribute counts 0..25 (Flink provides
// tuple classes up to Tuple25).
for (int numberOfAttributes = -1; numberOfAttributes < 26; ++numberOfAttributes) {
final Output flinkCollector = mock(Output.class);
Tuple flinkTuple = null;
final Values tuple = new Values();

final String streamId = "streamId";
HashMap<String, Integer> attributes = new HashMap<String, Integer>();
attributes.put(streamId, numberOfAttributes);

// Construction is identical for both branches, so hoist it out of the if.
// Task id -1 means "do not append a task id field" — TODO confirm against BoltCollector.
BoltCollector<?> collector = new BoltCollector(attributes, -1, flinkCollector);

if (numberOfAttributes == -1) {
// Raw mode: a single random value, no Flink tuple is built.
// Autoboxing replaces the deprecated new Integer(...) constructor.
tuple.add(this.r.nextInt());
} else {
// Tuple mode: mirror every storm value into the matching Flink TupleN field.
flinkTuple = Tuple.getTupleClass(numberOfAttributes).newInstance();
for (int i = 0; i < numberOfAttributes; ++i) {
tuple.add(this.r.nextInt());
flinkTuple.setField(tuple.get(i), i);
}
}

final Collection anchors = mock(Collection.class);
final List<Integer> taskIds;
taskIds = collector.emit(streamId, anchors, tuple);

// emit() is expected to return no receiver task ids.
Assert.assertNull(taskIds);

if (numberOfAttributes == -1) {
// Raw mode forwards the value itself.
verify(flinkCollector).collect(tuple.get(0));
} else {
// Tuple mode forwards the assembled Flink tuple.
verify(flinkCollector).collect(flinkTuple);
}
}
}
Example use of org.apache.flink.streaming.api.operators.Output in the Apache Flink project:
class BoltWrapperTest, method testMultipleOutputStreams.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testMultipleOutputStreams() throws Exception {
// Randomly decide, per output stream, whether it emits raw values
// or values wrapped in Tuple1.
final boolean rawStream1 = super.r.nextBoolean();
final boolean rawStream2 = super.r.nextBoolean();

// The mocked input record yields 2 on the first call and 3 on the second.
final StreamRecord record = mock(StreamRecord.class);
when(record.getValue()).thenReturn(2).thenReturn(3);

final Output output = mock(Output.class);

final HashSet<String> rawStreams = new HashSet<String>();
if (rawStream1) {
rawStreams.add("stream1");
}
if (rawStream2) {
rawStreams.add("stream2");
}

final BoltWrapper wrapper = new BoltWrapper(new TestBolt(), null, rawStreams);
wrapper.setup(createMockStreamTask(), new StreamConfig(new Configuration()), output);
wrapper.open();

// First element: goes to stream1 carrying value 2 (raw or Tuple1-wrapped).
final SplitStreamType expected = new SplitStreamType<Integer>();
expected.streamId = "stream1";
expected.value = rawStream1 ? 2 : new Tuple1<Integer>(2);
wrapper.processElement(record);
verify(output).collect(new StreamRecord<SplitStreamType>(expected));

// Second element: goes to stream2 carrying value 3.
expected.streamId = "stream2";
expected.value = rawStream2 ? 3 : new Tuple1<Integer>(3);
wrapper.processElement(record);
verify(output, times(2)).collect(new StreamRecord<SplitStreamType>(expected));
}
Example use of org.apache.flink.streaming.api.operators.Output in the Apache Beam project:
class DoFnOperator, method initializeState.
/**
 * Restores/initializes operator state: the pushed-back-elements buffer, watermark
 * fields, side-input handling, and (for keyed operators) state and timer internals.
 */
@Override
public void initializeState(StateInitializationContext context) throws Exception {
super.initializeState(context);
// Buffer for elements that could not be processed yet (e.g. waiting for side input).
ListStateDescriptor<WindowedValue<InputT>> pushedBackStateDescriptor = new ListStateDescriptor<>("pushed-back-elements", new CoderTypeSerializer<>(windowedInputCoder, serializedOptions));
if (keySelector != null) {
// Keyed operator: buffer lives in the keyed state backend, partitioned by key.
pushedBackElementsHandler = KeyedPushedBackElementsHandler.create(keySelector, getKeyedStateBackend(), pushedBackStateDescriptor);
} else {
// Non-keyed operator: buffer lives in plain operator state.
ListState<WindowedValue<InputT>> listState = getOperatorStateBackend().getListState(pushedBackStateDescriptor);
pushedBackElementsHandler = NonKeyedPushedBackElementsHandler.create(listState);
}
// All watermarks start at the minimum timestamp until real watermarks arrive.
currentInputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
currentSideInputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
currentOutputWatermark = BoundedWindow.TIMESTAMP_MIN_VALUE.getMillis();
sideInputReader = NullSideInputReader.of(sideInputs);
if (!sideInputs.isEmpty()) {
sideInputHandler = new SideInputHandler(sideInputs, sideInputStateInternals);
sideInputReader = sideInputHandler;
// Hold the pushed-back watermark at the minimum timestamp of any buffered
// element so it is not advanced past data still waiting for side input.
Stream<WindowedValue<InputT>> pushedBack = pushedBackElementsHandler.getElements();
long min = pushedBack.map(v -> v.getTimestamp().getMillis()).reduce(Long.MAX_VALUE, Math::min);
pushedBackWatermark = min;
} else {
// No side inputs: nothing can be pushed back, so the watermark is unconstrained.
pushedBackWatermark = Long.MAX_VALUE;
}
// StatefulPardo or WindowDoFn
if (keyCoder != null) {
keyedStateInternals = new FlinkStateInternals<>((KeyedStateBackend) getKeyedStateBackend(), keyCoder, serializedOptions);
if (timerService == null) {
timerService = getInternalTimerService("beam-timer", new CoderTypeSerializer<>(timerCoder, serializedOptions), this);
}
timerInternals = new FlinkTimerInternals();
timeServiceManagerCompat = getTimeServiceManagerCompat();
}
// Must come last: the output manager may depend on the state initialized above
// (operator state backend, bundle lock) — NOTE(review): confirm ordering constraint.
outputManager = outputManagerFactory.create(output, getLockToAcquireForStateAccessDuringBundles(), getOperatorStateBackend());
}
Example use of org.apache.flink.streaming.api.operators.Output in the Apache Flink project:
class BoltCollectorTest, method testBoltStormCollectorWithTaskId.
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testBoltStormCollectorWithTaskId() throws InstantiationException, IllegalAccessException {
// Attribute counts 0..24: one extra field is reserved for the task id,
// and Flink tuple classes only go up to Tuple25.
for (int numberOfAttributes = 0; numberOfAttributes < 25; ++numberOfAttributes) {
final Output flinkCollector = mock(Output.class);
final int taskId = 42;

final String streamId = "streamId";
HashMap<String, Integer> attributes = new HashMap<String, Integer>();
attributes.put(streamId, numberOfAttributes);

BoltCollector<?> collector = new BoltCollector(attributes, taskId, flinkCollector);

// Build the storm values and the expected Flink tuple side by side.
final Values tuple = new Values();
final Tuple flinkTuple = Tuple.getTupleClass(numberOfAttributes + 1).newInstance();
for (int i = 0; i < numberOfAttributes; ++i) {
// Autoboxing replaces the deprecated new Integer(...) constructor.
tuple.add(this.r.nextInt());
flinkTuple.setField(tuple.get(i), i);
}
// The task id is appended as the last tuple field.
flinkTuple.setField(taskId, numberOfAttributes);

final Collection anchors = mock(Collection.class);
final List<Integer> taskIds;
taskIds = collector.emit(streamId, anchors, tuple);

// emit() is expected to return no receiver task ids.
Assert.assertNull(taskIds);

verify(flinkCollector).collect(flinkTuple);
}
}
Example use of org.apache.flink.streaming.api.operators.Output in the Apache Flink project:
class OperatorChain, method createOutputCollector.
// ------------------------------------------------------------------------
//  initialization utilities
// ------------------------------------------------------------------------
/**
 * Builds the output collector for one operator: gathers its network (non-chained)
 * and chained outputs, then wraps them as a single Output — directly, broadcasting,
 * or via output selectors when the outputs are directed.
 *
 * @param containingTask the task that owns this chain (used for the execution config)
 * @param operatorConfig config of the operator whose outputs are being wired
 * @param chainedConfigs configs of downstream chained operators, by output id
 * @param streamOutputs  pre-created record writers for the network output edges
 * @param allOperators   accumulator that chained-operator creation appends to
 * @return a single Output fanning records out to all of this operator's outputs
 */
private <T> Output<StreamRecord<T>> createOutputCollector(StreamTask<?, ?> containingTask, StreamConfig operatorConfig, Map<Integer, StreamConfig> chainedConfigs, ClassLoader userCodeClassloader, Map<StreamEdge, RecordWriterOutput<?>> streamOutputs, List<StreamOperator<?>> allOperators) {
List<Tuple2<Output<StreamRecord<T>>, StreamEdge>> allOutputs = new ArrayList<>(4);
// create collectors for the network outputs
for (StreamEdge outputEdge : operatorConfig.getNonChainedOutputs(userCodeClassloader)) {
@SuppressWarnings("unchecked") RecordWriterOutput<T> output = (RecordWriterOutput<T>) streamOutputs.get(outputEdge);
allOutputs.add(new Tuple2<Output<StreamRecord<T>>, StreamEdge>(output, outputEdge));
}
// Create collectors for the chained outputs
for (StreamEdge outputEdge : operatorConfig.getChainedOutputs(userCodeClassloader)) {
int outputId = outputEdge.getTargetId();
StreamConfig chainedOpConfig = chainedConfigs.get(outputId);
// Recursively builds the chained operator and returns its input-facing Output.
Output<StreamRecord<T>> output = createChainedOperator(containingTask, chainedOpConfig, chainedConfigs, userCodeClassloader, streamOutputs, allOperators, outputEdge.getOutputTag());
allOutputs.add(new Tuple2<>(output, outputEdge));
}
// if there are multiple outputs, or the outputs are directed, we need to
// wrap them as one output
List<OutputSelector<T>> selectors = operatorConfig.getOutputSelectors(userCodeClassloader);
if (selectors == null || selectors.isEmpty()) {
// simple path, no selector necessary
if (allOutputs.size() == 1) {
return allOutputs.get(0).f0;
} else {
// send to N outputs. Note that this includes the special case
// of sending to zero outputs
@SuppressWarnings({ "unchecked", "rawtypes" }) Output<StreamRecord<T>>[] asArray = new Output[allOutputs.size()];
for (int i = 0; i < allOutputs.size(); i++) {
asArray[i] = allOutputs.get(i).f0;
}
// With object reuse enabled, records must be copied before broadcasting,
// otherwise multi-chaining would not work correctly.
if (containingTask.getExecutionConfig().isObjectReuseEnabled()) {
return new CopyingBroadcastingOutputCollector<>(asArray, this);
} else {
return new BroadcastingOutputCollector<>(asArray, this);
}
}
} else {
// With object reuse enabled, records must be copied before directed emission,
// otherwise multi-chaining would not work correctly.
if (containingTask.getExecutionConfig().isObjectReuseEnabled()) {
return new CopyingDirectedOutput<>(selectors, allOutputs);
} else {
return new DirectedOutput<>(selectors, allOutputs);
}
}
}
Aggregations