Use of org.hamcrest.Matchers in project spring-integration by spring-projects.
From the class ReactiveStreamsConsumerTests, method testReactiveStreamsConsumerViaConsumerEndpointFactoryBean.
@Test
public void testReactiveStreamsConsumerViaConsumerEndpointFactoryBean() throws Exception {
    FluxMessageChannel testChannel = new FluxMessageChannel();
    List<Message<?>> result = new LinkedList<>();
    CountDownLatch stopLatch = new CountDownLatch(3);
    // Record every delivered message and count the latch down once per delivery.
    MessageHandler messageHandler = m -> {
        result.add(m);
        stopLatch.countDown();
    };

    ConsumerEndpointFactoryBean endpointFactoryBean = new ConsumerEndpointFactoryBean();
    endpointFactoryBean.setBeanFactory(mock(ConfigurableBeanFactory.class));
    endpointFactoryBean.setInputChannel(testChannel);
    endpointFactoryBean.setHandler(messageHandler);
    endpointFactoryBean.setBeanName("reactiveConsumer");
    endpointFactoryBean.afterPropertiesSet();
    endpointFactoryBean.start();

    Message<?> testMessage = new GenericMessage<>("test");
    testChannel.send(testMessage);

    // While the endpoint is stopped, the FluxMessageChannel has no subscriber,
    // so sending must fail with a MessageDeliveryException.
    endpointFactoryBean.stop();
    try {
        testChannel.send(testMessage);
        fail("Expected a MessageDeliveryException for a channel without subscribers");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(MessageDeliveryException.class));
        assertThat(e.getCause(), instanceOf(IllegalStateException.class));
        assertThat(e.getMessage(), containsString("doesn't have subscribers to accept messages"));
    }

    // After a restart the channel accepts messages again.
    endpointFactoryBean.start();
    Message<?> testMessage2 = new GenericMessage<>("test2");
    testChannel.send(testMessage2);
    testChannel.send(testMessage2);

    assertTrue(stopLatch.await(10, TimeUnit.SECONDS));
    assertThat(result.size(), equalTo(3));
    assertThat(result, Matchers.<Message<?>>contains(testMessage, testMessage2, testMessage2));
}
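The snippet is reproduced without its import section. A minimal set of static imports it relies on, assuming JUnit 4, Hamcrest, and Mockito on the test classpath (the original test class may organize these differently), looks like this:

// Assumed imports for the snippet above (illustrative, not copied from the original file).
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;

import org.hamcrest.Matchers;
import org.junit.Test;

The Spring types (FluxMessageChannel, ConsumerEndpointFactoryBean, GenericMessage, MessageDeliveryException, and so on) come from the spring-integration-core, spring-messaging, and spring-beans modules.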
Use of org.hamcrest.Matchers in project spring-integration by spring-projects.
From the class ReactiveStreamsConsumerTests, method testReactiveStreamsConsumerFluxMessageChannel.
@Test
public void testReactiveStreamsConsumerFluxMessageChannel() throws InterruptedException {
    FluxMessageChannel testChannel = new FluxMessageChannel();
    List<Message<?>> result = new LinkedList<>();
    CountDownLatch stopLatch = new CountDownLatch(2);
    MessageHandler messageHandler = m -> {
        result.add(m);
        stopLatch.countDown();
    };
    // The (String) null cast selects the (Object, String methodName) constructor.
    MessageHandler testSubscriber = new MethodInvokingMessageHandler(messageHandler, (String) null);

    ReactiveStreamsConsumer reactiveConsumer = new ReactiveStreamsConsumer(testChannel, testSubscriber);
    reactiveConsumer.setBeanFactory(mock(BeanFactory.class));
    reactiveConsumer.afterPropertiesSet();
    reactiveConsumer.start();

    Message<?> testMessage = new GenericMessage<>("test");
    testChannel.send(testMessage);

    // With the consumer stopped, the channel has no subscribers and rejects the send.
    reactiveConsumer.stop();
    try {
        testChannel.send(testMessage);
        fail("Expected a MessageDeliveryException for a channel without subscribers");
    }
    catch (Exception e) {
        assertThat(e, instanceOf(MessageDeliveryException.class));
        assertThat(e.getCause(), instanceOf(IllegalStateException.class));
        assertThat(e.getMessage(), containsString("doesn't have subscribers to accept messages"));
    }

    reactiveConsumer.start();
    Message<?> testMessage2 = new GenericMessage<>("test2");
    testChannel.send(testMessage2);

    assertTrue(stopLatch.await(10, TimeUnit.SECONDS));
    assertThat(result, Matchers.<Message<?>>contains(testMessage, testMessage2));
}
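Both tests hinge on the fact that a FluxMessageChannel only accepts messages while it has an active subscriber. A minimal, hypothetical sketch of using the channel directly as a Reactive Streams Publisher, assuming Project Reactor and Spring Integration on the classpath (the names channel and subscription are illustrative and not taken from the test class):

import org.springframework.integration.channel.FluxMessageChannel;
import org.springframework.messaging.support.GenericMessage;

import reactor.core.Disposable;
import reactor.core.publisher.Flux;

public class FluxMessageChannelSketch {

    public static void main(String[] args) {
        // FluxMessageChannel implements Publisher<Message<?>>, so it can be subscribed to directly.
        FluxMessageChannel channel = new FluxMessageChannel();
        Disposable subscription = Flux.from(channel)
                .subscribe(message -> System.out.println("received: " + message.getPayload()));

        channel.send(new GenericMessage<>("hello"));  // delivered to the lambda above
        subscription.dispose();
    }
}

Once the subscription is disposed and no subscriber remains, send(...) fails with the same "doesn't have subscribers to accept messages" error that the tests assert after stopping the consumer.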
Use of org.hamcrest.Matchers in project beam by apache.
From the class GreedyPipelineFuserTest, method flattenWithHeterogenousInputsAndOutputsEntirelyMaterialized.
/*
 * goImpulse -> .out -> goRead -> .out \                   -> goParDo -> .out
 *                                      -> flatten -> .out |
 * pyImpulse -> .out -> pyRead -> .out /                   -> pyParDo -> .out
 *
 * becomes
 * (goImpulse.out) -> goRead -> goRead.out -> flatten -> (flatten.out_synthetic0)
 * (pyImpulse.out) -> pyRead -> pyRead.out -> flatten -> (flatten.out_synthetic1)
 * flatten.out_synthetic0 & flatten.out_synthetic1 -> synthetic_flatten -> flatten.out
 * (flatten.out) -> goParDo
 * (flatten.out) -> pyParDo
 */
@Test
public void flattenWithHeterogenousInputsAndOutputsEntirelyMaterialized() {
    Components components = Components.newBuilder()
        .putCoders("coder", Coder.newBuilder().build())
        .putCoders("windowCoder", Coder.newBuilder().build())
        .putWindowingStrategies(
            "ws", WindowingStrategy.newBuilder().setWindowCoderId("windowCoder").build())
        .putTransforms("pyImpulse",
            PTransform.newBuilder()
                .setUniqueName("PyImpulse")
                .putOutputs("output", "pyImpulse.out")
                .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.IMPULSE_TRANSFORM_URN))
                .build())
        .putPcollections("pyImpulse.out", pc("pyImpulse.out"))
        .putTransforms("pyRead",
            PTransform.newBuilder()
                .setUniqueName("PyRead")
                .putInputs("input", "pyImpulse.out")
                .putOutputs("output", "pyRead.out")
                .setSpec(FunctionSpec.newBuilder()
                    .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                    .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
                .setEnvironmentId("py")
                .build())
        .putPcollections("pyRead.out", pc("pyRead.out"))
        .putTransforms("goImpulse",
            PTransform.newBuilder()
                .setUniqueName("GoImpulse")
                .putOutputs("output", "goImpulse.out")
                .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.IMPULSE_TRANSFORM_URN))
                .build())
        .putPcollections("goImpulse.out", pc("goImpulse.out"))
        .putTransforms("goRead",
            PTransform.newBuilder()
                .setUniqueName("GoRead")
                .putInputs("input", "goImpulse.out")
                .putOutputs("output", "goRead.out")
                .setSpec(FunctionSpec.newBuilder()
                    .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                    .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
                .setEnvironmentId("go")
                .build())
        .putPcollections("goRead.out", pc("goRead.out"))
        .putTransforms("flatten",
            PTransform.newBuilder()
                .setUniqueName("Flatten")
                .putInputs("goReadInput", "goRead.out")
                .putInputs("pyReadInput", "pyRead.out")
                .putOutputs("output", "flatten.out")
                .setSpec(FunctionSpec.newBuilder().setUrn(PTransformTranslation.FLATTEN_TRANSFORM_URN))
                .build())
        .putPcollections("flatten.out", pc("flatten.out"))
        .putTransforms("pyParDo",
            PTransform.newBuilder()
                .setUniqueName("PyParDo")
                .putInputs("input", "flatten.out")
                .putOutputs("output", "pyParDo.out")
                .setSpec(FunctionSpec.newBuilder()
                    .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                    .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
                .setEnvironmentId("py")
                .build())
        .putPcollections("pyParDo.out", pc("pyParDo.out"))
        .putTransforms("goParDo",
            PTransform.newBuilder()
                .setUniqueName("GoParDo")
                .putInputs("input", "flatten.out")
                .putOutputs("output", "goParDo.out")
                .setSpec(FunctionSpec.newBuilder()
                    .setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
                    .setPayload(ParDoPayload.newBuilder().setDoFn(FunctionSpec.newBuilder()).build().toByteString()))
                .setEnvironmentId("go")
                .build())
        .putPcollections("goParDo.out", pc("goParDo.out"))
        .putEnvironments("go", Environments.createDockerEnvironment("go"))
        .putEnvironments("py", Environments.createDockerEnvironment("py"))
        .build();
    FusedPipeline fused =
        GreedyPipelineFuser.fuse(Pipeline.newBuilder().setComponents(components).build());

    // Fusion leaves exactly three runner-executed transforms: both impulses plus the synthetic flatten.
    assertThat(fused.getRunnerExecutedTransforms(), hasSize(3));
    assertThat(
        "The runner should include the impulses for both languages, plus an introduced flatten",
        fused.getRunnerExecutedTransforms(),
        hasItems(
            PipelineNode.pTransform("pyImpulse", components.getTransformsOrThrow("pyImpulse")),
            PipelineNode.pTransform("goImpulse", components.getTransformsOrThrow("goImpulse"))));

    // Locate the synthetic flatten: the runner transform whose only output is "flatten.out".
    PTransformNode flattenNode = null;
    for (PTransformNode runnerTransform : fused.getRunnerExecutedTransforms()) {
        if (getOnlyElement(runnerTransform.getTransform().getOutputsMap().values()).equals("flatten.out")) {
            flattenNode = runnerTransform;
        }
    }
    assertThat(flattenNode, not(nullValue()));
    assertThat(flattenNode.getTransform().getSpec().getUrn(), equalTo(PTransformTranslation.FLATTEN_TRANSFORM_URN));
    assertThat(new HashSet<>(flattenNode.getTransform().getInputsMap().values()), hasSize(2));

    // Each read stage must materialize one of the synthetic PCollections feeding the introduced flatten.
    Collection<String> introducedOutputs = flattenNode.getTransform().getInputsMap().values();
    AnyOf<String> anyIntroducedPCollection =
        anyOf(introducedOutputs.stream().map(Matchers::equalTo).collect(Collectors.toSet()));
    assertThat(
        fused.getFusedStages(),
        containsInAnyOrder(
            ExecutableStageMatcher.withInput("goImpulse.out")
                .withOutputs(anyIntroducedPCollection)
                .withTransforms("goRead", "flatten"),
            ExecutableStageMatcher.withInput("pyImpulse.out")
                .withOutputs(anyIntroducedPCollection)
                .withTransforms("pyRead", "flatten"),
            ExecutableStageMatcher.withInput("flatten.out").withNoOutputs().withTransforms("goParDo"),
            ExecutableStageMatcher.withInput("flatten.out").withNoOutputs().withTransforms("pyParDo")));

    // The only materialized stage outputs are the synthetic flatten inputs collected above.
    Set<String> materializedStageOutputs =
        fused.getFusedStages().stream()
            .flatMap(executableStage -> executableStage.getOutputPCollections().stream())
            .map(PCollectionNode::getId)
            .collect(Collectors.toSet());
    assertThat(
        "All materialized stage outputs should be flattened, and no more",
        materializedStageOutputs,
        containsInAnyOrder(flattenNode.getTransform().getInputsMap().values().toArray(new String[0])));
}
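The Components builder above calls a pc(...) helper that is defined elsewhere in GreedyPipelineFuserTest and not shown on this page. A plausible sketch, assuming it only builds a RunnerApi.PCollection proto wired to the "coder" and "ws" ids registered above (the actual helper may differ):

// Hypothetical reconstruction of the pc(...) helper referenced above; it is not shown on
// this page, so this is an assumption about its shape, not the original implementation.
private static RunnerApi.PCollection pc(String name) {
    return RunnerApi.PCollection.newBuilder()
        .setUniqueName(name)
        .setCoderId("coder")
        .setWindowingStrategyId("ws")
        .setIsBounded(RunnerApi.IsBounded.Enum.BOUNDED)
        .build();
}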