Search in sources:

Example 76 with Tester

Use of com.ibm.streamsx.topology.tester.Tester in project streamsx.topology by IBMStreams.

From the class MqttStreamsTest, method completeAndValidateUnordered:

// would be nice if Tester provided this too
private void completeAndValidateUnordered(String msg, Topology topology, TStream<String> stream, int secTimeout, String... expected) throws Exception {
    Tester tester = topology.getTester();
    Condition<Long> sCount = tester.tupleCount(stream, expected.length);
    Condition<List<String>> sContents = tester.stringContentsUnordered(stream, expected);
    complete(tester, sCount, secTimeout, TimeUnit.SECONDS);
    assertTrue(msg + " contents:" + sContents, sContents.valid());
    assertTrue("valid:" + sCount, sCount.valid());
}
Also used: Tester(com.ibm.streamsx.topology.tester.Tester) ArrayList(java.util.ArrayList) List(java.util.List)
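
A minimal usage sketch for the helper above; the topology name, stream contents, and expected values here are hypothetical, chosen only to show the call shape.

@Test
public void publishUnorderedResults() throws Exception {
    Topology topology = new Topology("unorderedSample");
    // Hypothetical stream whose arrival order is not guaranteed.
    TStream<String> results = topology.strings("a", "b", "c");
    // Wait up to 30 seconds for three tuples, then compare contents ignoring order.
    completeAndValidateUnordered("unorderedSample", topology, results, 30, "a", "b", "c");
}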

Example 77 with Tester

Use of com.ibm.streamsx.topology.tester.Tester in project streamsx.kafka by IBMStreams.

From the class KafkaConsumerFanInTest, method kafkaFanInTest:

@Test
public void kafkaFanInTest() throws Exception {
    Topology topo = getTopology();
    // create the producer (produces tuples after a short delay)
    TStream<String> stringSrcStream = topo.strings(Constants.STRING_DATA).modify(new Delay<>(5000));
    SPL.invokeSink(Constants.KafkaProducerOp, KafkaSPLStreamsUtils.convertStreamToKafkaTuple(stringSrcStream), getKafkaParams());
    // create the consumer
    SPLStream consumerStream1 = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getConsumerParams(1), KafkaSPLStreamsUtils.STRING_SCHEMA);
    SPLStream consumerStream2 = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getConsumerParams(2), KafkaSPLStreamsUtils.STRING_SCHEMA);
    SPLStream unionStream = KafkaSPLStreamsUtils.union(Arrays.asList(consumerStream1, consumerStream2), KafkaSPLStreamsUtils.STRING_SCHEMA);
    SPLStream msgStream = SPLStreams.stringToSPLStream(unionStream.convert(t -> t.getString("message")));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    // both consumers consume the same data, so each result is duplicated
    String[] expectedArr = KafkaSPLStreamsUtils.duplicateArrayEntries(Constants.STRING_DATA, 2);
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, expectedArr);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used: TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Arrays(java.util.Arrays) Delay(com.ibm.streamsx.kafka.test.utils.Delay) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
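
The expected array is built by duplicating every input entry once per consumer. A minimal sketch of a helper equivalent to KafkaSPLStreamsUtils.duplicateArrayEntries, inferred only from the call site above (the real utility in streamsx.kafka may differ):

// Assumed equivalent of KafkaSPLStreamsUtils.duplicateArrayEntries(data, copies):
// returns an array containing each entry of data repeated 'copies' times.
private static String[] duplicateArrayEntries(String[] data, int copies) {
    String[] result = new String[data.length * copies];
    for (int i = 0; i < copies; i++) {
        // Order does not matter here because the test checks contents unordered.
        System.arraycopy(data, 0, result, i * data.length, data.length);
    }
    return result;
}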

Example 78 with Tester

Use of com.ibm.streamsx.topology.tester.Tester in project streamsx.kafka by IBMStreams.

From the class KafkaOperatorsAttrNameParamsTest, method kafkaAttrNameParamsTest:

@Test
public void kafkaAttrNameParamsTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema producerSchema = com.ibm.streams.operator.Type.Factory.getStreamSchema("tuple<int32 " + PROD_KEY_ATTR_NAME + ", rstring " + PROD_MSG_ATTR_NAME + ", rstring " + PROD_TOPIC_ATTR_NAME + ", int32 " + PROD_PARTITION_ATTR_NAME + ">");
    // create the producer (produces tuples after a short delay)
    Map<String, Object> producerProps = new HashMap<>();
    producerProps.put("propertiesFile", Constants.PROPERTIES_FILE_PATH);
    producerProps.put("messageAttribute", producerSchema.getAttribute(PROD_MSG_ATTR_NAME));
    producerProps.put("keyAttribute", producerSchema.getAttribute(PROD_KEY_ATTR_NAME));
    producerProps.put("topicAttribute", producerSchema.getAttribute(PROD_TOPIC_ATTR_NAME));
    producerProps.put("partitionAttribute", producerSchema.getAttribute(PROD_PARTITION_ATTR_NAME));
    TStream<String> srcStream = topo.strings(MSG).modify(new Delay<>(5000));
    SPL.invokeSink(Constants.KafkaProducerOp, SPLStreams.convertStream(srcStream, new ProducerConverter(), producerSchema), producerProps);
    // create the consumer
    StreamSchema consumerSchema = com.ibm.streams.operator.Type.Factory.getStreamSchema("tuple<int32 " + CONS_KEY_ATTR_NAME + ", rstring " + CONS_MSG_ATTR_NAME + ", rstring " + CONS_TOPIC_ATTR_NAME + ">");
    Map<String, Object> consumerProps = new HashMap<String, Object>();
    consumerProps.put("propertiesFile", Constants.PROPERTIES_FILE_PATH);
    consumerProps.put("outputMessageAttributeName", CONS_MSG_ATTR_NAME);
    consumerProps.put("outputKeyAttributeName", CONS_KEY_ATTR_NAME);
    consumerProps.put("outputTopicAttributeName", CONS_TOPIC_ATTR_NAME);
    consumerProps.put("topic", Constants.TOPIC_TEST);
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, consumerProps, consumerSchema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> {
        return t.getString(CONS_TOPIC_ATTR_NAME) + ":" + t.getInt(CONS_KEY_ATTR_NAME) + ":" + t.getString(CONS_MSG_ATTR_NAME);
    }));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, Constants.TOPIC_TEST + ":" + KEY + ":" + MSG);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used: TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) BiFunction(com.ibm.streamsx.topology.function.BiFunction) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) StreamSchema(com.ibm.streams.operator.StreamSchema) OutputTuple(com.ibm.streams.operator.OutputTuple) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
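
SPLStreams.convertStream pairs each input tuple with a pre-allocated OutputTuple for the target schema, so ProducerConverter is a BiFunction that fills in the producer attributes. A hedged sketch of what it might look like; the fixed key and partition values are assumptions for illustration, not the project's actual implementation:

private static class ProducerConverter implements BiFunction<String, OutputTuple, OutputTuple> {
    private static final long serialVersionUID = 1L;

    @Override
    public OutputTuple apply(String message, OutputTuple outTuple) {
        outTuple.setInt(PROD_KEY_ATTR_NAME, KEY);                 // assumed: fixed test key
        outTuple.setString(PROD_MSG_ATTR_NAME, message);          // the payload string
        outTuple.setString(PROD_TOPIC_ATTR_NAME, Constants.TOPIC_TEST);
        outTuple.setInt(PROD_PARTITION_ATTR_NAME, 0);             // assumed: single test partition
        return outTuple;
    }
}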

Example 79 with Tester

Use of com.ibm.streamsx.topology.tester.Tester in project streamsx.kafka by IBMStreams.

From the class KafkaOperatorsIntTypeTest, method kafkaIntTypeTest:

@Test
public void kafkaIntTypeTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema schema = KafkaSPLStreamsUtils.INT_SCHEMA;
    // create the producer (produces tuples after a short delay)
    TStream<Integer> srcStream = topo.strings(DATA).transform(s -> Integer.valueOf(s)).modify(new Delay<>(5000));
    SPLStream splSrcStream = SPLStreams.convertStream(srcStream, new Converter(), schema);
    SPL.invokeSink(Constants.KafkaProducerOp, splSrcStream, getKafkaParams());
    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), schema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> String.valueOf(t.getInt("message"))));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used: TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) BiFunction(com.ibm.streamsx.topology.function.BiFunction) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) StreamSchema(com.ibm.streams.operator.StreamSchema) OutputTuple(com.ibm.streams.operator.OutputTuple) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
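
Here the Converter only has to populate the integer message value before the producer writes it to Kafka. A minimal sketch assuming KafkaSPLStreamsUtils.INT_SCHEMA exposes an int32 "message" attribute (the consumer side reads it back with getInt("message")); any key attribute in the real schema is left out:

private static class Converter implements BiFunction<Integer, OutputTuple, OutputTuple> {
    private static final long serialVersionUID = 1L;

    @Override
    public OutputTuple apply(Integer value, OutputTuple outTuple) {
        // "message" matches the attribute name read back by the consumer above.
        outTuple.setInt("message", value);
        return outTuple;
    }
}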

Example 80 with Tester

Use of com.ibm.streamsx.topology.tester.Tester in project streamsx.kafka by IBMStreams.

From the class KafkaOperatorsNoKey, method kafkaNoKeyTest:

@Test
public void kafkaNoKeyTest() throws Exception {
    Topology topo = getTopology();
    // create the producer (produces tuples after a short delay)
    TStream<String> stringSrcStream = topo.strings(Constants.STRING_DATA).modify(new Delay<>(5000));
    SPL.invokeSink(Constants.KafkaProducerOp, KafkaSPLStreamsUtils.convertStreamToKafkaTuple(stringSrcStream, false), getKafkaParams());
    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), KafkaSPLStreamsUtils.STRING_NOKEY_SCHEMA);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> t.getString("message")));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, Constants.STRING_DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used: TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
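
These Kafka tests all pass the same operator parameters through getKafkaParams(). A hypothetical shape for that helper, mirroring the explicit parameter maps in the examples above (the actual implementation in streamsx.kafka may set additional values):

private Map<String, Object> getKafkaParams() {
    Map<String, Object> params = new HashMap<>();
    params.put("propertiesFile", Constants.PROPERTIES_FILE_PATH);  // broker connection properties
    params.put("topic", Constants.TOPIC_TEST);                     // assumed: shared test topic
    return params;
}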

Aggregations

Tester (com.ibm.streamsx.topology.tester.Tester): 82
Topology (com.ibm.streamsx.topology.Topology): 75
Test (org.junit.Test): 74
List (java.util.List): 64
TestTopology (com.ibm.streamsx.topology.test.TestTopology): 60
SPLStream (com.ibm.streamsx.topology.spl.SPLStream): 34
ArrayList (java.util.ArrayList): 28
TStream (com.ibm.streamsx.topology.TStream): 22
HashMap (java.util.HashMap): 22
Map (java.util.Map): 15
Condition (com.ibm.streamsx.topology.tester.Condition): 14
StreamsContext (com.ibm.streamsx.topology.context.StreamsContext): 13
StreamsContextFactory (com.ibm.streamsx.topology.context.StreamsContextFactory): 13
Random (java.util.Random): 13
TimeUnit (java.util.concurrent.TimeUnit): 13
OutputTuple (com.ibm.streams.operator.OutputTuple): 12
StreamSchema (com.ibm.streams.operator.StreamSchema): 12
Tuple (com.ibm.streams.operator.Tuple): 12
Constants (com.ibm.streamsx.kafka.test.utils.Constants): 12
KafkaSPLStreamsUtils (com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils): 12