
Example 56 with SPLStream

Use of com.ibm.streamsx.topology.spl.SPLStream in project streamsx.kafka by IBMStreams.

From the class KafkaOperatorsIntTypeTest, method kafkaIntTypeTest:

@Test
public void kafkaIntTypeTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema schema = KafkaSPLStreamsUtils.INT_SCHEMA;
    // create the producer (produces tuples after a short delay)
    TStream<Integer> srcStream = topo.strings(DATA).transform(s -> Integer.valueOf(s)).modify(new Delay<>(5000));
    SPLStream splSrcStream = SPLStreams.convertStream(srcStream, new Converter(), schema);
    SPL.invokeSink(Constants.KafkaProducerOp, splSrcStream, getKafkaParams());
    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), schema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> String.valueOf(t.getInt("message"))));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used : TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) BiFunction(com.ibm.streamsx.topology.function.BiFunction) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) StreamSchema(com.ibm.streams.operator.StreamSchema) OutputTuple(com.ibm.streams.operator.OutputTuple) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
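
The Converter passed to SPLStreams.convertStream above is not shown in this excerpt. A minimal sketch of such a converter, assuming INT_SCHEMA carries a single int32 attribute named "message" (the name the consumer side reads with getInt("message")), could look like this; it is an illustration, not the project's actual class:

    // Hypothetical converter: copies each Integer into the outgoing
    // tuple's "message" attribute and returns the populated tuple.
    private static class Converter implements BiFunction<Integer, OutputTuple, OutputTuple> {
        private static final long serialVersionUID = 1L;

        @Override
        public OutputTuple apply(Integer value, OutputTuple outTuple) {
            outTuple.setInt("message", value);
            return outTuple;
        }
    }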

Example 57 with SPLStream

Use of com.ibm.streamsx.topology.spl.SPLStream in project streamsx.kafka by IBMStreams.

From the class KafkaOperatorsNoKey, method kafkaNoKeyTest:

@Test
public void kafkaNoKeyTest() throws Exception {
    Topology topo = getTopology();
    // create the producer (produces tuples after a short delay)
    TStream<String> stringSrcStream = topo.strings(Constants.STRING_DATA).modify(new Delay<>(5000));
    SPL.invokeSink(Constants.KafkaProducerOp, KafkaSPLStreamsUtils.convertStreamToKafkaTuple(stringSrcStream, false), getKafkaParams());
    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), KafkaSPLStreamsUtils.STRING_NOKEY_SCHEMA);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> t.getString("message")));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, Constants.STRING_DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used : TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
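
KafkaSPLStreamsUtils.convertStreamToKafkaTuple is a test utility whose source is not part of this excerpt. A sketch of what such a helper might look like, built on SPLStreams.convertStream and assuming a tuple<rstring key, rstring message> schema when a key is requested and tuple<rstring message> otherwise; the schema and attribute names are assumptions:

    // Hypothetical equivalent of the test helper: wraps each String in an
    // SPL tuple with a "message" attribute and, when includeKey is true,
    // a "key" attribute as well.
    public static SPLStream convertStreamToKafkaTuple(TStream<String> stream, boolean includeKey) {
        StreamSchema schema = com.ibm.streams.operator.Type.Factory.getStreamSchema(
                includeKey ? "tuple<rstring key, rstring message>" : "tuple<rstring message>");
        return SPLStreams.convertStream(stream, (String s, OutputTuple out) -> {
            out.setString("message", s);
            if (includeKey)
                out.setString("key", s); // assumed: the key simply mirrors the message
            return out;
        }, schema);
    }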

Example 58 with SPLStream

Use of com.ibm.streamsx.topology.spl.SPLStream in project streamsx.kafka by IBMStreams.

From the class KafkaProducerFanOutTest, method kafkaFanOutTest:

@Test
public void kafkaFanOutTest() throws Exception {
    Topology topo = getTopology();
    // create the producers (produces tuples after a short delay)
    TStream<String> stringSrcStream = topo.strings(Constants.STRING_DATA).modify(new Delay<>(5000)).lowLatency();
    SPL.invokeSink(Constants.KafkaProducerOp, KafkaSPLStreamsUtils.convertStreamToKafkaTuple(stringSrcStream), getKafkaParams());
    SPL.invokeSink(Constants.KafkaProducerOp, KafkaSPLStreamsUtils.convertStreamToKafkaTuple(stringSrcStream), getKafkaParams());
    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), KafkaSPLStreamsUtils.STRING_SCHEMA);
    SPLStream msgStream = SPLStreams.stringToSPLStream(consumerStream.convert(t -> t.getString("message")));
    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    // both producers are sending the same data, so each result is duplicated
    String[] expectedArr = KafkaSPLStreamsUtils.duplicateArrayEntries(Constants.STRING_DATA, 2);
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, expectedArr);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used : TStream(com.ibm.streamsx.topology.TStream) Tester(com.ibm.streamsx.topology.tester.Tester) Delay(com.ibm.streamsx.kafka.test.utils.Delay) StreamsContextFactory(com.ibm.streamsx.topology.context.StreamsContextFactory) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) HashMap(java.util.HashMap) Test(org.junit.Test) KafkaSPLStreamsUtils(com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Topology(com.ibm.streamsx.topology.Topology) StreamsContext(com.ibm.streamsx.topology.context.StreamsContext) Constants(com.ibm.streamsx.kafka.test.utils.Constants) Map(java.util.Map) SPL(com.ibm.streamsx.topology.spl.SPL) Condition(com.ibm.streamsx.topology.tester.Condition) Type(com.ibm.streamsx.topology.context.StreamsContext.Type) SPLStreams(com.ibm.streamsx.topology.spl.SPLStreams) Assert(org.junit.Assert)
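
Because both producers publish the same data, the expected list contains every value twice. A plain-Java sketch of what duplicateArrayEntries plausibly does (an assumption; the utility's source is not shown in this excerpt):

    // Hypothetical helper: repeat each expected value 'copies' times so the
    // unordered-contents condition accounts for both producers.
    public static String[] duplicateArrayEntries(String[] data, int copies) {
        String[] result = new String[data.length * copies];
        for (int i = 0; i < data.length; i++) {
            for (int c = 0; c < copies; c++) {
                result[i * copies + c] = data[i];
            }
        }
        return result;
    }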

Example 59 with SPLStream

Use of com.ibm.streamsx.topology.spl.SPLStream in project streamsx.kafka by IBMStreams.

From the class KafkaProducerPartitionAttrTest, method kafkaProducerPartitionAttrTest:

@Test
public void kafkaProducerPartitionAttrTest() throws Exception {
    Topology topo = getTopology();
    topo.addFileDependency("etc/custom_partitioner.properties", "etc");
    topo.addFileDependency("etc/custompartitioner.jar", "etc");
    // create producer
    TStream<Message<Integer, String>> src = topo.limitedSource(new MySupplier(), 9).modify(new Delay<>(Constants.PRODUCER_DELAY));
    SPLStream outStream = SPLStreams.convertStream(src, new MessageConverter(), PRODUCER_SCHEMA);
    SPL.invokeSink(Constants.KafkaProducerOp, outStream, getKafkaProducerParams());
    // create the consumers
    SPLStream msgStream1 = createConsumer(topo, PARTITION_NUM);
    TStream<String> unionStream = msgStream1.transform(t -> t.getString("message"));
    SPLStream msgStream = SPLStreams.stringToSPLStream(unionStream);
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    String[] expectedArr = { "A0", "B1", "C2", "A3", "B4", "C5", "A6", "B7", "C8" };
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, expectedArr);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);
    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Also used : Tester(com.ibm.streamsx.topology.tester.Tester) Message(com.ibm.streamsx.kafka.test.utils.Message) Topology(com.ibm.streamsx.topology.Topology) SPLStream(com.ibm.streamsx.topology.spl.SPLStream) List(java.util.List) Test(org.junit.Test)
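
MySupplier and MessageConverter are not shown in this excerpt. A hypothetical supplier that would yield the expected values "A0" through "C8", using com.ibm.streamsx.topology.function.Supplier and assuming Message exposes a (key, value) constructor, might look like this; the key choice is purely illustrative:

    // Hypothetical supplier: emits nine messages "A0".."C8"; the integer
    // key cycling over 0..2 is an assumption about how partitions are chosen.
    private static class MySupplier implements Supplier<Message<Integer, String>> {
        private static final long serialVersionUID = 1L;
        private final String[] prefixes = { "A", "B", "C" };
        private int counter = 0;

        @Override
        public Message<Integer, String> get() {
            int i = counter++;
            String value = prefixes[i % 3] + i;
            return new Message<Integer, String>(i % 3, value);
        }
    }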

Example 60 with SPLStream

Use of com.ibm.streamsx.topology.spl.SPLStream in project streamsx.topology by IBMStreams.

From the class HTTPStreams, method getJSON:

/**
 * Periodically poll a web service using HTTP {@code GET} for
 * {@code application/json} data. Declares a source stream that will contain
 * a single tuple for each successful {@code GET}. The tuple is the complete
 * JSON ({@code application/json} content) returned by the request.
 *
 * @param te
 *            Topology the source stream will be contained in.
 * @param url
 *            URL to poll.
 * @param period
 *            Polling period.
 * @param unit
 *            Unit for {@code period}.
 * @return Stream that will contain the JSON tuples from periodic HTTP
 *         {@code GET} requests.
 */
public static TStream<JSONObject> getJSON(TopologyElement te, String url, long period, TimeUnit unit) {
    Map<String, Object> params = new HashMap<>();
    params.put("url", url);
    double dperiod = (unit.toMillis(period) / 1000.0);
    params.put("period", dperiod);
    SPLStream rawJson = SPL.invokeSource(te, "com.ibm.streamsx.inet.http::HTTPGetJSONContent", params, JSONSchemas.JSON);
    TStream<String> string = SPLStreams.toStringStream(rawJson);
    return JSONStreams.deserialize(string);
}
Also used : HashMap(java.util.HashMap) JSONObject(com.ibm.json.java.JSONObject) SPLStream(com.ibm.streamsx.topology.spl.SPLStream)
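
A short usage sketch for getJSON; the endpoint URL, polling interval, and submission context are illustrative only, and HTTPStreams is assumed to be imported from the streamsx.topology inet package:

    // Poll a (hypothetical) endpoint every 30 seconds and print each JSON tuple.
    Topology topo = new Topology("jsonPoller");
    TStream<JSONObject> json = HTTPStreams.getJSON(topo, "http://example.com/data.json", 30, TimeUnit.SECONDS);
    json.print();
    // Submit the topology; any other StreamsContext type would also work.
    StreamsContextFactory.getStreamsContext(StreamsContext.Type.DISTRIBUTED).submit(topo);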

Aggregations

SPLStream (com.ibm.streamsx.topology.spl.SPLStream): 86
Topology (com.ibm.streamsx.topology.Topology): 66
Test (org.junit.Test): 56
TestTopology (com.ibm.streamsx.topology.test.TestTopology): 49
Tester (com.ibm.streamsx.topology.tester.Tester): 40
List (java.util.List): 38
HashMap (java.util.HashMap): 33
StreamSchema (com.ibm.streams.operator.StreamSchema): 25
OutputTuple (com.ibm.streams.operator.OutputTuple): 20
Tuple (com.ibm.streams.operator.Tuple): 19
Map (java.util.Map): 17
TStream (com.ibm.streamsx.topology.TStream): 15
SPL (com.ibm.streamsx.topology.spl.SPL): 14
Condition (com.ibm.streamsx.topology.tester.Condition): 14
TimeUnit (java.util.concurrent.TimeUnit): 14
Constants (com.ibm.streamsx.kafka.test.utils.Constants): 12
KafkaSPLStreamsUtils (com.ibm.streamsx.kafka.test.utils.KafkaSPLStreamsUtils): 12
StreamsContext (com.ibm.streamsx.topology.context.StreamsContext): 12
Type (com.ibm.streamsx.topology.context.StreamsContext.Type): 12
StreamsContextFactory (com.ibm.streamsx.topology.context.StreamsContextFactory): 12