Use of com.ibm.streamsx.topology.TStream in project streamsx.topology by IBMStreams.
The class IsolateTest, method unionIsolateTest:
@Test
public void unionIsolateTest() throws Exception {
    assumeTrue(SC_OK);
    assumeTrue(isMainRun());
    Topology topology = newTopology("unionIsolateTest");
    TStream<String> s1 = topology.strings("1");
    TStream<String> s2 = topology.strings("2");
    TStream<String> s3 = topology.strings("3");
    TStream<String> s4 = topology.strings("4");

    Set<TStream<String>> l = new HashSet<>();
    l.add(s1);
    l.add(s2);
    l.add(s3);
    l.add(s4);

    // Union the four single-tuple streams, then isolate the result into its own PE.
    TStream<String> n = s1.union(l).isolate();

    // Fan the isolated stream out to four sinks.
    n.print();
    n.print();
    n.print();
    n.print();

    Tester tester = topology.getTester();
    Condition<Long> expectedCount = tester.tupleCount(n, 4);
    Condition<List<String>> expectedContent = tester.stringContentsUnordered(n, "1", "2", "3", "4");

    // Submit to the TOOLKIT context only; the conditions above are not evaluated here.
    StreamsContextFactory.getStreamsContext(StreamsContext.Type.TOOLKIT).submit(topology).get();
    // assertTrue(expectedCount.valid());
    // assertTrue(expectedContent.valid());
}
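Because the topology is only submitted to the TOOLKIT context, the two conditions are created but never checked (the assertions remain commented out). A minimal sketch of how the same conditions could be evaluated instead, assuming a tester-capable context such as EMBEDDED_TESTER is usable for this topology; this variant is not part of the original test:

    // Hypothetical variant (not in the original test): run under a tester-capable
    // context so the conditions are actually evaluated.
    Tester tester = topology.getTester();
    Condition<Long> expectedCount = tester.tupleCount(n, 4);
    Condition<List<String>> expectedContent = tester.stringContentsUnordered(n, "1", "2", "3", "4");
    tester.complete(StreamsContextFactory.getStreamsContext(StreamsContext.Type.EMBEDDED_TESTER),
            new HashMap<>(), expectedCount, 10, TimeUnit.SECONDS);
    assertTrue(expectedCount.valid());
    assertTrue(expectedContent.valid());

tester.complete(...) drives the topology until the count condition is satisfied or the timeout expires, after which both conditions can be asserted; this is the same pattern the Kafka tests below use with DISTRIBUTED_TESTER.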
Use of com.ibm.streamsx.topology.TStream in project streamsx.topology by IBMStreams.
The class PublishSubscribeTest, method testPublishXML:
@Test
public void testPublishXML() throws Exception {
    TStream<String> source = source();

    // Wrap each test string in an <a> element.
    source = source.transform(s -> "<a>" + s + "</a>");

    // Convert the strings to SPL XML values and publish them.
    TStream<XML> xml = source.transform(v -> {
        try {
            return ValueFactory.newXML(new ByteArrayInputStream(v.getBytes(StandardCharsets.UTF_8)));
        } catch (IOException e) {
            return null;
        }
    }).asType(XML.class);
    xml.publish("testPublishXML");

    // Subscribe to the published topic and convert the XML values back to strings.
    TStream<XML> subscribe = source.topology().subscribe("testPublishXML", XML.class);
    TStream<String> strings = subscribe.transform(v -> {
        byte[] data = new byte[100];
        InputStream in = v.getInputStream();
        int read;
        try {
            read = in.read(data);
        } catch (IOException e) {
            return null;
        }
        return new String(data, 0, read, StandardCharsets.UTF_8);
    });

    // Strip the surrounding <a> and </a> tags before checking the content.
    strings = strings.transform(s -> s.substring(3, s.length() - 4));
    checkSubscribedAsStrings(strings);
}
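The helpers source() and checkSubscribedAsStrings(...) belong to the surrounding PublishSubscribeTest class and are not shown on this page; source() presumably produces a small stream of test strings. A hypothetical sketch of what a checker like checkSubscribedAsStrings could look like, assuming the expected values and a DISTRIBUTED_TESTER context (both are assumptions, not the project's actual helper):

    // Hypothetical sketch; the real PublishSubscribeTest helper may differ.
    void checkSubscribedAsStrings(TStream<String> strings) throws Exception {
        Tester tester = strings.topology().getTester();
        // EXPECTED_VALUES is assumed to match whatever source() produced.
        Condition<List<String>> content = tester.stringContentsUnordered(strings, EXPECTED_VALUES);
        tester.complete(StreamsContextFactory.getStreamsContext(StreamsContext.Type.DISTRIBUTED_TESTER),
                new HashMap<>(), content, 30, TimeUnit.SECONDS);
        assertTrue(content.toString(), content.valid());
    }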
Use of com.ibm.streamsx.topology.TStream in project streamsx.kafka by IBMStreams.
The class KafkaOperatorsBlobTypeTest, method kafkaBlobTypeTest:
@Test
public void kafkaBlobTypeTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema schema = KafkaSPLStreamsUtils.BLOB_SCHEMA;

    // create the producer (produces tuples after a short delay)
    TStream<Blob> srcStream = topo.strings(Constants.STRING_DATA)
            .transform(s -> ValueFactory.newBlob(s.getBytes()))
            .modify(new Delay<>(5000));
    SPLStream splSrcStream = SPLStreams.convertStream(srcStream, new Converter(), schema);
    SPL.invokeSink(Constants.KafkaProducerOp, splSrcStream, getKafkaParams());

    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), schema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(
            consumerStream.convert(t -> new String(t.getBlob("message").getData())));

    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, Constants.STRING_DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);

    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
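The Converter passed to SPLStreams.convertStream above is an inner class of the test that is not shown here. SPLStreams.convertStream takes a BiFunction that populates each SPL output tuple from the Java value, so a plausible sketch looks like the following; the class body is an assumption, and only the "message" attribute name is taken from the consumer side above:

    // Hypothetical sketch of the Converter used above; the real inner class may differ.
    private static class Converter implements BiFunction<Blob, OutputTuple, OutputTuple> {
        private static final long serialVersionUID = 1L;

        @Override
        public OutputTuple apply(Blob blob, OutputTuple outTuple) {
            // Copy the Blob payload into the "message" attribute of the SPL tuple.
            outTuple.setBlob("message", blob);
            return outTuple;
        }
    }

The Double and Float variants below follow the same pattern, writing the converted value into the "message" attribute that their consumers read back with getDouble("message") and getFloat("message").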
Use of com.ibm.streamsx.topology.TStream in project streamsx.kafka by IBMStreams.
The class KafkaOperatorsDoubleTypeTest, method kafkaDoubleTypeTest:
@Test
public void kafkaDoubleTypeTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema schema = KafkaSPLStreamsUtils.DOUBLE_SCHEMA;

    // create the producer (produces tuples after a short delay)
    TStream<Double> srcStream = topo.strings(DATA)
            .transform(s -> Double.valueOf(s))
            .modify(new Delay<>(5000));
    SPLStream splSrcStream = SPLStreams.convertStream(srcStream, new Converter(), schema);
    SPL.invokeSink(Constants.KafkaProducerOp, splSrcStream, getKafkaParams());

    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), schema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(
            consumerStream.convert(t -> String.valueOf(t.getDouble("message"))));

    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);

    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}
Use of com.ibm.streamsx.topology.TStream in project streamsx.kafka by IBMStreams.
The class KafkaOperatorsFloatTypeTest, method kafkaFloatTypeTest:
@Test
public void kafkaFloatTypeTest() throws Exception {
    Topology topo = getTopology();
    StreamSchema schema = KafkaSPLStreamsUtils.FLOAT_SCHEMA;

    // create the producer (produces tuples after a short delay)
    TStream<Float> srcStream = topo.strings(DATA)
            .transform(s -> Float.valueOf(s))
            .modify(new Delay<>(5000));
    SPLStream splSrcStream = SPLStreams.convertStream(srcStream, new Converter(), schema);
    SPL.invokeSink(Constants.KafkaProducerOp, splSrcStream, getKafkaParams());

    // create the consumer
    SPLStream consumerStream = SPL.invokeSource(topo, Constants.KafkaConsumerOp, getKafkaParams(), schema);
    SPLStream msgStream = SPLStreams.stringToSPLStream(
            consumerStream.convert(t -> String.valueOf(t.getFloat("message"))));

    // test the output of the consumer
    StreamsContext<?> context = StreamsContextFactory.getStreamsContext(Type.DISTRIBUTED_TESTER);
    Tester tester = topo.getTester();
    Condition<List<String>> condition = KafkaSPLStreamsUtils.stringContentsUnordered(tester, msgStream, DATA);
    tester.complete(context, new HashMap<>(), condition, 30, TimeUnit.SECONDS);

    // check the results
    Assert.assertTrue(condition.getResult().size() > 0);
    Assert.assertTrue(condition.getResult().toString(), condition.valid());
}