use of com.ibm.streamsx.topology.tuple.Message in project streamsx.topology by IBMStreams.
the class KafkaProducer method publish.
/**
* Publish a stream of messages to a topic.
* <p>
* If {@code topic} is null, each tuple is published to the topic
* specified by its {@link Message#getTopic()}.
* Otherwise, all tuples are published to {@code topic}.
* <p>
* The messages added to Kafka include a topic, message and key.
* If {@link Message#getKey()} is null, an empty key value is published.
* <p>
* N.B. there seem to be some issues with the underlying
* com.ibm.streamsx.messaging library - e.g.,
* <a href="https://github.com/IBMStreams/streamsx.messaging/issues/118">issue#118</a>.
* If your application is experiencing odd Kafka behavior,
* try isolating the producer from its feeding streams, e.g.:
* <pre>
* KafkaProducer pc = ...
* TStream<Message> s = ...
* pc.publish(s.isolate(), ...);
* </pre>
*
* @param stream the stream to publish
* @param topic topic to publish to. May be null.
* @return the sink element
*
* @throws IllegalArgumentException if a non-null empty {@code topic} is specified.
*/
public TSink publish(TStream<? extends Message> stream, Supplier<String> topic) {
    stream = stream.lowLatency();
    @SuppressWarnings("unchecked")
    SPLStream splStream = SPLStreams.convertStream((TStream<Message>) stream, cvtMsgFunc(topic), KafkaSchemas.KAFKA);
    Map<String, Object> params = new HashMap<String, Object>();
    if (!config.isEmpty())
        params.put("kafkaProperty", Util.toKafkaProperty(config));
    if (topic == null)
        params.put("topicAttribute", "topic");
    else
        params.put("topic", topic);
    // workaround streamsx.messaging issue #107
    params.put("propertiesFile", PROP_FILE_PARAM);
    addPropertiesFile();
    // Use SPL.invoke to avoid adding a compile time dependency
    // to com.ibm.streamsx.messaging since JavaPrimitive.invoke*()
    // lack "kind" based variants.
    String kind = "com.ibm.streamsx.messaging.kafka::KafkaProducer";
    String className = "com.ibm.streamsx.messaging.kafka.KafkaSink";
    TSink sink = SPL.invokeSink(kind, splStream, params);
    SPL.tagOpAsJavaPrimitive(sink.operator(), kind, className);
    return sink;
}
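As a usage illustration of the two routing modes described in the Javadoc, here is a minimal, hypothetical sketch; the Topology top, the producerConfig map, the "orders" topic name, and the TStream<Message> msgs are assumptions for illustration, not part of the project code.
    // Hypothetical usage sketch; "top", "producerConfig" and "msgs" are assumed to exist.
    KafkaProducer producer = new KafkaProducer(top, producerConfig);
    // Explicit topic: every tuple is published to "orders", regardless of Message.getTopic().
    producer.publish(msgs, new Value<String>("orders"));
    // Implicit topic: with no topic supplied, each tuple is routed to its own Message.getTopic().
    producer.publish(msgs.isolate());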
use of com.ibm.streamsx.topology.tuple.Message in project streamsx.topology by IBMStreams.
the class KafkaSample method publishSubscribe.
/**
* Publish some messages to a topic, subscribe to the topic, and report
* the received messages.
* @param contextType string value of a {@code StreamsContext.Type}
* @throws Exception
*/
public void publishSubscribe(String contextType) throws Exception {
    setupConfig();
    identifyStreamsxMessagingVer();
    Topology top = new Topology("kafkaSample");
    String groupId = newGroupId(top.getName());
    Supplier<String> topic = new Value<String>(TOPIC);
    KafkaProducer producer = new KafkaProducer(top, createProducerConfig());
    KafkaConsumer consumer = new KafkaConsumer(top, createConsumerConfig(groupId));
    TStream<Message> msgs = makeStreamToPublish(top);
    // for the sample, give the consumer a chance to become ready
    msgs = msgs.modify(initialDelayFunc(PUB_DELAY_MSEC));
    producer.publish(msgs, topic);
    TStream<Message> rcvdMsgs = consumer.subscribe(topic);
    // show what we received
    rcvdMsgs.print();
    // Execute the topology, to send and receive the messages.
    Future<?> future = StreamsContextFactory.getStreamsContext(contextType).submit(top, config);
    if (contextType.contains("DISTRIBUTED")) {
        System.out.println("\nSee the job's PE console logs for the topology output.\n");
    } else if (contextType.contains("STANDALONE") || contextType.contains("EMBEDDED")) {
        Thread.sleep(15000);
        future.cancel(true);
    }
}
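A hypothetical driver for this sample; the no-argument KafkaSample constructor and the default context type below are assumptions for illustration only.
    public static void main(String[] args) throws Exception {
        // e.g., "DISTRIBUTED", "STANDALONE" or "BUNDLE" (a StreamsContext.Type name)
        String contextType = args.length > 0 ? args[0] : "STANDALONE";
        new KafkaSample().publishSubscribe(contextType);
    }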
use of com.ibm.streamsx.topology.tuple.Message in project streamsx.topology by IBMStreams.
the class KafkaStreamsTest method testMultiTopicProducer.
@Test
public void testMultiTopicProducer() throws Exception {
    checkAssumes();
    // streamsx.messaging issue#118 prevents successful execution
    // For standalone it seems to consistently get 0 topic1 msgs.
    assumeTrue(getTesterType() != StreamsContext.Type.STANDALONE_TESTER);
    Topology top = new Topology("testMultiTopicProducer");
    MsgGenerator mgen = new MsgGenerator(top.getName());
    String groupId = newGroupId(top.getName());
    String[] topics = getKafkaTopics();
    String topic1Val = topics[0];
    String topic2Val = topics[1];
    Supplier<String> topic1 = new Value<String>(topic1Val);
    Supplier<String> topic2 = new Value<String>(topic2Val);
    KafkaProducer producer = new KafkaProducer(top, createProducerConfig());
    KafkaConsumer consumer = new KafkaConsumer(top, createConsumerConfig(groupId));
    // Test producer that publishes to multiple topics (implies implicit topic)
    List<Message> topic1Msgs = new ArrayList<>();
    topic1Msgs.add(new SimpleMessage(mgen.create(topic1Val, "Hello"), null, topic1Val));
    topic1Msgs.add(new SimpleMessage(mgen.create(topic1Val, "Are you there?"), null, topic1Val));
    List<Message> topic2Msgs = new ArrayList<>();
    topic2Msgs.add(new SimpleMessage(mgen.create(topic2Val, "Hello"), null, topic2Val));
    topic2Msgs.add(new SimpleMessage(mgen.create(topic2Val, "Are you there?"), null, topic2Val));
    List<Message> msgs = new ArrayList<>(topic1Msgs);
    msgs.addAll(topic2Msgs);
    TStream<Message> msgsToPublish = top.constants(msgs);
    msgsToPublish = msgsToPublish.modify(new InitialDelay<Message>(PUB_DELAY_MSEC));
    producer.publish(msgsToPublish);
    TStream<Message> rcvdTopic1Msgs = consumer.subscribe(topic1);
    TStream<Message> rcvdTopic2Msgs = consumer.subscribe(topic2);
    // for validation...
    TStream<Message> rcvdMsgs = rcvdTopic1Msgs.union(rcvdTopic2Msgs);
    rcvdMsgs.print();
    // just our msgs
    rcvdMsgs = selectMsgs(rcvdMsgs, mgen.pattern());
    TStream<String> rcvdAsString = rcvdMsgs.transform(msgToJSONStringFunc());
    List<String> expectedAsString = mapList(msgs, msgToJSONStringFunc());
    setupDebug();
    if (testBuildOnly(top))
        return;
    completeAndValidateUnordered(groupId, top, rcvdAsString, SEC_TIMEOUT, expectedAsString.toArray(new String[0]));
}
use of com.ibm.streamsx.topology.tuple.Message in project streamsx.topology by IBMStreams.
the class KafkaStreamsTest method testImplicitTopicProducer.
@Test
public void testImplicitTopicProducer() throws Exception {
    checkAssumes();
    Topology top = new Topology("testImplicitTopicProducer");
    MsgGenerator mgen = new MsgGenerator(top.getName());
    String groupId = newGroupId(top.getName());
    String topicVal = getKafkaTopics()[0];
    Supplier<String> topic = new Value<String>(topicVal);
    KafkaProducer producer = new KafkaProducer(top, createProducerConfig());
    KafkaConsumer consumer = new KafkaConsumer(top, createConsumerConfig(groupId));
    // Test producer that takes an arbitrary TStream<T> and implicit topic
    List<Vals> msgs = new ArrayList<>();
    msgs.add(new Vals(mgen.create(topicVal, "Hello"), null, null));
    msgs.add(new Vals(mgen.create(topicVal, "key1", "Are you there?"), "key1", null));
    TStream<Vals> valsToPublish = top.constants(msgs).asType(Vals.class);
    TStream<Message> msgsToPublish = valsToPublish.transform(msgFromValsFunc(topicVal));
    msgsToPublish = msgsToPublish.modify(new InitialDelay<Message>(PUB_DELAY_MSEC));
    producer.publish(msgsToPublish);
    TStream<Message> rcvdMsgs = consumer.subscribe(topic);
    // for validation...
    rcvdMsgs.print();
    // just our msgs
    rcvdMsgs = selectMsgs(rcvdMsgs, mgen.pattern());
    TStream<String> rcvdAsString = rcvdMsgs.transform(msgToJSONStringFunc());
    List<Message> expectedAsMessage = mapList(msgs, msgFromValsFunc(topicVal));
    List<String> expectedAsString = mapList(expectedAsMessage, msgToJSONStringFunc());
    setupDebug();
    if (testBuildOnly(top))
        return;
    completeAndValidate(groupId, top, rcvdAsString, SEC_TIMEOUT, expectedAsString.toArray(new String[0]));
}
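The msgFromValsFunc() helper used above is not shown on this page. Below is a hedged sketch of what such a Vals-to-Message conversion could look like; the Vals accessors (getMsg(), getKey()) are assumptions, while the SimpleMessage(message, key, topic) constructor is the one used elsewhere in these tests.
    // Hedged sketch only: the real msgFromValsFunc() and the Vals accessors are assumptions.
    private static Function<Vals, Message> msgFromValsFunc(final String topic) {
        return new Function<Vals, Message>() {
            private static final long serialVersionUID = 1L;
            @Override
            public Message apply(Vals v) {
                // With a non-null topic, each Message carries its own topic and publish()
                // can be called without a topic argument; with null, the topic must be
                // supplied to publish(stream, topic) instead.
                return new SimpleMessage(v.getMsg(), v.getKey(), topic);
            }
        };
    }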
use of com.ibm.streamsx.topology.tuple.Message in project streamsx.topology by IBMStreams.
the class KafkaStreamsTest method testReusableApp.
@Test
public void testReusableApp() throws Exception {
    // Threads per topic not supported in 4.2 onwards
    skipVersion("kafka.threadsPerTopic", 4, 2);
    checkAssumes();
    Topology top = new Topology("testReusableApp");
    MsgGenerator mgen = new MsgGenerator(top.getName());
    String groupId = newGroupId(top.getName());
    String topicVal = getKafkaTopics()[0];
    Supplier<String> topic = top.createSubmissionParameter("kafka.topic", topicVal);
    Supplier<Integer> threadsPerTopic = top.createSubmissionParameter("kafka.consumer.threadsPerTopic", 1);
    KafkaProducer producer = new KafkaProducer(top, createProducerConfig());
    KafkaConsumer consumer = new KafkaConsumer(top, createConsumerConfig(groupId));
    // Test producer that takes an arbitrary TStream<T> and explicit topic
    List<Vals> msgs = new ArrayList<>();
    msgs.add(new Vals(mgen.create(topicVal, "Hello"), null, null));
    msgs.add(new Vals(mgen.create(topicVal, "key1", "Are you there?"), "key1", null));
    msgs.add(new Vals(mgen.create(topicVal, "Msg with an empty key"), "", null));
    msgs.add(new Vals("", mgen.create(topicVal, null, "Msg with an empty msg (this is the key)"), null));
    TStream<Vals> valsToPublish = top.constants(msgs).asType(Vals.class);
    TStream<Message> msgsToPublish = valsToPublish.transform(msgFromValsFunc(null));
    msgsToPublish = msgsToPublish.modify(new InitialDelay<Message>(PUB_DELAY_MSEC));
    producer.publish(msgsToPublish, topic);
    TStream<Message> rcvdMsgs = consumer.subscribe(threadsPerTopic, topic);
    // for validation...
    rcvdMsgs.print();
    // just our msgs
    rcvdMsgs = selectMsgs(rcvdMsgs, mgen.pattern());
    TStream<String> rcvdAsString = rcvdMsgs.transform(msgToJSONStringFunc());
    List<Message> expectedAsMessage = mapList(msgs, msgFromValsFunc(topicVal));
    expectedAsMessage = modifyList(expectedAsMessage, adjustKey());
    List<String> expectedAsString = mapList(expectedAsMessage, msgToJSONStringFunc());
    setupDebug();
    if (testBuildOnly(top))
        return;
    completeAndValidate(groupId, top, rcvdAsString, SEC_TIMEOUT, expectedAsString.toArray(new String[0]));
}
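Because kafka.topic and kafka.consumer.threadsPerTopic are submission parameters, their defaults can be overridden when the application is submitted. Below is a hedged sketch of such an override, assuming the ContextProperties.SUBMISSION_PARAMS mechanism; the topic name and thread count are hypothetical values.
    // Hypothetical submission-time override of the parameters declared in testReusableApp.
    Map<String, Object> config = new HashMap<>();
    Map<String, Object> submissionParams = new HashMap<>();
    submissionParams.put("kafka.topic", "someOtherTopic");      // replaces the default topicVal
    submissionParams.put("kafka.consumer.threadsPerTopic", 2);  // replaces the default of 1
    config.put(ContextProperties.SUBMISSION_PARAMS, submissionParams);
    StreamsContextFactory.getStreamsContext("DISTRIBUTED").submit(top, config);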