Use of kafka.message.Message in project storm by apache.
The class TestUtils, method verifyMessage.
public static boolean verifyMessage(String key, String message, KafkaTestBroker broker, SimpleConsumer simpleConsumer) {
    // Fetch the most recently published message from partition 0 of the test topic.
    long lastMessageOffset = KafkaUtils.getOffset(simpleConsumer, TestUtils.TOPIC, 0, OffsetRequest.LatestTime()) - 1;
    ByteBufferMessageSet messageAndOffsets = KafkaUtils.fetchMessages(TestUtils.getKafkaConfig(broker), simpleConsumer,
            new Partition(Broker.fromString(broker.getBrokerConnectionString()), TestUtils.TOPIC, 0), lastMessageOffset);
    MessageAndOffset messageAndOffset = messageAndOffsets.iterator().next();
    Message kafkaMessage = messageAndOffset.message();
    // The key may be null for unkeyed messages; the payload is always decoded.
    ByteBuffer messageKeyBuffer = kafkaMessage.key();
    String keyString = null;
    String messageString = new String(Utils.toByteArray(kafkaMessage.payload()));
    if (messageKeyBuffer != null) {
        keyString = new String(Utils.toByteArray(messageKeyBuffer));
    }
    assertEquals(key, keyString);
    assertEquals(message, messageString);
    return true;
}
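For context, the message being verified has to be published first. Below is a minimal sketch of that producing side, assuming the Kafka 0.8-era javaapi producer; the class name, broker address, topic, key, and payload are illustrative and not taken from the storm tests.
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class ProduceForVerify {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("metadata.broker.list", "localhost:9092"); // assumed broker address
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        props.put("key.serializer.class", "kafka.serializer.StringEncoder");
        Producer<String, String> producer = new Producer<String, String>(new ProducerConfig(props));
        // The key and payload must match the arguments later passed to verifyMessage.
        producer.send(new KeyedMessage<String, String>("testTopic", "someKey", "someMessage"));
        producer.close();
    }
}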
Use of kafka.message.Message in project elasticsearch-river-kafka by endgameinc.
The class JsonMessageHandlerTest, method testIt.
public void testIt() throws Exception {
    Client client = createMock(Client.class);
    IndexRequestBuilder irb = createMock(IndexRequestBuilder.class);
    JsonMessageHandler h = new JsonMessageHandler(client);
    byte[] json = toJson(rec).getBytes();
    expect(client.prepareIndex(anyObject(String.class), anyObject(String.class), anyObject(String.class))).andReturn(irb);
    replay(client);
    // Stub the Kafka message so its payload yields the JSON document.
    Message message = createMock(Message.class);
    expect(message.payload()).andReturn(ByteBuffer.wrap(json));
    replay(message);
    BulkRequestBuilder bulkRequestBuilder = createMock(BulkRequestBuilder.class);
    expect(bulkRequestBuilder.add(anyObject(IndexRequestBuilder.class))).andReturn(null);
    replay(bulkRequestBuilder);
    try {
        h.handle(bulkRequestBuilder, message);
    } catch (Exception e) {
        fail("This should not fail");
    }
    verify(client);
}
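The mocked interactions above imply roughly what handle() must do: read the JSON payload, then turn the parsed fields into an index request on the bulk builder. A sketch inferred from those mocks follows; it is hypothetical, not the river's actual source.
// Hypothetical shape of handle(), inferred from the mocked calls in the test.
public void handle(BulkRequestBuilder bulkRequestBuilder, Message message) throws Exception {
    readMessage(message); // parse the JSON payload into index/type/id/source
    bulkRequestBuilder.add(client.prepareIndex(getIndex(), getType(), getId()).setSource(getSource()));
}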
Use of kafka.message.Message in project elasticsearch-river-kafka by endgameinc.
The class JsonMessageHandlerTest, method testGettersFromReadMessageReturnedMap.
public void testGettersFromReadMessageReturnedMap() throws Exception {
    JsonMessageHandler h = new JsonMessageHandler(null);
    byte[] json = toJson(rec).getBytes();
    Message message = createMock(Message.class);
    expect(message.payload()).andReturn(ByteBuffer.wrap(json));
    replay(message);
    try {
        h.readMessage(message);
    } catch (Exception e) {
        fail("This should not fail");
    }
    // After readMessage, the getters should reflect the fields of the parsed JSON record.
    assertEquals(h.getIndex(), rec.get("index"));
    assertEquals(h.getType(), rec.get("type"));
    assertEquals(h.getSource(), rec.get("source"));
    assertEquals(h.getId(), rec.get("id"));
    verify(message);
}
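readMessage() has to copy the payload out of the message's read-only ByteBuffer before parsing it. A plausible skeleton follows, assuming Jackson 1.x for the JSON parsing; the class name and fields mirror the getters the test checks, but this is an illustration, not the project's code.
import java.nio.ByteBuffer;
import java.util.Map;
import kafka.message.Message;
import org.codehaus.jackson.map.ObjectMapper;

// Hypothetical handler skeleton; field and getter names mirror the test above.
public class JsonMessageHandlerSketch {
    private String index;
    private String type;
    private String id;
    private Map<String, Object> source;

    @SuppressWarnings("unchecked")
    public void readMessage(Message message) throws Exception {
        ByteBuffer buf = message.payload();   // read-only view of the message body
        byte[] bytes = new byte[buf.remaining()];
        buf.get(bytes);                       // copy the bytes out of the buffer
        Map<String, Object> rec = new ObjectMapper().readValue(bytes, Map.class);
        index = (String) rec.get("index");
        type = (String) rec.get("type");
        id = (String) rec.get("id");
        source = (Map<String, Object>) rec.get("source");
    }

    public String getIndex() { return index; }
    public String getType() { return type; }
    public String getId() { return id; }
    public Map<String, Object> getSource() { return source; }
}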
Use of kafka.message.Message in project bagheera by mozilla-metrics.
The class KafkaConsumer, method poll.
@Override
public void poll() {
    final CountDownLatch latch = new CountDownLatch(streams.size());
    for (final KafkaStream<Message> stream : streams) {
        workers.add(executor.submit(new Callable<Void>() {
            @Override
            public Void call() {
                try {
                    for (MessageAndMetadata<Message> mam : stream) {
                        BagheeraMessage bmsg = BagheeraMessage.parseFrom(ByteString.copyFrom(mam.message().payload()));
                        // Get the sink for this message's namespace
                        // (typically only one sink unless a regex pattern was used to listen to multiple topics).
                        KeyValueSink sink = sinkFactory.getSink(bmsg.getNamespace());
                        if (sink == null) {
                            LOG.error("Could not obtain sink for namespace: " + bmsg.getNamespace());
                            break;
                        }
                        if (bmsg.getOperation() == Operation.CREATE_UPDATE && bmsg.hasId() && bmsg.hasPayload()) {
                            if (validationPipeline == null || validationPipeline.isValid(bmsg.getPayload().toByteArray())) {
                                if (bmsg.hasTimestamp()) {
                                    sink.store(bmsg.getId(), bmsg.getPayload().toByteArray(), bmsg.getTimestamp());
                                } else {
                                    sink.store(bmsg.getId(), bmsg.getPayload().toByteArray());
                                }
                            } else {
                                invalidMessageMeter.mark();
                                // TODO: sample out an example payload
                                LOG.warn("Invalid payload for namespace: " + bmsg.getNamespace());
                            }
                        } else if (bmsg.getOperation() == Operation.DELETE && bmsg.hasId()) {
                            sink.delete(bmsg.getId());
                        }
                        consumed.mark();
                    }
                } catch (InvalidProtocolBufferException e) {
                    LOG.error("Invalid protocol buffer in data stream", e);
                } catch (UnsupportedEncodingException e) {
                    LOG.error("Message ID was not in UTF-8 encoding", e);
                } catch (IOException e) {
                    LOG.error("IO error while storing to data sink", e);
                } finally {
                    latch.countDown();
                }
                return null;
            }
        }));
    }
    // Run indefinitely unless we detect that a worker thread exited.
    try {
        while (true) {
            latch.await(10, TimeUnit.SECONDS);
            if (latch.getCount() != streams.size()) {
                // A thread has died; exit the polling loop.
                break;
            }
        }
    } catch (InterruptedException e) {
        LOG.info("Interrupted during polling", e);
    }
    // Surface any errors raised by the worker threads.
    for (Future<Void> worker : workers) {
        try {
            if (worker.isDone() && !worker.isCancelled()) {
                worker.get(1, TimeUnit.SECONDS);
            }
        } catch (InterruptedException e) {
            LOG.error("Thread was interrupted:", e);
        } catch (ExecutionException e) {
            LOG.error("Exception occurred in thread:", e);
        } catch (TimeoutException e) {
            LOG.error("Timed out waiting for thread result:", e);
        } catch (CancellationException e) {
            LOG.error("Thread has been canceled: ", e);
        }
    }
}
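For reference, the KafkaStream<Message> instances being polled would come from Kafka's old high-level consumer. A sketch of that wiring under the 0.7-era API the code implies; the class name, ZooKeeper address, group name, topic, and stream count are all assumptions.
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.Message;

public class ConsumerWiring {
    public static List<KafkaStream<Message>> createStreams() {
        Properties props = new Properties();
        props.put("zk.connect", "localhost:2181"); // assumed ZooKeeper address
        props.put("groupid", "bagheera");          // assumed consumer group name
        ConsumerConnector connector = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
        Map<String, Integer> topicThreads = new HashMap<String, Integer>();
        topicThreads.put("metrics", 2);            // assumed topic, two parallel streams
        return connector.createMessageStreams(topicThreads).get("metrics");
    }
}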
Use of kafka.message.Message in project avro-kafka-storm by ransilberman.
The class MainTest, method testCompiledDatumRecord.
@Test
public void testCompiledDatumRecord() throws IOException, InterruptedException {
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(getClass().getResourceAsStream("LPEvent.avsc"));
    // Populate the compiled Avro record and its nested pline subrecord.
    LPEvent datum = new LPEvent();
    datum.setRevision(1L);
    datum.setSiteId("28280110");
    datum.setEventType("PLine");
    datum.setTimeStamp(System.currentTimeMillis());
    datum.setSessionId("123456II");
    pline plineDatum = new pline();
    plineDatum.setText("Hello, I am your agent");
    plineDatum.setLineType(2);
    plineDatum.setRepId("REPID7777");
    datum.setSubrecord(plineDatum);
    // Serialize the record with the Avro binary encoder.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<LPEvent> writer = new SpecificDatumWriter<LPEvent>(LPEvent.class);
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush();
    out.close();
    // Wrap the serialized bytes in a Kafka message and send it.
    Message message = new Message(out.toByteArray());
    Properties props = new Properties();
    props.put("zk.connect", zkConnection);
    Producer<Message, Message> producer = new kafka.javaapi.producer.Producer<Message, Message>(new ProducerConfig(props));
    producer.send(new ProducerData<Message, Message>(topic, message));
}
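A complementary sketch of the consuming side, deserializing the Avro-encoded payload back out of a kafka.message.Message. The decoder class name is hypothetical; only the compiled LPEvent type comes from the test above.
import java.io.IOException;
import java.nio.ByteBuffer;
import kafka.message.Message;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;

public final class LPEventDecoder {
    public static LPEvent decode(Message message) throws IOException {
        // Message.payload() returns a read-only ByteBuffer, so copy the bytes out first.
        ByteBuffer buf = message.payload();
        byte[] bytes = new byte[buf.remaining()];
        buf.get(bytes);
        DatumReader<LPEvent> reader = new SpecificDatumReader<LPEvent>(LPEvent.class);
        Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
        return reader.read(null, decoder);
    }
}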