use of kafka.producer.ProducerConfig in project druid by druid-io.
the class TestKafkaExtractionCluster method testSimpleRename.
@Test(timeout = 60_000L)
public void testSimpleRename() throws InterruptedException {
  final Properties kafkaProducerProperties = makeProducerProperties();
  final Producer<byte[], byte[]> producer = new Producer<>(new ProducerConfig(kafkaProducerProperties));
  closer.register(new Closeable() {
    @Override
    public void close() throws IOException {
      producer.close();
    }
  });
  checkServer();
  assertUpdated(null, "foo");
  assertReverseUpdated(ImmutableList.<String>of(), "foo");
  long events = factory.getCompletedEventCount();
  log.info("------------------------- Sending foo bar -------------------------------");
  producer.send(new KeyedMessage<>(topicName, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar")));
  long start = System.currentTimeMillis();
  while (events == factory.getCompletedEventCount()) {
    Thread.sleep(100);
    if (System.currentTimeMillis() > start + 60_000) {
      throw new ISE("Took too long to update event");
    }
  }
  log.info("------------------------- Checking foo bar -------------------------------");
  assertUpdated("bar", "foo");
  assertReverseUpdated(Collections.singletonList("foo"), "bar");
  assertUpdated(null, "baz");
  checkServer();
  events = factory.getCompletedEventCount();
  log.info("------------------------- Sending baz bat -------------------------------");
  producer.send(new KeyedMessage<>(topicName, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat")));
  while (events == factory.getCompletedEventCount()) {
    Thread.sleep(10);
    if (System.currentTimeMillis() > start + 60_000) {
      throw new ISE("Took too long to update event");
    }
  }
  log.info("------------------------- Checking baz bat -------------------------------");
  Assert.assertEquals("bat", factory.get().apply("baz"));
  Assert.assertEquals(Collections.singletonList("baz"), factory.get().unapply("bat"));
}
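The makeProducerProperties() helper is not part of this excerpt. A minimal sketch of what such a helper might return for the old Scala producer API, mirroring the properties used in the Pinot snippet below (the broker address here is an assumption), could look like this:

// Hypothetical sketch of makeProducerProperties(); broker address is assumed, not Druid's actual value.
private Properties makeProducerProperties() {
  Properties props = new Properties();
  props.put("metadata.broker.list", "localhost:9092"); // assumed test broker address
  props.put("serializer.class", "kafka.serializer.DefaultEncoder"); // raw byte[] payloads
  props.put("request.required.acks", "1"); // wait for leader acknowledgement
  return props;
}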
use of kafka.producer.ProducerConfig in project pinot by linkedin.
the class BaseClusterIntegrationTest method pushAvroIntoKafka.
public static void pushAvroIntoKafka(List<File> avroFiles, String kafkaBroker, String kafkaTopic, final byte[] header) {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", kafkaBroker);
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("request.required.acks", "1");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  Producer<byte[], byte[]> producer = new Producer<byte[], byte[]>(producerConfig);
  for (File avroFile : avroFiles) {
    try {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536);
      DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile);
      BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
      GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(reader.getSchema());
      int recordCount = 0;
      List<KeyedMessage<byte[], byte[]>> messagesToWrite = new ArrayList<KeyedMessage<byte[], byte[]>>(10000);
      int messagesInThisBatch = 0;
      for (GenericRecord genericRecord : reader) {
        outputStream.reset();
        if (header != null && 0 < header.length) {
          outputStream.write(header);
        }
        datumWriter.write(genericRecord, binaryEncoder);
        binaryEncoder.flush();
        byte[] bytes = outputStream.toByteArray();
        KeyedMessage<byte[], byte[]> data = new KeyedMessage<byte[], byte[]>(kafkaTopic, Longs.toByteArray(System.currentTimeMillis()), bytes);
        if (BATCH_KAFKA_MESSAGES) {
          messagesToWrite.add(data);
          messagesInThisBatch++;
          if (MAX_MESSAGES_PER_BATCH <= messagesInThisBatch) {
            LOGGER.debug("Sending a batch of {} records to Kafka", messagesInThisBatch);
            messagesInThisBatch = 0;
            producer.send(messagesToWrite);
            messagesToWrite.clear();
          }
        } else {
          producer.send(data);
        }
        recordCount += 1;
      }
      if (BATCH_KAFKA_MESSAGES) {
        LOGGER.info("Sending last batch of {} records to Kafka", messagesToWrite.size());
        producer.send(messagesToWrite);
      }
      outputStream.close();
      reader.close();
      LOGGER.info("Finished writing " + recordCount + " records from file " + avroFile.getName() + " into Kafka topic " + kafkaTopic);
      int totalRecordCount = totalAvroRecordWrittenCount.addAndGet(recordCount);
      LOGGER.info("Total records written so far " + totalRecordCount);
    } catch (Exception e) {
      e.printStackTrace();
      throw new RuntimeException(e);
    }
  }
}
use of kafka.producer.ProducerConfig in project bagheera by mozilla-metrics.
the class ProducerTest method produceData.
private void produceData(boolean includeBadRecord) throws InterruptedException {
  Properties props = getProperties();
  kafka.javaapi.producer.Producer<String, BagheeraMessage> producer = new kafka.javaapi.producer.Producer<String, BagheeraMessage>(new ProducerConfig(props));
  BagheeraMessage msg = getMessage(GOOD_MESSAGE_SIZE);
  assertEquals(GOOD_MESSAGE_SIZE, msg.getPayload().size());
  producer.send(getProducerData(msg));
  producer.send(getProducerData(getMessage(GOOD_MESSAGE_SIZE)));
  if (includeBadRecord) {
    producer.send(getProducerData(getMessage(BAD_MESSAGE_SIZE)));
  }
  for (int i = 0; i < BATCH_SIZE; i++) {
    producer.send(getProducerData(getMessage(GOOD_MESSAGE_SIZE)));
  }
  producer.close();
  // Wait for flush
  Thread.sleep(100);
}
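The getProducerData(...) and getProperties() helpers are not shown in this excerpt. A minimal sketch of getProducerData, assuming the Kafka 0.8 javaapi where send takes a KeyedMessage and assuming a TOPIC constant defined elsewhere in the test, might look like this:

// Hypothetical sketch of getProducerData(); the TOPIC constant and use of KeyedMessage are assumptions.
private KeyedMessage<String, BagheeraMessage> getProducerData(BagheeraMessage msg) {
  // Wrap the protobuf message for the topic; no explicit key, so the partitioner picks a partition.
  return new KeyedMessage<String, BagheeraMessage>(TOPIC, msg);
}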
use of kafka.producer.ProducerConfig in project presto by prestodb.
the class EmbeddedKafka method createProducer.
public CloseableProducer<Long, Object> createProducer() {
  Map<String, String> properties = ImmutableMap.<String, String>builder()
      .put("metadata.broker.list", getConnectString())
      .put("serializer.class", JsonEncoder.class.getName())
      .put("key.serializer.class", NumberEncoder.class.getName())
      .put("partitioner.class", NumberPartitioner.class.getName())
      .put("request.required.acks", "1")
      .build();
  ProducerConfig producerConfig = new ProducerConfig(toProperties(properties));
  return new CloseableProducer<>(producerConfig);
}
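The toProperties(...) helper converts the immutable map into the java.util.Properties object that ProducerConfig expects. A minimal sketch of such a conversion (the body here is an assumption, not necessarily Presto's implementation) could be:

// Sketch of a map-to-Properties conversion; implementation assumed for illustration.
private static Properties toProperties(Map<String, String> map) {
  Properties properties = new Properties();
  for (Map.Entry<String, String> entry : map.entrySet()) {
    properties.setProperty(entry.getKey(), entry.getValue());
  }
  return properties;
}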
use of kafka.producer.ProducerConfig in project avro-kafka-storm by ransilberman.
the class MainTest method testCompiledDatumRecord.
@Test
public void testCompiledDatumRecord() throws IOException, InterruptedException {
  Schema.Parser parser = new Schema.Parser();
  Schema schema = parser.parse(getClass().getResourceAsStream("LPEvent.avsc"));
  LPEvent datum = new LPEvent();
  datum.setRevision(1L);
  datum.setSiteId("28280110");
  datum.setEventType("PLine");
  datum.setTimeStamp(System.currentTimeMillis());
  datum.setSessionId("123456II");
  pline plineDatum = new pline();
  plineDatum.setText("Hello, I am your agent");
  plineDatum.setLineType(2);
  plineDatum.setRepId("REPID7777");
  datum.setSubrecord(plineDatum);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  DatumWriter<LPEvent> writer = new SpecificDatumWriter<LPEvent>(LPEvent.class);
  Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
  writer.write(datum, encoder);
  encoder.flush();
  out.close();
  Message message = new Message(out.toByteArray());
  Properties props = new Properties();
  props.put("zk.connect", zkConnection);
  Producer<Message, Message> producer = new kafka.javaapi.producer.Producer<Message, Message>(new ProducerConfig(props));
  producer.send(new ProducerData<Message, Message>(topic, message));
}
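Unlike the preceding snippets, this test uses the older Kafka 0.7-style javaapi, where the producer is configured through zk.connect and messages are sent as ProducerData. For comparison, a hedged sketch of the same send against the 0.8-era API used in the Druid and Pinot examples (the broker address and byte-array serializer are assumptions) might look like this:

// Sketch only: 0.8-style configuration and send; broker address is an assumption.
Properties props = new Properties();
props.put("metadata.broker.list", "localhost:9092"); // brokers instead of zk.connect
props.put("serializer.class", "kafka.serializer.DefaultEncoder"); // raw byte[] payloads
Producer<byte[], byte[]> producer = new Producer<byte[], byte[]>(new ProducerConfig(props));
producer.send(new KeyedMessage<byte[], byte[]>(topic, out.toByteArray()));
producer.close();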