Example 66 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project drill by apache.

The class KafkaMessageGenerator, method populateAvroMsgIntoKafka.

public void populateAvroMsgIntoKafka(String topic, int numMsg) {
    producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
    // GenericRecord values need the Avro serializer too (assumed here; the original may preconfigure it elsewhere)
    producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class);
    try (KafkaProducer<Object, GenericRecord> producer = new KafkaProducer<>(producerProperties)) {
        Schema.Parser parser = new Schema.Parser();
        String userSchema = "{\"type\":\"record\","
                + "\"name\":\"myrecord\","
                + "\"fields\":["
                + "{\"name\":\"key1\",\"type\":\"string\"},"
                + "{\"name\":\"key2\",\"type\":\"int\"},"
                + "{\"name\":\"key3\",\"type\":\"boolean\"},"
                + "{\"name\":\"key5\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},"
                + "{\"name\":\"key6\",\"type\":{\"type\":\"record\",\"name\":\"myrecord6\",\"fields\":["
                + "{\"name\":\"key61\",\"type\":\"double\"},"
                + "{\"name\":\"key62\",\"type\":\"double\"}]}}]}";
        Schema valueSchema = parser.parse(userSchema);
        GenericRecordBuilder valueBuilder = new GenericRecordBuilder(valueSchema);
        // Note: a stray trailing \" after the closing brace made the original key schema invalid JSON; removed here
        String key1Schema = "{\"type\":\"record\","
                + "\"name\":\"key1record\","
                + "\"fields\":["
                + "{\"name\":\"key1\",\"type\":\"string\"}]}";
        Schema keySchema = parser.parse(key1Schema);
        GenericRecordBuilder keyBuilder = new GenericRecordBuilder(keySchema);
        Random rand = new Random();
        for (int i = 0; i < numMsg; ++i) {
            // value record
            String key1 = UUID.randomUUID().toString();
            valueBuilder.set("key1", key1);
            valueBuilder.set("key2", rand.nextInt());
            valueBuilder.set("key3", rand.nextBoolean());
            List<Integer> list = Lists.newArrayList();
            list.add(rand.nextInt(100));
            list.add(rand.nextInt(100));
            list.add(rand.nextInt(100));
            valueBuilder.set("key5", list);
            GenericRecordBuilder innerBuilder = new GenericRecordBuilder(valueSchema.getField("key6").schema());
            innerBuilder.set("key61", rand.nextDouble());
            innerBuilder.set("key62", rand.nextDouble());
            valueBuilder.set("key6", innerBuilder.build());
            Record producerRecord = valueBuilder.build();
            // key record
            keyBuilder.set("key1", key1);
            Record keyRecord = keyBuilder.build();
            ProducerRecord<Object, GenericRecord> record = new ProducerRecord<>(topic, keyRecord, producerRecord);
            producer.send(record);
        }
    }
}
Also used: KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) Schema(org.apache.avro.Schema) Random(java.util.Random) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) GenericRecordBuilder(org.apache.avro.generic.GenericRecordBuilder) JsonObject(com.google.gson.JsonObject) GenericRecord(org.apache.avro.generic.GenericRecord) Record(org.apache.avro.generic.GenericData.Record)
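
KafkaAvroSerializer here is Confluent's Avro serializer, which registers record schemas with a schema registry, so reading the generated messages back requires the matching KafkaAvroDeserializer. A minimal consumer sketch under that assumption; the bootstrap.servers, schema.registry.url, group.id, and topic values are placeholders:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AvroTopicReader {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");           // placeholder
        props.put("schema.registry.url", "http://localhost:8081");  // placeholder
        props.put("group.id", "avro-reader");                       // placeholder
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", KafkaAvroDeserializer.class.getName());
        props.put("value.deserializer", KafkaAvroDeserializer.class.getName());
        try (KafkaConsumer<GenericRecord, GenericRecord> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("avro-topic")); // placeholder topic
            for (ConsumerRecord<GenericRecord, GenericRecord> record : consumer.poll(Duration.ofSeconds(5))) {
                // Fields are retrieved by name from the GenericRecord, mirroring the schemas above
                System.out.println(record.key().get("key1") + " -> " + record.value().get("key2"));
            }
        }
    }
}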

Example 67 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project logging-log4j2 by apache.

The class KafkaAppenderTest, method testAppendWithKeyLookup.

@Test
public void testAppendWithKeyLookup() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithKeyLookup");
    final LogEvent logEvent = createLogEvent();
    final Date date = new Date();
    final SimpleDateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    appender.append(logEvent);
    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());
    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    byte[] keyValue = format.format(date).getBytes(StandardCharsets.UTF_8);
    assertEquals(Long.valueOf(logEvent.getTimeMillis()), item.timestamp());
    assertArrayEquals(keyValue, item.key());
    assertEquals(LOG_MESSAGE, new String(item.value(), StandardCharsets.UTF_8));
}
Also used: Appender(org.apache.logging.log4j.core.Appender) LogEvent(org.apache.logging.log4j.core.LogEvent) Log4jLogEvent(org.apache.logging.log4j.core.impl.Log4jLogEvent) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) SimpleDateFormat(java.text.SimpleDateFormat) Date(java.util.Date) Test(org.junit.Test)
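
The kafka.history() call works because the test wires the appender to a stand-in producer in the style of Kafka's own MockProducer, which records every ProducerRecord handed to send() instead of contacting a broker. A minimal sketch of that testing pattern; the topic name and payloads are illustrative:

import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.kafka.clients.producer.MockProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArraySerializer;

public class MockProducerSketch {
    public static void main(String[] args) {
        // autoComplete=true completes each send immediately with synthetic metadata
        MockProducer<byte[], byte[]> producer =
                new MockProducer<>(true, new ByteArraySerializer(), new ByteArraySerializer());
        producer.send(new ProducerRecord<>("log-topic",
                "key".getBytes(StandardCharsets.UTF_8),
                "message".getBytes(StandardCharsets.UTF_8)));
        // history() returns the records in send() order, ready for assertions
        List<ProducerRecord<byte[], byte[]>> history = producer.history();
        System.out.println(history.size() + " record(s), first topic: " + history.get(0).topic());
    }
}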

Example 68 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project logging-log4j2 by apache.

The class KafkaAppenderTest, method testAppendWithLayout.

@Test
public void testAppendWithLayout() throws Exception {
    final Appender appender = ctx.getRequiredAppender("KafkaAppenderWithLayout");
    appender.append(createLogEvent());
    final List<ProducerRecord<byte[], byte[]>> history = kafka.history();
    assertEquals(1, history.size());
    final ProducerRecord<byte[], byte[]> item = history.get(0);
    assertNotNull(item);
    assertEquals(TOPIC_NAME, item.topic());
    assertNull(item.key());
    assertEquals("[" + LOG_MESSAGE + "]", new String(item.value(), StandardCharsets.UTF_8));
}
Also used: Appender(org.apache.logging.log4j.core.Appender) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Test(org.junit.Test)
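
Note the null key assertion: when a record is built without a key, partition assignment falls to the producer's configured partitioner rather than key hashing (round-robin or sticky partitioning, depending on the client version). A one-method sketch of an unkeyed record like the one this test asserts on; the topic and payload are illustrative:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.clients.producer.ProducerRecord;

class UnkeyedRecordSketch {
    static ProducerRecord<byte[], byte[]> unkeyed() {
        // Two-argument constructor: key() stays null, the partitioner picks the partition
        return new ProducerRecord<>("log-topic", "[message]".getBytes(StandardCharsets.UTF_8));
    }
}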

Example 69 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project flink by apache.

The class DynamicKafkaRecordSerializationSchema, method serialize.

@Override
public ProducerRecord<byte[], byte[]> serialize(RowData consumedRow, KafkaSinkContext context, Long timestamp) {
    // shortcut in case no input projection is required
    if (keySerialization == null && !hasMetadata) {
        final byte[] valueSerialized = valueSerialization.serialize(consumedRow);
        return new ProducerRecord<>(
                topic,
                extractPartition(consumedRow, null, valueSerialized, context.getPartitionsForTopic(topic)),
                null,
                valueSerialized);
    }
    final byte[] keySerialized;
    if (keySerialization == null) {
        keySerialized = null;
    } else {
        final RowData keyRow = createProjectedRow(consumedRow, RowKind.INSERT, keyFieldGetters);
        keySerialized = keySerialization.serialize(keyRow);
    }
    final byte[] valueSerialized;
    final RowKind kind = consumedRow.getRowKind();
    if (upsertMode) {
        if (kind == RowKind.DELETE || kind == RowKind.UPDATE_BEFORE) {
            // emit a tombstone: DELETE/UPDATE_BEFORE rows map to a null value
            valueSerialized = null;
        } else {
            // rewrite the row kind as INSERT to comply with the insert-only value format
            final RowData valueRow = DynamicKafkaRecordSerializationSchema.createProjectedRow(consumedRow, kind, valueFieldGetters);
            valueRow.setRowKind(RowKind.INSERT);
            valueSerialized = valueSerialization.serialize(valueRow);
        }
    } else {
        final RowData valueRow = DynamicKafkaRecordSerializationSchema.createProjectedRow(consumedRow, kind, valueFieldGetters);
        valueSerialized = valueSerialization.serialize(valueRow);
    }
    return new ProducerRecord<>(
            topic,
            extractPartition(consumedRow, keySerialized, valueSerialized, context.getPartitionsForTopic(topic)),
            readMetadata(consumedRow, KafkaDynamicSink.WritableMetadata.TIMESTAMP),
            keySerialized,
            valueSerialized,
            readMetadata(consumedRow, KafkaDynamicSink.WritableMetadata.HEADERS));
}
Also used: GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) RowKind(org.apache.flink.types.RowKind)
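
The final return uses the long-form constructor ProducerRecord(topic, partition, timestamp, key, value, headers) from the public producer API. A standalone sketch of that constructor with placeholder values (the topic, header key, and payloads are illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;

class FullConstructorSketch {
    static ProducerRecord<byte[], byte[]> build() {
        Iterable<Header> headers = Collections.<Header>singletonList(
                new RecordHeader("source", "sketch".getBytes(StandardCharsets.UTF_8)));
        return new ProducerRecord<>(
                "sink-topic",                // placeholder topic
                0,                           // explicit partition; pass null to let the partitioner decide
                System.currentTimeMillis(),  // record timestamp; pass null for the producer default
                "key".getBytes(StandardCharsets.UTF_8),
                "value".getBytes(StandardCharsets.UTF_8),
                headers);
    }
}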

Example 70 with ProducerRecord

Use of org.apache.kafka.clients.producer.ProducerRecord in project flink by apache.

The class KafkaTestBase, method produceToKafka.

public static <K, V> void produceToKafka(
        Collection<ProducerRecord<K, V>> records,
        Class<? extends org.apache.kafka.common.serialization.Serializer<K>> keySerializerClass,
        Class<? extends org.apache.kafka.common.serialization.Serializer<V>> valueSerializerClass) throws Throwable {
    Properties props = new Properties();
    props.putAll(standardProps);
    props.putAll(kafkaServer.getIdempotentProducerConfig());
    props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializerClass.getName());
    props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializerClass.getName());
    AtomicReference<Throwable> sendingError = new AtomicReference<>();
    Callback callback = (metadata, exception) -> {
        if (exception != null) {
            if (!sendingError.compareAndSet(null, exception)) {
                sendingError.get().addSuppressed(exception);
            }
        }
    };
    try (KafkaProducer<K, V> producer = new KafkaProducer<>(props)) {
        for (ProducerRecord<K, V> record : records) {
            producer.send(record, callback);
        }
    }
    if (sendingError.get() != null) {
        throw sendingError.get();
    }
}
Also used: ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) BeforeClass(org.junit.BeforeClass) LoggerFactory(org.slf4j.LoggerFactory) AtomicReference(java.util.concurrent.atomic.AtomicReference) MemorySize(org.apache.flink.configuration.MemorySize) ByteBuffer(java.nio.ByteBuffer) SuccessException(org.apache.flink.test.util.SuccessException) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) TaskManagerOptions(org.apache.flink.configuration.TaskManagerOptions) InstantiationUtil(org.apache.flink.util.InstantiationUtil) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) ConfigConstants(org.apache.flink.configuration.ConfigConstants) TestLogger(org.apache.flink.util.TestLogger) Assert.fail(org.junit.Assert.fail) JobExecutionException(org.apache.flink.runtime.client.JobExecutionException) RetryRule(org.apache.flink.testutils.junit.RetryRule) ClassRule(org.junit.ClassRule) RetryOnFailure(org.apache.flink.testutils.junit.RetryOnFailure) ProducerConfig(org.apache.kafka.clients.producer.ProducerConfig) AfterClass(org.junit.AfterClass) Logger(org.slf4j.Logger) Properties(java.util.Properties) FiniteDuration(scala.concurrent.duration.FiniteDuration) Collection(java.util.Collection) Configuration(org.apache.flink.configuration.Configuration) Set(java.util.Set) TimeUnit(java.util.concurrent.TimeUnit) List(java.util.List) Rule(org.junit.Rule) TestStreamEnvironment(org.apache.flink.streaming.util.TestStreamEnvironment) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) JMXReporter(org.apache.flink.metrics.jmx.JMXReporter) Callback(org.apache.kafka.clients.producer.Callback) TemporaryFolder(org.junit.rules.TemporaryFolder) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment)
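
A hypothetical call site for the helper above, assuming the surrounding KafkaTestBase has initialized standardProps and kafkaServer and that the target topic exists; the topic name and payloads are placeholders:

import java.util.Arrays;
import java.util.List;
import org.apache.kafka.common.serialization.StringSerializer;

List<ProducerRecord<String, String>> records = Arrays.asList(
        new ProducerRecord<>("test-topic", "k1", "v1"),   // "test-topic" is a placeholder
        new ProducerRecord<>("test-topic", "k2", "v2"));
// Rethrows the first asynchronous send failure; later failures arrive as suppressed exceptions
produceToKafka(records, StringSerializer.class, StringSerializer.class);

Capturing only the first failure in the AtomicReference while suppressing the rest lets the send loop run at full speed, and because close() flushes outstanding sends, every callback has fired by the time the reference is checked.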

Aggregations

ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 193
Test (org.junit.Test): 90
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer): 57
Properties (java.util.Properties): 50
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 40
ArrayList (java.util.ArrayList): 39
Callback (org.apache.kafka.clients.producer.Callback): 30
Future (java.util.concurrent.Future): 26
TopicPartition (org.apache.kafka.common.TopicPartition): 24
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 21
HashMap (java.util.HashMap): 20
Random (java.util.Random): 19
IOException (java.io.IOException): 16
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 16
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 16
KafkaException (org.apache.kafka.common.KafkaException): 16
List (java.util.List): 13
MockProducer (org.apache.kafka.clients.producer.MockProducer): 13
DefaultPartitioner (org.apache.kafka.clients.producer.internals.DefaultPartitioner): 12
StreamsException (org.apache.kafka.streams.errors.StreamsException): 12