Example 41 with GenericDatumWriter

Use of org.apache.avro.generic.GenericDatumWriter in project pinot by linkedin.

From class BaseClusterIntegrationTest, method pushRandomAvroIntoKafka.

public static void pushRandomAvroIntoKafka(File avroFile, String kafkaBroker, String kafkaTopic, int rowCount, Random random) {
    // configure the legacy Scala Kafka producer with the broker list and a byte-array serializer
    Properties properties = new Properties();
    properties.put("metadata.broker.list", kafkaBroker);
    properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
    properties.put("request.required.acks", "1");
    ProducerConfig producerConfig = new ProducerConfig(properties);
    Producer<String, byte[]> producer = new Producer<String, byte[]>(producerConfig);
    try {
        // one output buffer, encoder and datum writer are reused for every serialized row
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536);
        DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile);
        BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
        Schema avroSchema = reader.getSchema();
        GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(avroSchema);
        int recordCount = 0;
        int rowsRemaining = rowCount;
        int messagesInThisBatch = 0;
        // serialize random records and push them, batching the Kafka messages when enabled
        while (rowsRemaining > 0) {
            int rowsInThisBatch = Math.min(rowsRemaining, MAX_MESSAGES_PER_BATCH);
            List<KeyedMessage<String, byte[]>> messagesToWrite = new ArrayList<KeyedMessage<String, byte[]>>(rowsInThisBatch);
            GenericRecord genericRecord = new GenericData.Record(avroSchema);
            for (int i = 0; i < rowsInThisBatch; ++i) {
                // overwrite the reused record with fresh random values, then serialize it
                generateRandomRecord(genericRecord, avroSchema, random);
                outputStream.reset();
                datumWriter.write(genericRecord, binaryEncoder);
                binaryEncoder.flush();
                byte[] bytes = outputStream.toByteArray();
                KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(kafkaTopic, bytes);
                if (BATCH_KAFKA_MESSAGES) {
                    messagesToWrite.add(data);
                    messagesInThisBatch++;
                    if (MAX_MESSAGES_PER_BATCH <= messagesInThisBatch) {
                        messagesInThisBatch = 0;
                        producer.send(messagesToWrite);
                        messagesToWrite.clear();
                        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
                    }
                } else {
                    producer.send(data);
                }
                recordCount += 1;
            }
            // flush whatever is left over from a partially filled batch
            if (BATCH_KAFKA_MESSAGES) {
                producer.send(messagesToWrite);
            }
            rowsRemaining -= rowsInThisBatch;
        }
        outputStream.close();
        reader.close();
        LOGGER.info("Finished writing " + recordCount + " records from " + avroFile.getName() + " into Kafka topic " + kafkaTopic);
        int totalRecordCount = totalAvroRecordWrittenCount.addAndGet(recordCount);
        LOGGER.info("Total records written so far " + totalRecordCount);
    } catch (Exception e) {
        e.printStackTrace();
        throw new RuntimeException(e);
    }
}
Also used: EncoderFactory(org.apache.avro.io.EncoderFactory) Schema(org.apache.avro.Schema) ArrayList(java.util.ArrayList) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) Properties(java.util.Properties) JSONException(org.json.JSONException) ArchiveException(org.apache.commons.compress.archivers.ArchiveException) SQLException(java.sql.SQLException) IOException(java.io.IOException) Producer(kafka.javaapi.producer.Producer) BinaryEncoder(org.apache.avro.io.BinaryEncoder) ProducerConfig(kafka.producer.ProducerConfig) GenericRecord(org.apache.avro.generic.GenericRecord) KeyedMessage(kafka.producer.KeyedMessage)
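
For reference, the consuming side of this pattern mirrors the writer with GenericDatumReader. The helper below is a minimal sketch, not from the pinot source: it assumes Avro 1.4+ (DecoderFactory), that each Kafka payload is raw Avro binary with no framing, and the method name decodeMessage is hypothetical.

static GenericRecord decodeMessage(Schema avroSchema, byte[] messageBytes) throws IOException {
    // GenericDatumReader is the read-side counterpart of GenericDatumWriter
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(avroSchema);
    // passing null for the reuse argument creates a fresh decoder over the byte array
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(messageBytes, null);
    return datumReader.read(null, decoder);
}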

Example 42 with GenericDatumWriter

Use of org.apache.avro.generic.GenericDatumWriter in project databus by linkedin.

From class DummySuccessfulErrorCountingConsumer, method createSampleSchema1Events.

static DbusEventInfo[] createSampleSchema1Events(int eventsNum) throws IOException {
    Random rng = new Random();
    DbusEventInfo[] result = new DbusEventInfo[eventsNum];
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(SOURCE1_SCHEMA);
    for (int i = 0; i < eventsNum; ++i) {
        GenericRecord r = new GenericData.Record(SOURCE1_SCHEMA);
        String s = RngUtils.randomString(rng.nextInt(100));
        r.put("s", s);
        ByteArrayOutputStream baos = new ByteArrayOutputStream(s.length() + 100);
        // direct BinaryEncoder construction is the pre-Avro-1.4 API (see the 1.4+ equivalent sketched after this example)
        BinaryEncoder out = new BinaryEncoder(baos);
        try {
            writer.write(r, out);
            out.flush();
            result[i] = new DbusEventInfo(DbusOpcode.UPSERT, 1, (short) 1, (short) 1, System.nanoTime(), (short) 1, SOURCE1_SCHEMAID, baos.toByteArray(), false, true);
            result[i].setEventSerializationVersion(_eventFactory.getVersion());
        } finally {
            baos.close();
        }
    }
    return result;
}
Also used: DbusEventInfo(com.linkedin.databus.core.DbusEventInfo) Random(java.util.Random) BinaryEncoder(org.apache.avro.io.BinaryEncoder) GenericRecord(org.apache.avro.generic.GenericRecord) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) Checkpoint(com.linkedin.databus.core.Checkpoint)
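
The direct new BinaryEncoder(baos) call above only compiles against pre-1.4 Avro. On Avro 1.4 and later the same step goes through EncoderFactory; a minimal sketch of the equivalent, reusing the writer and record from the example (not from the databus source):

ByteArrayOutputStream baos = new ByteArrayOutputStream();
// binaryEncoder() returns a buffered encoder, so flush() is mandatory before reading the bytes
BinaryEncoder out = EncoderFactory.get().binaryEncoder(baos, null);
writer.write(r, out);
out.flush();
byte[] payload = baos.toByteArray();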

Example 43 with GenericDatumWriter

Use of org.apache.avro.generic.GenericDatumWriter in project avro-kafka-storm by ransilberman.

From class MainTest, method testGenericRecord.

@Test
public void testGenericRecord() throws IOException, InterruptedException {
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(getClass().getResourceAsStream("LPEvent.avsc"));
    GenericRecord datum = new GenericData.Record(schema);
    datum.put("revision", 1L);
    datum.put("siteId", "28280110");
    datum.put("eventType", "PLine");
    datum.put("timeStamp", System.currentTimeMillis());
    datum.put("sessionId", "123456II");
    // index the branches of the "subrecord" union by name so the "pline" branch can be picked out
    Map<String, Schema> unions = new HashMap<String, Schema>();
    List<Schema> typeList = schema.getField("subrecord").schema().getTypes();
    for (Schema sch : typeList) {
        unions.put(sch.getName(), sch);
    }
    GenericRecord plineDatum = new GenericData.Record(unions.get("pline"));
    plineDatum.put("text", "How can I help you?");
    plineDatum.put("lineType", 1);
    plineDatum.put("repId", "REPID12345");
    datum.put("subrecord", plineDatum);
    // serialize the record to raw Avro binary and send it as a Kafka message
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush();
    out.close();
    Message message = new Message(out.toByteArray());
    Properties props = new Properties();
    props.put("zk.connect", zkConnection);
    Producer<Message, Message> producer = new kafka.javaapi.producer.Producer<Message, Message>(new ProducerConfig(props));
    producer.send(new ProducerData<Message, Message>(topic, message));
}
Also used: Message(kafka.message.Message) HashMap(java.util.HashMap) Schema(org.apache.avro.Schema) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) Properties(java.util.Properties) Producer(kafka.javaapi.producer.Producer) Encoder(org.apache.avro.io.Encoder) ProducerConfig(kafka.producer.ProducerConfig) GenericRecord(org.apache.avro.generic.GenericRecord) Test(org.junit.Test)
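
The chain of put calls can also be written with GenericRecordBuilder (available since Avro 1.6), which fills in schema defaults and fails fast at build() time if a required field is missing. A sketch of the same record construction, not part of the original test:

GenericRecord datum = new GenericRecordBuilder(schema)
    .set("revision", 1L)
    .set("siteId", "28280110")
    .set("eventType", "PLine")
    .set("timeStamp", System.currentTimeMillis())
    .set("sessionId", "123456II")
    .set("subrecord", plineDatum)
    .build();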

Example 44 with GenericDatumWriter

Use of org.apache.avro.generic.GenericDatumWriter in project avro-kafka-storm by ransilberman.

From class MainTest, method testDataFile.

@Test
public void testDataFile() throws IOException {
    File fileOut = new File("data.avro");
    File fileIn = new File("data.avro");
    Schema.Parser parser = new Schema.Parser();
    Schema schema = parser.parse(getClass().getResourceAsStream("LPEvent.avsc"));
    GenericRecord datum = new GenericData.Record(schema);
    datum.put("revision", 1L);
    datum.put("siteId", "28280110");
    datum.put("eventType", "PLine");
    datum.put("timeStamp", System.currentTimeMillis());
    datum.put("sessionId", "123456II");
    Map<String, Schema> unions = new HashMap<String, Schema>();
    List<Schema> typeList = schema.getField("subrecord").schema().getTypes();
    for (Schema sch : typeList) {
        unions.put(sch.getName(), sch);
    }
    GenericRecord plineDatum = new GenericData.Record(unions.get("pline"));
    plineDatum.put("text", "How can I help you?");
    plineDatum.put("lineType", 1);
    plineDatum.put("repId", "REPID12345");
    datum.put("subrecord", plineDatum);
    //write the file
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(writer);
    dataFileWriter.create(schema, fileOut);
    dataFileWriter.append(datum);
    dataFileWriter.append(datum);
    dataFileWriter.append(datum);
    dataFileWriter.close();
    //read the file
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
    DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(fileIn, reader);
    assertThat("Scema is the same", schema, is(dataFileReader.getSchema()));
    for (GenericRecord record : dataFileReader) {
        assertThat(record.get("siteId").toString(), is("28280110"));
        assertThat(record.get("eventType").toString(), is("PLine"));
    }
    // close the reader to release the file handle
    dataFileReader.close();
}
Also used: HashMap(java.util.HashMap) GenericDatumReader(org.apache.avro.generic.GenericDatumReader) Schema(org.apache.avro.Schema) DataFileWriter(org.apache.avro.file.DataFileWriter) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) DataFileReader(org.apache.avro.file.DataFileReader) GenericRecord(org.apache.avro.generic.GenericRecord) File(java.io.File) Test(org.junit.Test)
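
The same GenericDatumWriter can also emit a human-readable form of the record through JsonEncoder instead of a binary container file. A minimal sketch, assuming Avro 1.4+ and reusing the schema and datum from the test above (not part of the original test):

DatumWriter<GenericRecord> jsonWriter = new GenericDatumWriter<GenericRecord>(schema);
ByteArrayOutputStream jsonOut = new ByteArrayOutputStream();
// the JSON encoder needs the schema up front so it can name the fields
Encoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, jsonOut);
jsonWriter.write(datum, jsonEncoder);
jsonEncoder.flush();
String json = jsonOut.toString("UTF-8");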

Example 45 with GenericDatumWriter

Use of org.apache.avro.generic.GenericDatumWriter in project core by s4.

From class AvroSerDeser, method serialize.

public static byte[] serialize(Schema schema, GenericRecord content) throws IOException {
    GenericDatumWriter<GenericRecord> serveWriter = new GenericDatumWriter<GenericRecord>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // the pre-1.4 BinaryEncoder writes straight through to the stream, so no flush() is needed here;
    // the buffered encoders from the 1.4+ EncoderFactory would require one before toByteArray()
    serveWriter.write(content, new BinaryEncoder(out));
    return out.toByteArray();
}
Also used: BinaryEncoder(org.apache.avro.io.BinaryEncoder) GenericDatumWriter(org.apache.avro.generic.GenericDatumWriter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) GenericRecord(org.apache.avro.generic.GenericRecord)
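
A deserializing counterpart would mirror this method with GenericDatumReader. The sketch below is hypothetical and written against the Avro 1.4+ DecoderFactory API rather than the pre-1.4 API used above:

public static GenericRecord deserialize(Schema schema, byte[] bytes) throws IOException {
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
    // null for the reuse argument creates a fresh decoder over the byte array
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    return reader.read(null, decoder);
}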

Aggregations

GenericDatumWriter (org.apache.avro.generic.GenericDatumWriter): 49
GenericRecord (org.apache.avro.generic.GenericRecord): 46
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 24
Schema (org.apache.avro.Schema): 23
DataFileWriter (org.apache.avro.file.DataFileWriter): 17
BinaryEncoder (org.apache.avro.io.BinaryEncoder): 17
IOException (java.io.IOException): 13
Encoder (org.apache.avro.io.Encoder): 12
File (java.io.File): 9
Test (org.junit.Test): 6
FileOutputStream (java.io.FileOutputStream): 4
GenericDatumReader (org.apache.avro.generic.GenericDatumReader): 4
ArrayList (java.util.ArrayList): 3
Properties (java.util.Properties): 3
Producer (kafka.javaapi.producer.Producer): 3
ProducerConfig (kafka.producer.ProducerConfig): 3
GenericRecordBuilder (org.apache.avro.generic.GenericRecordBuilder): 3
JsonEncoder (org.apache.avro.io.JsonEncoder): 3
AvroAdapter (com.linkedin.data.avro.AvroAdapter): 2
DbusEventInfo (com.linkedin.databus.core.DbusEventInfo): 2