Use of org.apache.avro.io.BinaryEncoder in project databus by LinkedIn.
The class AvroConverter, method convert.
public void convert(InputStream in, OutputStream out) throws IOException {
  JsonGenerator jsonGenerator =
      (new JsonFactory()).createJsonGenerator(new OutputStreamWriter(out));
  if (AvroFormat.JSON == _outputFormat)
    jsonGenerator.useDefaultPrettyPrinter();
  List<GenericRecord> result = convert(in);
  Encoder outputEncoder = (AvroFormat.BINARY == _outputFormat)
      ? new BinaryEncoder(out)
      : new JsonEncoder(_outputSchema, jsonGenerator);
  GenericDatumWriter<GenericRecord> genericWriter =
      new GenericDatumWriter<GenericRecord>(_outputSchema);
  for (GenericRecord r : result) {
    genericWriter.write(r, outputEncoder);
  }
  outputEncoder.flush();
  out.flush();
}
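The new BinaryEncoder(out) and new JsonEncoder(schema, generator) constructors used here come from the pre-1.5 Avro API; later releases deprecated and then removed them in favor of EncoderFactory. A minimal sketch of the same binary-or-JSON switch against the newer factory API (the class and method names below are illustrative, not part of databus):

import java.io.IOException;
import java.io.OutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;

// Hypothetical helper: the same format switch on Avro 1.5+.
public final class AvroWriteSketch {
  public static void writeAll(Iterable<GenericRecord> records, Schema schema,
                              OutputStream out, boolean binary) throws IOException {
    Encoder encoder = binary
        ? EncoderFactory.get().binaryEncoder(out, null)   // buffered binary encoder
        : EncoderFactory.get().jsonEncoder(schema, out);  // one JSON object per record
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    for (GenericRecord r : records) {
      writer.write(r, encoder);
    }
    encoder.flush(); // the factory encoders buffer, so flushing is not optional
  }
}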
Use of org.apache.avro.io.BinaryEncoder in project databus by LinkedIn.
The class OpenReplicatorAvroEventFactory, method serializeEvent.
protected byte[] serializeEvent(GenericRecord record) throws EventCreationException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer =
        new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    _log.error("Exception for record: " + record + " with schema: " + record.getSchema().getFullName());
    throw new EventCreationException("Failed to serialize the Avro GenericRecord", ex);
  }
  return serializedValue;
}
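This works without an explicit flush because the old BinaryEncoder constructor wrote straight through to the underlying stream; the buffered encoders returned by EncoderFactory in later releases must be flushed before bos.toByteArray(), or the payload comes back truncated. A sketch of the equivalent on the newer API (the helper class name is an assumption; error handling from the original is elided):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

// Hypothetical helper: the same serialization on Avro 1.5+.
public final class EventSerializerSketch {
  public static byte[] serialize(GenericRecord record) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bos, null);
    new GenericDatumWriter<GenericRecord>(record.getSchema()).write(record, encoder);
    encoder.flush(); // required: the factory encoder buffers its output
    return bos.toByteArray();
  }
}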
Use of org.apache.avro.io.BinaryEncoder in project databus by LinkedIn.
The class OracleAvroGenericEventFactory, method serializeEvent.
protected byte[] serializeEvent(GenericRecord record, long scn, long timestamp, ResultSet row,
    DbusEventBufferAppendable eventBuffer, boolean enableTracing,
    DbusEventsStatisticsCollector dbusEventsStatisticsCollector)
    throws EventCreationException, UnsupportedKeyException {
  // Serialize the row
  byte[] serializedValue;
  try {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Encoder encoder = new BinaryEncoder(bos);
    GenericDatumWriter<GenericRecord> writer =
        new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    serializedValue = bos.toByteArray();
  } catch (IOException ex) {
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  } catch (RuntimeException ex) {
    // Avro likes to throw RuntimeExceptions instead of checked exceptions when serialization fails.
    throw new EventCreationException("Failed to serialize the Avro GenericRecord. ResultSet was: (" + row + ")", ex);
  }
  return serializedValue;
}
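For reference, the byte arrays these factories produce can be turned back into records with the matching decoder from DecoderFactory; a sketch of the read side, assuming the payload is decoded with the same schema it was written with:

import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;

// Sketch: decode a payload produced by serializeEvent back into a GenericRecord.
public final class EventDeserializerSketch {
  public static GenericRecord deserialize(byte[] payload, Schema schema) throws IOException {
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    return new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
  }
}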
Use of org.apache.avro.io.BinaryEncoder in project pinot by LinkedIn.
The class BaseClusterIntegrationTest, method pushRandomAvroIntoKafka.
public static void pushRandomAvroIntoKafka(File avroFile, String kafkaBroker, String kafkaTopic,
    int rowCount, Random random) {
  Properties properties = new Properties();
  properties.put("metadata.broker.list", kafkaBroker);
  properties.put("serializer.class", "kafka.serializer.DefaultEncoder");
  properties.put("request.required.acks", "1");
  ProducerConfig producerConfig = new ProducerConfig(properties);
  Producer<String, byte[]> producer = new Producer<String, byte[]>(producerConfig);
  try {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream(65536);
    DataFileStream<GenericRecord> reader = AvroUtils.getAvroReader(avroFile);
    BinaryEncoder binaryEncoder = new EncoderFactory().directBinaryEncoder(outputStream, null);
    Schema avroSchema = reader.getSchema();
    GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(avroSchema);
    int recordCount = 0;
    int rowsRemaining = rowCount;
    int messagesInThisBatch = 0;
    while (rowsRemaining > 0) {
      int rowsInThisBatch = Math.min(rowsRemaining, MAX_MESSAGES_PER_BATCH);
      List<KeyedMessage<String, byte[]>> messagesToWrite =
          new ArrayList<KeyedMessage<String, byte[]>>(rowsInThisBatch);
      GenericRecord genericRecord = new GenericData.Record(avroSchema);
      for (int i = 0; i < rowsInThisBatch; ++i) {
        generateRandomRecord(genericRecord, avroSchema, random);
        outputStream.reset();
        datumWriter.write(genericRecord, binaryEncoder);
        binaryEncoder.flush();
        byte[] bytes = outputStream.toByteArray();
        KeyedMessage<String, byte[]> data = new KeyedMessage<String, byte[]>(kafkaTopic, bytes);
        if (BATCH_KAFKA_MESSAGES) {
          messagesToWrite.add(data);
          messagesInThisBatch++;
          if (MAX_MESSAGES_PER_BATCH <= messagesInThisBatch) {
            messagesInThisBatch = 0;
            producer.send(messagesToWrite);
            messagesToWrite.clear();
            Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
          }
        } else {
          producer.send(data);
        }
        recordCount += 1;
      }
      if (BATCH_KAFKA_MESSAGES) {
        producer.send(messagesToWrite);
      }
      rowsRemaining -= rowsInThisBatch;
    }
    outputStream.close();
    reader.close();
    LOGGER.info("Finished writing " + recordCount + " records from " + avroFile.getName()
        + " into Kafka topic " + kafkaTopic);
    int totalRecordCount = totalAvroRecordWrittenCount.addAndGet(recordCount);
    LOGGER.info("Total records written so far " + totalRecordCount);
  } catch (Exception e) {
    e.printStackTrace();
    throw new RuntimeException(e);
  }
}
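The Producer and KeyedMessage classes here are the old Scala producer from Kafka 0.8; the per-record Avro wire format works unchanged with the modern Java client. A sketch of the same send path against org.apache.kafka.clients (broker address, topic, and helper name are placeholders, not part of pinot):

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

// Sketch: pushing Avro-encoded byte[] payloads with the modern Kafka client.
public final class KafkaPushSketch {
  public static void push(String broker, String topic, Iterable<byte[]> payloads) {
    Properties props = new Properties();
    props.put("bootstrap.servers", broker);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    KafkaProducer<String, byte[]> producer = new KafkaProducer<String, byte[]>(props);
    try {
      for (byte[] payload : payloads) {
        producer.send(new ProducerRecord<String, byte[]>(topic, payload));
      }
    } finally {
      producer.close(); // flushes any buffered sends before shutdown
    }
  }
}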
Use of org.apache.avro.io.BinaryEncoder in project databus by LinkedIn.
The class DummySuccessfulErrorCountingConsumer, method createSampleSchema1Events.
static DbusEventInfo[] createSampleSchema1Events(int eventsNum) throws IOException {
  Random rng = new Random();
  DbusEventInfo[] result = new DbusEventInfo[eventsNum];
  GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(SOURCE1_SCHEMA);
  for (int i = 0; i < eventsNum; ++i) {
    GenericRecord r = new GenericData.Record(SOURCE1_SCHEMA);
    String s = RngUtils.randomString(rng.nextInt(100));
    r.put("s", s);
    ByteArrayOutputStream baos = new ByteArrayOutputStream(s.length() + 100);
    BinaryEncoder out = new BinaryEncoder(baos);
    try {
      writer.write(r, out);
      out.flush();
      result[i] = new DbusEventInfo(DbusOpcode.UPSERT, 1, (short) 1, (short) 1, System.nanoTime(),
          (short) 1, SOURCE1_SCHEMAID, baos.toByteArray(), false, true);
      result[i].setEventSerializationVersion(_eventFactory.getVersion());
    } finally {
      baos.close();
    }
  }
  return result;
}
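Allocating a fresh stream and BinaryEncoder per event, as this test does, is fine for small event counts; on Avro 1.5+ the factory's reuse parameter lets a loop recycle one encoder and one buffer instead. A sketch of that pattern (schema and record source are placeholders; the DbusEventInfo wrapping from the original is elided):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

// Sketch: one stream and one encoder recycled across all events.
public final class ReusedEncoderSketch {
  public static byte[][] serializeAll(GenericRecord[] records, Schema schema) throws IOException {
    GenericDatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(schema);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder encoder = null; // passed back to the factory for reuse
    byte[][] payloads = new byte[records.length][];
    for (int i = 0; i < records.length; ++i) {
      baos.reset();
      encoder = EncoderFactory.get().binaryEncoder(baos, encoder); // reuses internal buffers
      writer.write(records[i], encoder);
      encoder.flush();
      payloads[i] = baos.toByteArray();
    }
    return payloads;
  }
}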