Use of org.apache.avro.specific.SpecificRecord in project flink by apache.
The class AvroRowDeSerializationSchemaTest, method testSpecificSerializeFromClassSeveralTimes:
@Test
public void testSpecificSerializeFromClassSeveralTimes() throws IOException {
    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();
    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(testData.f0);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(testData.f0);

    // serialize the same row several times; the schema must remain usable across calls
    serializationSchema.serialize(testData.f2);
    serializationSchema.serialize(testData.f2);
    final byte[] bytes = serializationSchema.serialize(testData.f2);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertEquals(testData.f2, actual);
}
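For reference, the same round trip can be driven with a concrete generated class instead of the test fixture. A minimal sketch, assuming a hypothetical Avro-generated SpecificRecord class User with a String field and an int field; the import package corresponds to the flink-avro module and may differ between Flink versions:

import java.io.IOException;

import org.apache.flink.formats.avro.AvroRowDeserializationSchema;  // flink-avro; package may vary by Flink version
import org.apache.flink.formats.avro.AvroRowSerializationSchema;
import org.apache.flink.types.Row;

public class AvroRowRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // "User" is a hypothetical Avro-generated SpecificRecord class, not part of the test above.
        final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(User.class);
        final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(User.class);

        // the Row arity and field order must match the fields of the generated User schema
        final Row row = Row.of("alice", 42);
        final byte[] bytes = serializationSchema.serialize(row);
        final Row copy = deserializationSchema.deserialize(bytes);

        System.out.println(row.equals(copy));  // expected: true
    }
}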
Use of org.apache.avro.specific.SpecificRecord in project flink by apache.
The class AvroRowDeSerializationSchemaTest, method testSpecificDeserializeFromSchemaSeveralTimes:
@Test
public void testSpecificDeserializeFromSchemaSeveralTimes() throws IOException {
    final Tuple3<Class<? extends SpecificRecord>, SpecificRecord, Row> testData = AvroTestUtils.getSpecificTestData();
    final String schemaString = testData.f1.getSchema().toString();
    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(schemaString);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(schemaString);

    final byte[] bytes = serializationSchema.serialize(testData.f2);

    // deserialize the same payload several times; the schema must remain usable across calls
    deserializationSchema.deserialize(bytes);
    deserializationSchema.deserialize(bytes);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertEquals(testData.f2, actual);
}
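The schema string above is taken from an existing record instance; with a generated class the same string can be obtained statically. A short fragment extending the previous sketch (same imports, same hypothetical User class):

// generated SpecificRecord classes expose their schema via the static getClassSchema()
final String schemaString = User.getClassSchema().toString();

final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(schemaString);
final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(schemaString);

final byte[] bytes = serializationSchema.serialize(Row.of("alice", 42));
// deserializing the same payload repeatedly is exactly what the test above exercises
final Row first = deserializationSchema.deserialize(bytes);
final Row second = deserializationSchema.deserialize(bytes);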
Use of org.apache.avro.specific.SpecificRecord in project registry by hortonworks.
The class DefaultAvroSerDesHandler, method handlePayloadSerialization:
@Override
public void handlePayloadSerialization(OutputStream outputStream, Object input) {
    try {
        Schema schema = AvroUtils.computeSchema(input);
        Schema.Type schemaType = schema.getType();
        if (Schema.Type.BYTES.equals(schemaType)) {
            // in case of byte arrays, no need to go through Avro as there is not much to optimize
            // and Avro expects the payload to be a ByteBuffer instead of a byte array
            outputStream.write((byte[]) input);
        } else if (Schema.Type.STRING.equals(schemaType)) {
            // get UTF-8 bytes and send those directly instead of going through Avro
            outputStream.write(input.toString().getBytes("UTF-8"));
        } else {
            BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
            DatumWriter<Object> writer;
            boolean isSpecificRecord = input instanceof SpecificRecord;
            if (isSpecificRecord) {
                writer = new SpecificDatumWriter<>(schema);
            } else {
                writer = new GenericDatumWriter<>(schema);
            }
            writer.write(input, encoder);
            encoder.flush();
        }
    } catch (IOException e) {
        throw new AvroRetryableException(e);
    } catch (RuntimeException e) {
        throw new AvroException(e);
    }
}
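The handler above only covers the write path. Below is a minimal sketch of a symmetric read path in plain Avro terms; the class name, method name, and readerSchema parameter are assumptions for illustration, not the registry's actual API:

import java.io.IOException;
import java.io.InputStream;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.commons.io.IOUtils;

public final class AvroPayloadReaderSketch {

    public static Object readPayload(InputStream in, Schema writerSchema, Schema readerSchema,
                                     boolean useSpecificRecord) throws IOException {
        Schema.Type schemaType = writerSchema.getType();
        if (Schema.Type.BYTES.equals(schemaType)) {
            // mirrors the raw-bytes shortcut taken on the write side
            return IOUtils.toByteArray(in);
        } else if (Schema.Type.STRING.equals(schemaType)) {
            // the write side emits plain UTF-8 bytes for strings
            return IOUtils.toString(in, "UTF-8");
        } else {
            BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(in, null);
            DatumReader<Object> reader;
            if (useSpecificRecord) {
                reader = new SpecificDatumReader<>(writerSchema, readerSchema);
            } else {
                reader = new GenericDatumReader<>(writerSchema, readerSchema);
            }
            return reader.read(null, decoder);
        }
    }
}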
Use of org.apache.avro.specific.SpecificRecord in project spf4j by zolyfarkas.
The class SpecificRecordAppender, method writeSerializationError:
@SuppressFBWarnings("ITC_INHERITANCE_TYPE_CHECKING")
static void writeSerializationError(final Object object, final StringBuilder sb, final Exception ex) throws IOException {
    if (STRICT_SERIALIZATION) {
        if (ex instanceof IOException) {
            throw (IOException) ex;
        } else if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        } else {
            throw new IllegalStateException(ex);
        }
    }
    sb.setLength(0);
    sb.append("{\"SerializationError\":\n");
    try (AppendableOutputStream bos = new AppendableOutputStream(sb, Charsets.UTF_8)) {
        JThrowable at = Converters.convert(ex);
        Schema schema = at.getSchema();
        SpecificDatumWriter<SpecificRecord> writer = new SpecificDatumWriter<>(schema);
        JsonEncoder jsonEncoder = EF.jsonEncoder(schema, bos, true);
        writer.write(at, jsonEncoder);
        jsonEncoder.flush();
    }
    sb.append(",\n");
    sb.append("\"ObjectAsString\":\n\"");
    EscapeJsonStringAppendableWrapper escaper = new EscapeJsonStringAppendableWrapper(sb);
    escaper.append(object.toString());
    sb.append("\"}");
}
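Leaving the spf4j-specific stream and JSON-escaping helpers aside, the core step is JSON-encoding a SpecificRecord against its own schema (EF in the snippet above appears to be an Avro EncoderFactory). A minimal, self-contained sketch of that step using only core Avro APIs:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.avro.Schema;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecord;

public final class SpecificRecordToJsonSketch {

    // Render any generated SpecificRecord instance as Avro JSON text.
    public static String toJson(SpecificRecord record) throws IOException {
        Schema schema = record.getSchema();
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        SpecificDatumWriter<SpecificRecord> writer = new SpecificDatumWriter<>(schema);
        // the boolean flag enables pretty printing, as in the snippet above
        JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, bos, true);
        writer.write(record, jsonEncoder);
        jsonEncoder.flush();
        return new String(bos.toByteArray(), StandardCharsets.UTF_8);
    }
}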
Use of org.apache.avro.specific.SpecificRecord in project samza by apache.
The class AzureBlobAvroWriter, method startNextBlob:
private void startNextBlob(Optional<IndexedRecord> optionalIndexedRecord) throws IOException {
    if (currentBlobWriterComponents != null) {
        LOG.info("Starting new blob as current blob size is " + currentBlobWriterComponents.azureBlobOutputStream.getSize()
            + " and max blob size is " + maxBlobSize + " or number of records is " + recordsInCurrentBlob
            + " and max records in blob is " + maxRecordsPerBlob);
        currentBlobWriterComponents.dataFileWriter.flush();
        currentBlobWriterComponents.azureBlobOutputStream.releaseBuffer();
        recordsInCurrentBlob = 0;
    }

    // optionalIndexedRecord is the first message in this case.
    if (datumWriter == null) {
        if (optionalIndexedRecord.isPresent()) {
            IndexedRecord record = optionalIndexedRecord.get();
            schema = record.getSchema();
            if (record instanceof SpecificRecord) {
                datumWriter = new SpecificDatumWriter<>(schema);
            } else {
                datumWriter = new GenericDatumWriter<>(schema);
            }
        } else {
            throw new IllegalStateException("Writing without schema setup.");
        }
    }

    String blobURL;
    if (useRandomStringInBlobName) {
        blobURL = String.format(BLOB_NAME_RANDOM_STRING_AVRO, blobURLPrefix,
            UTC_FORMATTER.format(System.currentTimeMillis()),
            UUID.randomUUID().toString().substring(0, 8), compression.getFileExtension());
    } else {
        blobURL = String.format(BLOB_NAME_AVRO, blobURLPrefix,
            UTC_FORMATTER.format(System.currentTimeMillis()), compression.getFileExtension());
    }
    LOG.info("Creating new blob: {}", blobURL);

    BlockBlobAsyncClient blockBlobAsyncClient = containerAsyncClient.getBlobAsyncClient(blobURL).getBlockBlobAsyncClient();
    DataFileWriter<IndexedRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
    AzureBlobOutputStream azureBlobOutputStream;
    try {
        azureBlobOutputStream = new AzureBlobOutputStream(blockBlobAsyncClient, blobThreadPool, metrics,
            blobMetadataGeneratorFactory, blobMetadataGeneratorConfig, streamName,
            flushTimeoutMs, maxBlockFlushThresholdSize, compression);
    } catch (Exception e) {
        throw new SamzaException("Unable to create AzureBlobOutputStream", e);
    }
    dataFileWriter.create(schema, azureBlobOutputStream);
    dataFileWriter.setFlushOnEveryBlock(false);

    this.currentBlobWriterComponents = new BlobWriterComponents(dataFileWriter, azureBlobOutputStream, blockBlobAsyncClient);
    allBlobWriterComponents.add(this.currentBlobWriterComponents);
    LOG.info("Created new blob: {}", blobURL);
}
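The SpecificDatumWriter/GenericDatumWriter selection and the DataFileWriter setup above are independent of Azure. A minimal sketch of the same pattern with an in-memory stream standing in for AzureBlobOutputStream:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecord;

public final class AvroContainerFileSketch {

    // Write a single record to an Avro container file held in memory.
    public static byte[] writeSingleRecord(IndexedRecord record) throws IOException {
        Schema schema = record.getSchema();

        // pick the specific writer for generated classes, the generic writer otherwise
        DatumWriter<IndexedRecord> datumWriter;
        if (record instanceof SpecificRecord) {
            datumWriter = new SpecificDatumWriter<>(schema);
        } else {
            datumWriter = new GenericDatumWriter<>(schema);
        }

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (DataFileWriter<IndexedRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
            dataFileWriter.create(schema, out);
            dataFileWriter.setFlushOnEveryBlock(false);
            dataFileWriter.append(record);
        }
        return out.toByteArray();
    }
}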