Use of org.apache.avro.io.DecoderFactory in the Talend "components" project:
class JdbcComponentTestIT, method testGetData.
@Test
public void testGetData() throws java.io.IOException {
    // given: dataset properties (with embedded schema) plus the datastore they depend on
    UiSpecsPropertiesDto propertiesDto = new UiSpecsPropertiesDto();
    propertiesDto.setProperties(getFileAsObjectNode("jdbc_data_set_properties_with_schema.json"));
    propertiesDto.setDependencies(singletonList(getJdbcDataStoreProperties()));
    // Fetch the Avro schema for the dataset so the JSON payload can be decoded below.
    Response schemaResponse = given()
            .content(propertiesDto)
            .contentType(APPLICATION_JSON_UTF8_VALUE)
            .accept(APPLICATION_JSON_UTF8_VALUE)
            .expect().statusCode(200).log().ifError()
            .post(getVersionPrefix() + "/runtimes/schema");
    Schema schema = new Schema.Parser().parse(schemaResponse.asInputStream());
    // when: request the dataset content as Avro-JSON
    Response response = given()
            .content(propertiesDto)
            .contentType(APPLICATION_JSON_UTF8_VALUE)
            .accept(RuntimesController.AVRO_JSON_MIME_TYPE_OFFICIAL_INVALID)
            .expect().statusCode(200).log().ifError()
            .post(getVersionPrefix() + "/runtimes/data");
    // then: the payload decodes against the advertised schema and matches the fixture values
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, response.asInputStream());
    assertRecordsEqualsToTestValues(reader, decoder);
}
Use of org.apache.avro.io.DecoderFactory in the LinkedIn "pinot" project:
class KafkaAvroMessageDecoder, method init.
@Override
public void init(Map<String, String> props, Schema indexingSchema, String topicName) throws Exception {
    // Java Strings are immutable: the original code called StringUtils.chomp(...) and
    // discarded the result, so a trailing '/' was never removed and the URL built below
    // could become ".../latest_with_type=..." with a doubled slash. Assign it back.
    schemaRegistryBaseUrl = StringUtils.chomp(props.get(SCHEMA_REGISTRY_REST_URL), "/");
    // Schema name defaults to the topic name unless explicitly overridden in the config.
    String avroSchemaName = topicName;
    String configuredSchemaName = props.get(SCHEMA_REGISTRY_SCHEMA_NAME);
    if (configuredSchemaName != null && !configuredSchemaName.isEmpty()) {
        avroSchemaName = configuredSchemaName;
    }
    // Pre-fetch the latest schema from the registry as the fallback/default reader schema.
    defaultAvroSchema = fetchSchema(new URL(schemaRegistryBaseUrl + "/latest_with_type=" + avroSchemaName));
    this.avroRecordConvetrer = new AvroRecordToPinotRowGenerator(indexingSchema);
    // DecoderFactory.get() replaces the deprecated constructor; the shared factory is immutable.
    this.decoderFactory = DecoderFactory.get();
    // Cache of writer schemas keyed by their MD5 fingerprint, filled lazily during decoding.
    md5ToAvroSchemaMap = new HashMap<>();
}
Use of org.apache.avro.io.DecoderFactory in the Talend "components" project:
class JdbcComponentTestIT, method testGetDataBinary.
@Test
public void testGetDataBinary() throws java.io.IOException {
    // given: dataset properties (with embedded schema) plus the datastore they depend on
    UiSpecsPropertiesDto propertiesDto = new UiSpecsPropertiesDto();
    propertiesDto.setProperties(getFileAsObjectNode("jdbc_data_set_properties_with_schema.json"));
    propertiesDto.setDependencies(singletonList(getJdbcDataStoreProperties()));
    // when: fetch the Avro schema, then the dataset content as Avro binary
    Response schemaResponse = given()
            .content(propertiesDto)
            .contentType(APPLICATION_JSON_UTF8_VALUE)
            .accept(APPLICATION_JSON_UTF8_VALUE)
            .expect().statusCode(200).log().ifError()
            .post(getVersionPrefix() + "/runtimes/schema");
    Schema schema = new Schema.Parser().parse(schemaResponse.asInputStream());
    Response response = given()
            .content(propertiesDto)
            .contentType(APPLICATION_JSON_UTF8_VALUE)
            .accept(RuntimesController.AVRO_BINARY_MIME_TYPE_OFFICIAL_INVALID)
            .expect().statusCode(200).log().ifError()
            .post(getVersionPrefix() + "/runtimes/data");
    // then: the binary payload decodes against the advertised schema and matches the fixture values
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    Decoder decoder = DecoderFactory.get().binaryDecoder(response.asInputStream(), null);
    assertRecordsEqualsToTestValues(reader, decoder);
}
Use of org.apache.avro.io.DecoderFactory in the Apache "incubator-gobblin" project:
class AvroUtils, method convertRecordSchema.
/**
 * Change the schema of an Avro record.
 * @param record The Avro record whose schema is to be changed.
 * @param newSchema The target schema. It must be compatible as reader schema with record.getSchema() as writer schema.
 * @return a new Avro record with the new schema, or {@code record} itself if its schema already equals {@code newSchema}.
 * @throws IOException if conversion failed.
 */
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException {
    // Fast path: nothing to convert when the schemas already match.
    if (record.getSchema().equals(newSchema)) {
        return record;
    }
    try {
        // Round-trip through Avro binary: serialize with the writer schema, then
        // deserialize with (writer, reader) schemas to perform schema resolution.
        // DecoderFactory.get() replaces the deprecated DecoderFactory constructor.
        BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(recordToByteArray(record), null);
        DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema);
        return reader.read(null, decoder);
    } catch (IOException e) {
        // Wrap with both schemas for context; keep the original exception as the cause.
        // (Also fixes the "Origianl" typo in the original message.)
        throw new IOException(String.format("Cannot convert avro record to new schema. Original schema = %s, new schema = %s", record.getSchema(), newSchema), e);
    }
}
Aggregations