Use of org.apache.avro.Schema in the Apache Camel project.
Class AvroDataFormat, method loadSchema:
/**
 * Resolves the named class and, provided it is an Avro {@code GenericContainer},
 * obtains its {@link Schema} by invoking the generated {@code getSchema()}
 * accessor on a freshly created instance.
 *
 * @param className fully qualified name of the Avro-generated class
 * @return the Avro schema reported by the class
 * @throws CamelException if the class is not a GenericContainer or getSchema fails
 * @throws ClassNotFoundException if either class cannot be resolved
 */
protected Schema loadSchema(String className) throws CamelException, ClassNotFoundException {
    // must use same class loading procedure to ensure working in OSGi
    Class<?> instanceClass = camelContext.getClassResolver().resolveMandatoryClass(className);
    Class<?> genericContainer = camelContext.getClassResolver().resolveMandatoryClass(GENERIC_CONTAINER_CLASSNAME);
    // Guard clause: reject anything that is not an Avro generated container type.
    if (!genericContainer.isAssignableFrom(instanceClass)) {
        throw new CamelException("Class " + instanceClass + " must be instanceof " + GENERIC_CONTAINER_CLASSNAME);
    }
    try {
        // Avro-generated classes expose a no-arg getSchema(); create the
        // instance through the Camel injector so container wiring applies.
        Object instance = camelContext.getInjector().newInstance(instanceClass);
        Method getSchema = instanceClass.getMethod("getSchema");
        return (Schema) getSchema.invoke(instance);
    } catch (Exception ex) {
        throw new CamelException("Error calling getSchema on " + instanceClass, ex);
    }
}
Use of org.apache.avro.Schema in the Apache Storm project.
Class CachedSchemas, method getSchema:
/**
 * Returns the parsed Avro {@link Schema} for the given JSON schema string,
 * caching the result so each distinct string is parsed only once.
 *
 * @param schemaString Avro schema definition in JSON form
 * @return the parsed (possibly cached) schema
 */
public Schema getSchema(String schemaString) {
    // computeIfAbsent replaces the get/null-check/put sequence; it is also
    // atomic when the backing map is a ConcurrentHashMap, avoiding duplicate
    // parses under concurrent first access.
    return cache.computeIfAbsent(schemaString, s -> new Schema.Parser().parse(s));
}
Use of org.apache.avro.Schema in the Apache Storm project.
Class AbstractAvroSerializer, method read:
/**
 * Kryo deserialization hook: reads the writer schema (serialized as a string
 * by the matching write side) followed by the Avro-encoded record bytes, and
 * decodes them into a {@link GenericContainer}.
 *
 * @param kryo   the Kryo instance performing deserialization
 * @param input  source carrying the schema string and then the record bytes
 * @param aClass target class, required by the Kryo contract
 * @return the decoded Avro record
 */
@Override
public GenericContainer read(Kryo kryo, Input input, Class<GenericContainer> aClass) {
    // The schema string precedes the payload in the stream.
    Schema writerSchema = this.getSchema(input.readString());
    GenericDatumReader<GenericContainer> datumReader = new GenericDatumReader<>(writerSchema);
    Decoder decoder = DecoderFactory.get().directBinaryDecoder(input, null);
    try {
        return datumReader.read(null, decoder);
    } catch (IOException e) {
        // Kryo serializers cannot throw checked exceptions; rethrow unchecked.
        throw new RuntimeException(e);
    }
}
Use of org.apache.avro.Schema in the Apache Hive project.
Class AvroDeserializer, method deserializeSingleItemNullableUnion:
/**
 * Deserializes a datum whose record schema is a union (typically [null, T]).
 * Resolves which union branch the datum belongs to; returns null immediately
 * for the NULL branch, otherwise resolves the matching branch of the file
 * schema (when present) and delegates to worker() for the actual conversion.
 *
 * @param datum        the raw Avro value to deserialize (may be null)
 * @param fileSchema   schema the data was written with; may be null, and may
 *                     itself be a union that needs its own branch resolution
 * @param recordSchema the reader-side union schema for this field
 * @return the deserialized value, or null for the NULL union branch
 * @throws AvroSerdeException if conversion in worker() fails
 */
private Object deserializeSingleItemNullableUnion(Object datum, Schema fileSchema, Schema recordSchema) throws AvroSerdeException {
// Determine index of value
int tag = GenericData.get().resolveUnion(recordSchema, datum);
Schema schema = recordSchema.getTypes().get(tag);
if (schema.getType().equals(Type.NULL)) {
return null;
}
// Pick the file-side schema branch corresponding to this datum, if any.
Schema currentFileSchema = null;
if (fileSchema != null) {
if (fileSchema.getType() == Type.UNION) {
// we need to get the correct tag
try {
// Re-resolve against the file schema: its branch order may differ
// from the record schema's, so the tag cannot be reused as-is.
tag = GenericData.get().resolveUnion(fileSchema, datum);
currentFileSchema = fileSchema.getTypes().get(tag);
} catch (UnresolvedUnionException e) {
if (LOG.isDebugEnabled()) {
String datumClazz = null;
if (datum != null) {
datumClazz = datum.getClass().getName();
}
String msg = "File schema union could not resolve union. fileSchema = " + fileSchema + ", recordSchema = " + recordSchema + ", datum class = " + datumClazz + ": " + e;
LOG.debug(msg, e);
}
// This occurs when the datum type is different between
// the file and record schema. For example if datum is long
// and the field in the file schema is int. See HIVE-9462.
// in this case we will re-use the record schema as the file
// schema, Ultimately we need to clean this code up and will
// do as a follow-on to HIVE-9462.
currentFileSchema = schema;
}
} else {
// Non-union file schema applies directly.
currentFileSchema = fileSchema;
}
}
return worker(datum, currentFileSchema, schema, SchemaToTypeInfo.generateTypeInfo(schema, null));
}
Use of org.apache.avro.Schema in the Apache Hive project.
Class AvroDeserializer, method workerBase:
// The actual deserialization may involve nested records, which require recursion.
/**
 * Deserializes every named column of the given record into objectRow,
 * delegating per-value conversion to worker().
 *
 * @param objectRow   list the deserialized values are appended to
 * @param fileSchema  schema the data was written with; a nullable (union with
 *                    null) schema is unwrapped before field lookup
 * @param columnNames column names, positionally aligned with columnTypes
 * @param columnTypes Hive type info per column
 * @param record      the Avro record being deserialized
 * @return objectRow, with one entry appended per column
 * @throws AvroSerdeException if conversion of any value fails
 */
private List<Object> workerBase(List<Object> objectRow, Schema fileSchema, List<String> columnNames, List<TypeInfo> columnTypes, GenericRecord record) throws AvroSerdeException {
    for (int i = 0; i < columnNames.size(); i++) {
        String columnName = columnNames.get(i);
        Object datum = record.get(columnName);
        Schema recordFieldSchema = record.getSchema().getField(columnName).schema();
        // A nullable file schema must be unwrapped to its non-null branch
        // before the field can be looked up by name.
        Schema.Field fileField;
        if (AvroSerdeUtils.isNullableType(fileSchema)) {
            fileField = AvroSerdeUtils.getOtherTypeFromNullableType(fileSchema).getField(columnName);
        } else {
            fileField = fileSchema.getField(columnName);
        }
        Schema fileFieldSchema = fileField == null ? null : fileField.schema();
        objectRow.add(worker(datum, fileFieldSchema, recordFieldSchema, columnTypes.get(i)));
    }
    return objectRow;
}
Aggregations