Use of org.apache.avro.Conversion in the Apache Avro project (apache/avro):
class ProtobufData, method getSchema.
/**
 * Returns the Avro {@link Schema} corresponding to a protobuf message
 * {@link Descriptor}.
 * <p>
 * A thread-local map ({@code SEEN}) caches in-progress schemas so that
 * self-referencing (recursive) message types terminate; the outermost call
 * owns the map and clears it when it finishes.
 *
 * @param descriptor the protobuf message descriptor to convert
 * @return the equivalent Avro schema (a registered {@link Conversion}'s
 *         recommended schema takes precedence over a generated record)
 */
public Schema getSchema(Descriptor descriptor) {
  Map<Descriptor, Schema> seen = SEEN.get();
  if (seen.containsKey(descriptor)) // stop recursion
    return seen.get(descriptor);
  boolean first = seen.isEmpty(); // outermost call is responsible for cleanup
  try {
    // A registered conversion (logical type) overrides the generated record schema.
    Conversion conversion = getConversionByDescriptor(descriptor);
    if (conversion != null) {
      Schema converted = conversion.getRecommendedSchema();
      seen.put(descriptor, converted);
      return converted;
    }
    Schema result = Schema.createRecord(descriptor.getName(), null,
        getNamespace(descriptor.getFile(), descriptor.getContainingType()), false);
    // Publish the (still field-less) record before recursing so cycles resolve.
    seen.put(descriptor, result);
    List<Field> fields = new ArrayList<>(descriptor.getFields().size());
    for (FieldDescriptor f : descriptor.getFields())
      fields.add(Accessor.createField(f.getName(), getSchema(f), null, getDefault(f)));
    result.setFields(fields);
    return result;
  } finally {
    // BUGFIX: the conversion branch previously returned before entering the
    // try block, so a top-level descriptor with a conversion left the
    // thread-local map populated forever; seen.isEmpty() was then false on
    // every later call and this clear() became unreachable. Doing the lookup
    // inside try/finally guarantees the outermost call always cleans up.
    if (first)
      seen.clear();
  }
}
Use of org.apache.avro.Conversion in the Apache Avro project (apache/avro):
class ReflectData, method createSchema.
/**
 * Maps a Java reflective {@link Type} to an Avro {@link Schema}.
 * <p>
 * Handles, in order: generic arrays, parameterized Map/Collection types,
 * boxed/primitive Byte/Short/Character (widened to INT with a class prop),
 * and plain {@link Class} values (arrays, {@code @AvroSchema}-annotated
 * classes, strings, byte buffers, registered conversions, enums, fixeds,
 * specific records, and finally reflected records). Anything unmatched is
 * delegated to {@code super.createSchema}.
 *
 * @param type  the Java type to convert
 * @param names named schemas already created, keyed by full class name;
 *              also used to break recursion for record types
 * @return the Avro schema for {@code type}
 */
@Override
protected Schema createSchema(Type type, Map<String, Schema> names) {
  if (type instanceof GenericArrayType) {
    // generic array
    Type component = ((GenericArrayType) type).getGenericComponentType();
    if (component == Byte.TYPE) // byte array maps to Avro BYTES, not ARRAY
      return Schema.create(Schema.Type.BYTES);
    Schema result = Schema.createArray(createSchema(component, names));
    setElement(result, component);
    return result;
  } else if (type instanceof ParameterizedType) {
    ParameterizedType ptype = (ParameterizedType) type;
    Class raw = (Class) ptype.getRawType();
    Type[] params = ptype.getActualTypeArguments();
    if (Map.class.isAssignableFrom(raw)) {
      // Map: keys must be strings (or stringable) for a native Avro map;
      // otherwise fall back to a non-string-map record encoding.
      Class key = (Class) params[0];
      if (isStringable(key)) {
        // Stringable key: record the key class so it can be reconstructed
        Schema schema = Schema.createMap(createSchema(params[1], names));
        schema.addProp(KEY_CLASS_PROP, key.getName());
        return schema;
      } else if (key != String.class) {
        Schema schema = createNonStringMapSchema(params[0], params[1], names);
        schema.addProp(CLASS_PROP, raw.getName());
        return schema;
      }
      // key == String.class and not stringable: fall through to super below
    } else if (Collection.class.isAssignableFrom(raw)) {
      // Collection maps to an Avro array; remember the concrete class
      if (params.length != 1)
        throw new AvroTypeException("No array type specified.");
      Schema schema = Schema.createArray(createSchema(params[0], names));
      schema.addProp(CLASS_PROP, raw.getName());
      return schema;
    }
  } else if ((type == Byte.class) || (type == Byte.TYPE)) {
    // Avro has no byte type: widen to INT, tag with the original class
    Schema result = Schema.create(Schema.Type.INT);
    result.addProp(CLASS_PROP, Byte.class.getName());
    return result;
  } else if ((type == Short.class) || (type == Short.TYPE)) {
    Schema result = Schema.create(Schema.Type.INT);
    result.addProp(CLASS_PROP, Short.class.getName());
    return result;
  } else if ((type == Character.class) || (type == Character.TYPE)) {
    Schema result = Schema.create(Schema.Type.INT);
    result.addProp(CLASS_PROP, Character.class.getName());
    return result;
  } else if (type instanceof Class) {
    // Class
    Class<?> c = (Class<?>) type;
    while (c.isAnonymousClass()) {
      // anonymous classes have no usable name; use the nearest named supertype
      c = c.getSuperclass();
    }
    // primitives: handled by the base implementation
    if (c.isPrimitive() || c == Void.class || c == Boolean.class || c == Integer.class || c == Long.class
        || c == Float.class || c == Double.class || c == Byte.class || c == Short.class || c == Character.class)
      return super.createSchema(type, names);
    if (c.isArray()) {
      // array
      Class component = c.getComponentType();
      if (component == Byte.TYPE) {
        // byte array
        Schema result = Schema.create(Schema.Type.BYTES);
        result.addProp(CLASS_PROP, c.getName());
        return result;
      }
      Schema result = Schema.createArray(createSchema(component, names));
      result.addProp(CLASS_PROP, c.getName());
      setElement(result, component);
      return result;
    }
    // an explicit @AvroSchema annotation wins over all reflection below
    AvroSchema explicit = c.getAnnotation(AvroSchema.class);
    if (explicit != null)
      return new Schema.Parser().parse(explicit.value());
    if (CharSequence.class.isAssignableFrom(c)) // String
      return Schema.create(Schema.Type.STRING);
    if (ByteBuffer.class.isAssignableFrom(c)) // bytes
      return Schema.create(Schema.Type.BYTES);
    if (Collection.class.isAssignableFrom(c)) // raw collection: element type unknown
      throw new AvroRuntimeException("Can't find element type of Collection");
    // a registered logical-type conversion supplies its own schema
    Conversion<?> conversion = getConversionByClass(c);
    if (conversion != null) {
      return conversion.getRecommendedSchema();
    }
    String fullName = c.getName();
    Schema schema = names.get(fullName);
    if (schema == null) {
      // Docstring
      AvroDoc annotatedDoc = c.getAnnotation(AvroDoc.class);
      String doc = (annotatedDoc != null) ? annotatedDoc.value() : null;
      String name = c.getSimpleName();
      String space = c.getPackage() == null ? "" : c.getPackage().getName();
      if (c.getEnclosingClass() != null) // nested class: namespace from enclosing class
        space = c.getEnclosingClass().getName().replace('$', '.');
      Union union = c.getAnnotation(Union.class);
      if (union != null) {
        // union annotated
        return getAnnotatedUnion(union, names);
      } else if (isStringable(c)) {
        // Stringable: serialize via toString, tag the class for parsing back
        Schema result = Schema.create(Schema.Type.STRING);
        result.addProp(CLASS_PROP, c.getName());
        return result;
      } else if (c.isEnum()) {
        // Enum
        List<String> symbols = new ArrayList<>();
        Enum[] constants = (Enum[]) c.getEnumConstants();
        for (Enum constant : constants)
          symbols.add(constant.name());
        schema = Schema.createEnum(name, doc, space, symbols);
        consumeAvroAliasAnnotation(c, schema);
      } else if (GenericFixed.class.isAssignableFrom(c)) {
        // fixed
        int size = c.getAnnotation(FixedSize.class).value();
        schema = Schema.createFixed(name, doc, space, size);
        consumeAvroAliasAnnotation(c, schema);
      } else if (IndexedRecord.class.isAssignableFrom(c)) {
        // specific
        return super.createSchema(type, names);
      } else {
        // record: reflect over instance fields
        List<Schema.Field> fields = new ArrayList<>();
        boolean error = Throwable.class.isAssignableFrom(c);
        schema = Schema.createRecord(name, doc, space, error);
        consumeAvroAliasAnnotation(c, schema);
        // publish before recursing into field schemas so cycles resolve
        names.put(c.getName(), schema);
        for (Field field : getCachedFields(c))
          // skip transient/static fields and @AvroIgnore'd fields
          if ((field.getModifiers() & (Modifier.TRANSIENT | Modifier.STATIC)) == 0
              && !field.isAnnotationPresent(AvroIgnore.class)) {
            Schema fieldSchema = createFieldSchema(field, names);
            // Docstring
            annotatedDoc = field.getAnnotation(AvroDoc.class);
            doc = (annotatedDoc != null) ? annotatedDoc.value() : null;
            Object defaultValue = createSchemaDefaultValue(type, field, fieldSchema);
            // Rename fields
            AvroName annotatedName = field.getAnnotation(AvroName.class);
            String fieldName = (annotatedName != null) ? annotatedName.value() : field.getName();
            // a synthetic outer-instance reference means a non-static inner class
            if (STRING_OUTER_PARENT_REFERENCE.equals(fieldName)) {
              throw new AvroTypeException("Class " + fullName + " must be a static inner class");
            }
            Schema.Field recordField = new Schema.Field(fieldName, fieldSchema, doc, defaultValue);
            // add metadata
            AvroMeta[] metadata = field.getAnnotationsByType(AvroMeta.class);
            for (AvroMeta meta : metadata) {
              if (recordField.getObjectProps().containsKey(meta.key())) {
                throw new AvroTypeException("Duplicate field prop key: " + meta.key());
              }
              recordField.addProp(meta.key(), meta.value());
            }
            for (Schema.Field f : fields) {
              if (f.name().equals(fieldName))
                throw new AvroTypeException("double field entry: " + fieldName);
            }
            consumeFieldAlias(field, recordField);
            fields.add(recordField);
          }
        if (error) // add Throwable message
          fields.add(new Schema.Field("detailMessage", THROWABLE_MESSAGE, null, null));
        schema.setFields(fields);
        AvroMeta[] metadata = c.getAnnotationsByType(AvroMeta.class);
        for (AvroMeta meta : metadata) {
          if (schema.getObjectProps().containsKey(meta.key())) {
            throw new AvroTypeException("Duplicate type prop key: " + meta.key());
          }
          schema.addProp(meta.key(), meta.value());
        }
      }
      names.put(fullName, schema);
    }
    return schema;
  }
  return super.createSchema(type, names);
}
Use of org.apache.avro.Conversion in the Apache Avro project (apache/avro):
class FastReaderBuilder, method initializeRecordReader.
/**
 * Populates a {@link RecordReader} with the execution steps derived from a
 * resolved {@link RecordAdjust}: one step per writer field (either a skip or
 * a read-and-set), followed by one defaulting step for each reader field not
 * present in the writer's schema.
 *
 * @param recordReader the reader to initialize
 * @param action       the schema-resolution result describing field mapping
 * @return the same {@code recordReader}, fully initialized
 * @throws IOException if creating the probe instance fails
 */
private RecordReader initializeRecordReader(RecordReader recordReader, RecordAdjust action) throws IOException {
  recordReader.startInitialization();
  // Probe instance used only to discover which conversions apply per field.
  Object probe = action.instanceSupplier.newInstance(null, action.reader);
  IntFunction<Conversion<?>> conversions = getConversionSupplier(probe);
  int stepCount = action.fieldActions.length + action.readerOrder.length - action.firstDefault;
  ExecutionStep[] steps = new ExecutionStep[stepCount];
  int step = 0;
  int nextReaderField = 0;
  // Writer-driven steps: skip unknown fields, otherwise read into the
  // corresponding reader field (readerOrder is consumed in sequence).
  while (step < action.fieldActions.length) {
    Action fieldAction = action.fieldActions[step];
    if (fieldAction instanceof Skip) {
      steps[step] = (record, decoder) -> GenericDatumReader.skip(fieldAction.writer, decoder);
    } else {
      Field target = action.readerOrder[nextReaderField++];
      FieldReader fieldReader = getReaderFor(fieldAction, conversions.apply(target.pos()));
      steps[step] = createFieldSetter(target, fieldReader);
    }
    step++;
  }
  // Remaining reader fields were absent from the writer: apply defaults.
  while (step < steps.length) {
    steps[step++] = getDefaultingStep(action.readerOrder[nextReaderField++]);
  }
  recordReader.finishInitialization(steps, action.reader, action.instanceSupplier);
  return recordReader;
}
Use of org.apache.avro.Conversion in the avro project by a0x8o:
class ProtobufData, method getSchema.
/**
 * Returns the Avro {@link Schema} corresponding to a protobuf message
 * {@link Descriptor}.
 * <p>
 * A thread-local map ({@code SEEN}) caches in-progress schemas so that
 * self-referencing (recursive) message types terminate; the outermost call
 * owns the map and clears it when it finishes.
 *
 * @param descriptor the protobuf message descriptor to convert
 * @return the equivalent Avro schema (a registered {@link Conversion}'s
 *         recommended schema takes precedence over a generated record)
 */
public Schema getSchema(Descriptor descriptor) {
  Map<Descriptor, Schema> seen = SEEN.get();
  if (seen.containsKey(descriptor)) // stop recursion
    return seen.get(descriptor);
  boolean first = seen.isEmpty(); // outermost call is responsible for cleanup
  try {
    // A registered conversion (logical type) overrides the generated record schema.
    Conversion conversion = getConversionByDescriptor(descriptor);
    if (conversion != null) {
      Schema converted = conversion.getRecommendedSchema();
      seen.put(descriptor, converted);
      return converted;
    }
    Schema result = Schema.createRecord(descriptor.getName(), null,
        getNamespace(descriptor.getFile(), descriptor.getContainingType()), false);
    // Publish the (still field-less) record before recursing so cycles resolve.
    seen.put(descriptor, result);
    List<Field> fields = new ArrayList<>(descriptor.getFields().size());
    for (FieldDescriptor f : descriptor.getFields())
      fields.add(Accessor.createField(f.getName(), getSchema(f), null, getDefault(f)));
    result.setFields(fields);
    return result;
  } finally {
    // BUGFIX: the conversion branch previously returned before entering the
    // try block, so a top-level descriptor with a conversion left the
    // thread-local map populated forever; seen.isEmpty() was then false on
    // every later call and this clear() became unreachable. Doing the lookup
    // inside try/finally guarantees the outermost call always cleans up.
    if (first)
      seen.clear();
  }
}
Use of org.apache.avro.Conversion in the Apache Avro project (apache/avro):
class GenericData, method deepCopy.
/**
 * Produces a deep copy of {@code value} according to {@code schema}.
 * <p>
 * When the schema carries a logical type with a registered conversion for the
 * value's class, the value is first converted to its raw representation,
 * deep-copied, and converted back; otherwise a plain structural copy is made.
 *
 * @param schema the schema describing {@code value}
 * @param value  the value to copy (may be {@code null})
 * @return a deep copy of {@code value}, or {@code null} if it was {@code null}
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public <T> T deepCopy(Schema schema, T value) {
  if (value == null) {
    return null;
  }
  LogicalType logical = schema.getLogicalType();
  // Look up a conversion only when a logical type is present.
  Conversion conversion = (logical == null) ? null : getConversionByClass(value.getClass(), logical);
  if (conversion == null) {
    // No logical type, or no registered conversion: plain structural copy.
    return (T) deepCopyRaw(schema, value);
  }
  // Round-trip through the raw representation: logical -> raw, copy, raw -> logical.
  Object rawValue = Conversions.convertToRawType(value, schema, logical, conversion);
  Object rawCopy = deepCopyRaw(schema, rawValue);
  return (T) Conversions.convertToLogicalType(rawCopy, schema, logical, conversion);
}
Aggregations