Use of org.apache.avro.LogicalType in the apache/parquet-mr project:
class AvroWriteSupport, method writeValue.
/**
 * Dispatches a value to the appropriate write routine.
 * Value MUST not be null.
 *
 * <p>If the non-null variant of the Avro schema carries a logical type, the
 * value is first converted to its underlying representation using the model's
 * conversion registered for the value's class; otherwise it is written as-is.
 *
 * @param type the Parquet type
 * @param avroSchema the Avro schema
 * @param value a non-null value to write
 */
private void writeValue(Type type, Schema avroSchema, Object value) {
  Schema schema = AvroSchemaConverter.getNonNull(avroSchema);
  LogicalType logicalType = schema.getLogicalType();
  Object datum = value;
  if (logicalType != null) {
    // Look up a conversion keyed by the runtime class and the logical type;
    // convert(...) handles the case where no conversion is registered.
    Conversion<?> conversion = model.getConversionByClass(value.getClass(), logicalType);
    datum = convert(schema, logicalType, conversion, value);
  }
  writeValueWithoutConversion(type, schema, datum);
}
Use of org.apache.avro.LogicalType in the apache/parquet-mr project:
class AvroIndexedRecordConverter, method newConverter.
/**
 * Builds a Converter for the given Avro schema and matching Parquet type.
 *
 * <p>Primitive Avro types map to the corresponding field converters; nested
 * types (record, array, map, union, fixed, enum) recurse into dedicated
 * converter classes. Any unmapped type is rejected.
 */
private static Converter newConverter(Schema schema, Type type, GenericData model, ParentValueContainer setter) {
  LogicalType logicalType = schema.getLogicalType();
  // the expected type is always null because it is determined by the parent
  // datum class, which never helps for generic. when logical types are added
  // to specific, this should pass the expected type here.
  Conversion<?> conversion = model.getConversionFor(logicalType);
  ParentValueContainer parent = ParentValueContainer.getConversionContainer(setter, conversion, schema);
  switch (schema.getType()) {
    case BOOLEAN:
      return new AvroConverters.FieldBooleanConverter(parent);
    case INT:
      return new AvroConverters.FieldIntegerConverter(parent);
    case LONG:
      return new AvroConverters.FieldLongConverter(parent);
    case FLOAT:
      return new AvroConverters.FieldFloatConverter(parent);
    case DOUBLE:
      return new AvroConverters.FieldDoubleConverter(parent);
    case BYTES:
      return new AvroConverters.FieldByteBufferConverter(parent);
    case STRING:
      return new AvroConverters.FieldStringConverter(parent);
    case RECORD:
      return new AvroIndexedRecordConverter(parent, type.asGroupType(), schema, model);
    case ENUM:
      return new FieldEnumConverter(parent, schema, model);
    case ARRAY:
      return new AvroArrayConverter(parent, type.asGroupType(), schema, model);
    case MAP:
      return new MapConverter(parent, type.asGroupType(), schema, model);
    case UNION:
      return new AvroUnionConverter(parent, type, schema, model);
    case FIXED:
      return new FieldFixedConverter(parent, schema, model);
    default:
      throw new UnsupportedOperationException(
          String.format("Cannot convert Avro type: %s (Parquet type: %s) ", schema, type));
  }
}
Use of org.apache.avro.LogicalType in the apache/parquet-mr project:
class RecordWithUUIDList, method testPairRecord.
/**
 * Round-trips a record containing a custom "pair" logical type through
 * Parquet using {@link ReflectData}: registers a {@code Pair} conversion and
 * the "pair" logical type, verifies the reflected schema, then writes and
 * reads one record and checks the pair's values survive.
 */
@Test
@SuppressWarnings("unchecked")
public void testPairRecord() throws IOException {
  ReflectData model = new ReflectData();
  // Teach the model how to map Pair <-> a two-field Avro record.
  model.addLogicalTypeConversion(new Conversion<Pair>() {
    @Override
    public Class<Pair> getConvertedType() {
      return Pair.class;
    }

    @Override
    public String getLogicalTypeName() {
      return "pair";
    }

    @Override
    public Pair fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
      return Pair.of(value.get(0), value.get(1));
    }

    @Override
    public IndexedRecord toRecord(Pair value, Schema schema, LogicalType type) {
      GenericData.Record record = new GenericData.Record(schema);
      record.put(0, value.first);
      record.put(1, value.second);
      return record;
    }
  });
  // Register the "pair" logical type so it is recognized when reading schemas.
  LogicalTypes.register("pair", new LogicalTypes.LogicalTypeFactory() {
    private final LogicalType PAIR = new LogicalType("pair");

    @Override
    public LogicalType fromSchema(Schema schema) {
      return PAIR;
    }
  });

  Schema schema = model.getSchema(PairRecord.class);
  // Fixed assertion message: this check is on the namespace, not the name
  // (the original message was a copy-paste of the name assertion below).
  Assert.assertEquals("Should have the correct record namespace",
      "org.apache.parquet.avro.TestReflectLogicalTypes$", schema.getNamespace());
  Assert.assertEquals("Should have the correct record name", "PairRecord", schema.getName());
  Assert.assertEquals("Should have the correct logical type", "pair",
      LogicalTypes.fromSchema(schema.getField("pair").schema()).getName());

  PairRecord record = new PairRecord();
  record.pair = Pair.of(34L, 35L);
  // NOTE: removed an unused local List<PairRecord> "expected" that was built
  // but never asserted against — dead code in the original test.

  File test = write(model, schema, record);
  Pair<Long, Long> actual = AvroTestUtil.<PairRecord>read(model, schema, test).get(0).pair;
  Assert.assertEquals("Data should match after serialization round-trip", 34L, (long) actual.first);
  Assert.assertEquals("Data should match after serialization round-trip", 35L, (long) actual.second);
}
Use of org.apache.avro.LogicalType in the Talend/components project:
class TalendTypeTest, method testConvertFromAvroUnsupportedLogicalType.
/**
 * Verifies that {@link TalendType#convertFromAvro(Schema)} rejects an unknown
 * logical type with an {@link UnsupportedOperationException} whose message
 * starts with "Unrecognized type unsupported".
 */
@Test
public void testConvertFromAvroUnsupportedLogicalType() {
  thrown.expect(UnsupportedOperationException.class);
  thrown.expectMessage("Unrecognized type unsupported");
  // Attach a logical type the converter does not know about to a string schema.
  LogicalType unknown = new LogicalType("unsupported");
  Schema schemaWithUnknownLogicalType = unknown.addToSchema(AvroUtils._string());
  TalendType.convertFromAvro(schemaWithUnknownLogicalType);
}
Aggregations