Use of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project:
class BigQueryUtils, method convertAvroRecordToMap.
/**
 * Converts the Avro encoding of a Beam MAP field into an immutable Java map.
 *
 * <p>Avro represents a map as a list of two-field records, where field 0 holds the key and
 * field 1 holds the value. Each key and value is converted individually via
 * {@code convertAvroFormat} using the Beam map's declared key/value types.
 *
 * @param beamField the Beam {@link FieldType}, expected to be a MAP type
 * @param value the Avro value, expected to be a {@code List<GenericData.Record>}
 * @param options conversion options forwarded to {@code convertAvroFormat}
 * @return an {@code ImmutableMap} of converted keys to converted values
 */
private static Object convertAvroRecordToMap(FieldType beamField, Object value, BigQueryUtils.ConversionOptions options) {
  List<GenericData.Record> entries = (List<GenericData.Record>) value;
  FieldType keyType = beamField.getMapKeyType();
  FieldType valueType = beamField.getMapValueType();
  ImmutableMap.Builder<Object, Object> result = ImmutableMap.builder();
  for (GenericData.Record entry : entries) {
    // Field 0 of each entry record is the key, field 1 is the value.
    Object convertedKey = convertAvroFormat(keyType, entry.get(0), options);
    Object convertedValue = convertAvroFormat(valueType, entry.get(1), options);
    result.put(convertedKey, convertedValue);
  }
  return result.build();
}
Use of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project:
class SchemaTranslation, method fieldTypeToProto.
/**
 * Translates a Beam {@link FieldType} into its portable proto representation.
 *
 * <p>Composite types (ROW, ARRAY, ITERABLE, MAP) recurse on their component types. Logical
 * types are encoded by URN; unknown or Java-only logical types fall back to the Java-SDK URN
 * and, when {@code serializeLogicalType} is set, carry a Java-serialized payload.
 *
 * @param fieldType the Beam field type to translate
 * @param serializeLogicalType whether to embed a Java-serialized payload for logical types
 *     that have no portable representation
 * @return the proto form of the field type, with nullability copied from the input
 */
private static SchemaApi.FieldType fieldTypeToProto(FieldType fieldType, boolean serializeLogicalType) {
SchemaApi.FieldType.Builder builder = SchemaApi.FieldType.newBuilder();
switch(fieldType.getTypeName()) {
case ROW:
// Nested rows carry a full schema; recurse through schemaToProto.
builder.setRowType(SchemaApi.RowType.newBuilder().setSchema(schemaToProto(fieldType.getRowSchema(), serializeLogicalType)));
break;
case ARRAY:
builder.setArrayType(SchemaApi.ArrayType.newBuilder().setElementType(fieldTypeToProto(fieldType.getCollectionElementType(), serializeLogicalType)));
break;
case ITERABLE:
builder.setIterableType(SchemaApi.IterableType.newBuilder().setElementType(fieldTypeToProto(fieldType.getCollectionElementType(), serializeLogicalType)));
break;
case MAP:
builder.setMapType(SchemaApi.MapType.newBuilder().setKeyType(fieldTypeToProto(fieldType.getMapKeyType(), serializeLogicalType)).setValueType(fieldTypeToProto(fieldType.getMapValueType(), serializeLogicalType)).build());
break;
case LOGICAL_TYPE:
LogicalType logicalType = fieldType.getLogicalType();
SchemaApi.LogicalType.Builder logicalTypeBuilder;
if (STANDARD_LOGICAL_TYPES.containsKey(logicalType.getIdentifier())) {
// Standard portable logical types are identified purely by URN; they must not carry
// an argument type (no argument is serialized for them).
Preconditions.checkArgument(logicalType.getArgumentType() == null, "Logical type '%s' cannot be used as a logical type, it has a non-null argument type.", logicalType.getIdentifier());
logicalTypeBuilder = SchemaApi.LogicalType.newBuilder().setRepresentation(fieldTypeToProto(logicalType.getBaseType(), serializeLogicalType)).setUrn(logicalType.getIdentifier());
} else if (logicalType instanceof UnknownLogicalType) {
// A logical type previously decoded from proto but unknown to this SDK: round-trip its
// original URN and payload unchanged so no information is lost.
logicalTypeBuilder = SchemaApi.LogicalType.newBuilder().setUrn(logicalType.getIdentifier()).setPayload(ByteString.copyFrom(((UnknownLogicalType) logicalType).getPayload())).setRepresentation(fieldTypeToProto(logicalType.getBaseType(), serializeLogicalType));
if (logicalType.getArgumentType() != null) {
logicalTypeBuilder.setArgumentType(fieldTypeToProto(logicalType.getArgumentType(), serializeLogicalType)).setArgument(fieldValueToProto(logicalType.getArgumentType(), logicalType.getArgument()));
}
} else {
// A Java-SDK-only logical type: tag it with the generic Java-SDK URN.
logicalTypeBuilder = SchemaApi.LogicalType.newBuilder().setRepresentation(fieldTypeToProto(logicalType.getBaseType(), serializeLogicalType)).setUrn(URN_BEAM_LOGICAL_JAVASDK);
if (logicalType.getArgumentType() != null) {
logicalTypeBuilder = logicalTypeBuilder.setArgumentType(fieldTypeToProto(logicalType.getArgumentType(), serializeLogicalType)).setArgument(fieldValueToProto(logicalType.getArgumentType(), logicalType.getArgument()));
}
if (serializeLogicalType) {
// Optionally embed the Java-serialized logical type so a Java SDK on the other side
// can reconstruct it exactly.
logicalTypeBuilder = logicalTypeBuilder.setPayload(ByteString.copyFrom(SerializableUtils.serializeToByteArray(logicalType)));
}
}
builder.setLogicalType(logicalTypeBuilder.build());
break;
// DATETIME and DECIMAL are encoded as logical types in the portable representation,
// but are not yet modeled as logical types in the Java SDK. (BEAM-7554)
case DATETIME:
builder.setLogicalType(SchemaApi.LogicalType.newBuilder().setUrn(URN_BEAM_LOGICAL_DATETIME).setRepresentation(fieldTypeToProto(FieldType.INT64, serializeLogicalType)).build());
break;
case DECIMAL:
builder.setLogicalType(SchemaApi.LogicalType.newBuilder().setUrn(URN_BEAM_LOGICAL_DECIMAL).setRepresentation(fieldTypeToProto(FieldType.BYTES, serializeLogicalType)).build());
break;
case BYTE:
builder.setAtomicType(SchemaApi.AtomicType.BYTE);
break;
case INT16:
builder.setAtomicType(SchemaApi.AtomicType.INT16);
break;
case INT32:
builder.setAtomicType(SchemaApi.AtomicType.INT32);
break;
case INT64:
builder.setAtomicType(SchemaApi.AtomicType.INT64);
break;
case FLOAT:
builder.setAtomicType(SchemaApi.AtomicType.FLOAT);
break;
case DOUBLE:
builder.setAtomicType(SchemaApi.AtomicType.DOUBLE);
break;
case STRING:
builder.setAtomicType(SchemaApi.AtomicType.STRING);
break;
case BOOLEAN:
builder.setAtomicType(SchemaApi.AtomicType.BOOLEAN);
break;
case BYTES:
builder.setAtomicType(SchemaApi.AtomicType.BYTES);
break;
}
// Nullability is orthogonal to the type itself and set on every result.
builder.setNullable(fieldType.getNullable());
return builder.build();
}
Use of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project:
class FieldTypeDescriptors, method getArrayFieldType.
/**
 * Derives the Beam array {@link FieldType} for a Java array or Collection type.
 *
 * <p>Java arrays map to ARRAY of the component type, except {@code byte[]}, which maps to the
 * BYTES atomic type. Collection subtypes map to ARRAY of their single type argument.
 *
 * @param typeDescriptor a Java array type or a parameterized Collection subtype
 * @return the corresponding Beam field type
 * @throws RuntimeException if the element type cannot be determined
 */
private static FieldType getArrayFieldType(TypeDescriptor typeDescriptor) {
  if (typeDescriptor.isArray()) {
    TypeDescriptor component = typeDescriptor.getComponentType();
    // byte[] is special-cased to the BYTES atomic type rather than ARRAY<BYTE>.
    return component.getType().equals(byte.class)
        ? FieldType.BYTES
        : FieldType.array(fieldTypeForJavaType(component));
  }
  if (typeDescriptor.isSubtypeOf(TypeDescriptor.of(Collection.class))) {
    // Resolve the type against the Collection supertype to recover the element type argument.
    TypeDescriptor<Collection<?>> asCollection = typeDescriptor.getSupertype(Collection.class);
    if (asCollection.getType() instanceof ParameterizedType) {
      java.lang.reflect.Type[] typeArgs =
          ((ParameterizedType) asCollection.getType()).getActualTypeArguments();
      checkArgument(typeArgs.length == 1);
      return FieldType.array(fieldTypeForJavaType(TypeDescriptor.of(typeArgs[0])));
    }
  }
  throw new RuntimeException("Could not determine array parameter type for field.");
}
Use of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project:
class FieldAccessDescriptor, method validateFieldDescriptor.
/**
 * Validates that a {@link FieldDescriptor} is consistent with the given schema.
 *
 * <p>Checks that a numeric field id (if present) is in range, then walks the descriptor's
 * qualifiers (e.g. {@code a[*][*]}), verifying at each step that the current field type is a
 * collection (for LIST qualifiers) or a map (for MAP qualifiers).
 *
 * @param schema the schema the descriptor refers to
 * @param fieldDescriptor the descriptor being validated
 * @throws IllegalArgumentException if the field id is out of range or a qualifier does not
 *     match the field's type
 * @throws IllegalStateException if a qualifier kind is unrecognized
 */
private static void validateFieldDescriptor(Schema schema, FieldDescriptor fieldDescriptor) {
  Integer fieldId = fieldDescriptor.getFieldId();
  if (fieldId != null && (fieldId < 0 || fieldId >= schema.getFieldCount())) {
    throw new IllegalArgumentException("Invalid field id " + fieldId + " for schema " + schema);
  }
  // Look the field up by id when one was given, otherwise by name.
  Field field =
      (fieldId == null)
          ? schema.getField(fieldDescriptor.getFieldName())
          : schema.getField(fieldId);
  // Descend one type level per qualifier, checking each level matches the qualifier kind.
  FieldType currentType = field.getType();
  for (Qualifier qualifier : fieldDescriptor.getQualifiers()) {
    switch (qualifier.getKind()) {
      case LIST:
        checkArgument(qualifier.getList().equals(ListQualifier.ALL));
        checkArgument(currentType.getTypeName().isCollectionType());
        currentType = currentType.getCollectionElementType();
        break;
      case MAP:
        checkArgument(qualifier.getMap().equals(MapQualifier.ALL));
        checkArgument(currentType.getTypeName().equals(TypeName.MAP));
        currentType = currentType.getMapValueType();
        break;
      default:
        throw new IllegalStateException("Unexpected qualifier type " + qualifier.getKind());
    }
  }
}
Use of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project:
class AvroUtils, method convertUnionStrict.
/**
 * Converts an Avro union value into a Beam {@code OneOfType} value.
 *
 * <p>The Beam field type must be the {@code OneOfType} logical type. Avro's union resolution
 * picks which branch the value belongs to; the value is then strictly converted using that
 * branch's Avro schema and the matching Beam field type.
 *
 * @param value the raw Avro union value
 * @param unionAvroSchema the Avro UNION schema describing the possible branches
 * @param fieldType the Beam field type, which must be a {@code OneOfType} logical type
 * @return the converted value wrapped in the OneOf case for the resolved branch
 */
private static Object convertUnionStrict(Object value, org.apache.avro.Schema unionAvroSchema, Schema.FieldType fieldType) {
  checkTypeName(fieldType.getTypeName(), TypeName.LOGICAL_TYPE, "oneOfType");
  checkArgument(fieldType.getLogicalType().getIdentifier().equals(OneOfType.IDENTIFIER));
  OneOfType oneOf = fieldType.getLogicalType(OneOfType.class);
  // Let Avro decide which union branch this value matches.
  int caseIndex = GenericData.get().resolveUnion(unionAvroSchema, value);
  FieldType caseType = oneOf.getOneOfSchema().getField(caseIndex).getType();
  org.apache.avro.Schema caseSchema = unionAvroSchema.getTypes().get(caseIndex);
  Object converted = convertAvroFieldStrict(value, caseSchema, caseType);
  return oneOf.createValue(caseIndex, converted);
}
Aggregations