Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: class SchemaUtils, method widenNullableTypes.
/**
 * Merges two {@link FieldType}s that share the same type name, widening nullability: the
 * result is nullable if either input is nullable. Container types (row, array, iterable,
 * map) are merged recursively; logical types must carry the same identifier.
 *
 * @throws IllegalArgumentException if the two type names differ, or if both are logical
 *     types with different identifiers.
 */
static FieldType widenNullableTypes(FieldType fieldType1, FieldType fieldType2) {
  if (fieldType1.getTypeName() != fieldType2.getTypeName()) {
    throw new IllegalArgumentException(
        "Cannot merge two types: "
            + fieldType1.getTypeName()
            + " and "
            + fieldType2.getTypeName());
  }
  FieldType widened;
  switch (fieldType1.getTypeName()) {
    case ROW:
      widened =
          FieldType.row(
              mergeWideningNullable(fieldType1.getRowSchema(), fieldType2.getRowSchema()));
      break;
    case ARRAY:
      widened =
          FieldType.array(
              widenNullableTypes(
                  fieldType1.getCollectionElementType(),
                  fieldType2.getCollectionElementType()));
      break;
    case ITERABLE:
      widened =
          FieldType.iterable(
              widenNullableTypes(
                  fieldType1.getCollectionElementType(),
                  fieldType2.getCollectionElementType()));
      break;
    case MAP:
      widened =
          FieldType.map(
              widenNullableTypes(fieldType1.getMapKeyType(), fieldType2.getMapKeyType()),
              widenNullableTypes(fieldType1.getMapValueType(), fieldType2.getMapValueType()));
      break;
    case LOGICAL_TYPE:
      if (!fieldType1
          .getLogicalType()
          .getIdentifier()
          .equals(fieldType2.getLogicalType().getIdentifier())) {
        throw new IllegalArgumentException(
            "Logical types don't match and cannot be merged: "
                + fieldType1.getLogicalType().getIdentifier()
                + ".v.s"
                + fieldType2.getLogicalType().getIdentifier());
      }
      // Identifiers match: keep the first type unchanged, exactly like the default case.
      // fall through
    default:
      widened = fieldType1;
  }
  // Nullable wins: the merged type allows null if either input does.
  return widened.withNullable(fieldType1.getNullable() || fieldType2.getNullable());
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: class CalciteUtils, method toRelDataType.
/**
 * Converts a Beam {@link FieldType} to a Calcite {@link RelDataType} using the supplied
 * factory. Array/iterable, map, and row types are converted recursively; every other type
 * name is mapped through {@code toSqlTypeName}.
 */
public static RelDataType toRelDataType(RelDataTypeFactory dataTypeFactory, FieldType fieldType) {
  switch (fieldType.getTypeName()) {
    case ARRAY:
    case ITERABLE:
      FieldType elementType = fieldType.getCollectionElementType();
      Preconditions.checkArgumentNotNull(elementType);
      RelDataType relElementType = toRelDataType(dataTypeFactory, elementType);
      // Beam collections have no declared bound, so an unlimited array size is used.
      return dataTypeFactory.createArrayType(relElementType, UNLIMITED_ARRAY_SIZE);
    case MAP:
      FieldType keyType = fieldType.getMapKeyType();
      FieldType valueType = fieldType.getMapValueType();
      Preconditions.checkArgumentNotNull(keyType);
      Preconditions.checkArgumentNotNull(valueType);
      return dataTypeFactory.createMapType(
          toRelDataType(dataTypeFactory, keyType), toRelDataType(dataTypeFactory, valueType));
    case ROW:
      Schema rowSchema = fieldType.getRowSchema();
      Preconditions.checkArgumentNotNull(rowSchema);
      return toCalciteRowType(rowSchema, dataTypeFactory);
    default:
      return dataTypeFactory.createSqlType(toSqlTypeName(fieldType));
  }
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: class SchemaUtil, method createFieldExtractor.
/**
 * Creates a {@link ResultSetFieldExtractor} for the given type.
 *
 * <p>Collection types recurse on the element type; DATETIME and logical types get dedicated
 * extractors; everything else is looked up in {@code RESULTSET_FIELD_EXTRACTORS}.
 *
 * @throws NullPointerException if a collection type is missing its element type
 * @throws UnsupportedOperationException if no extractor exists for the given type
 */
private static ResultSetFieldExtractor createFieldExtractor(Schema.FieldType fieldType) {
  Schema.TypeName typeName = fieldType.getTypeName();
  switch (typeName) {
    case ARRAY:
    case ITERABLE:
      // getCollectionElementType() is nullable; fail fast with context here rather than
      // with a bare NPE deep inside the recursive call.
      Schema.FieldType elementType =
          Objects.requireNonNull(
              fieldType.getCollectionElementType(),
              "Collection type " + typeName + " is missing its element type");
      ResultSetFieldExtractor elementExtractor = createFieldExtractor(elementType);
      return createArrayExtractor(elementExtractor);
    case DATETIME:
      return TIMESTAMP_EXTRACTOR;
    case LOGICAL_TYPE:
      return createLogicalTypeExtractor(fieldType.getLogicalType());
    default:
      // Single lookup instead of containsKey + get.
      ResultSetFieldExtractor extractor = RESULTSET_FIELD_EXTRACTORS.get(typeName);
      if (extractor == null) {
        throw new UnsupportedOperationException(
            "BeamRowMapper does not have support for fields of type " + fieldType);
      }
      return extractor;
  }
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: class SchemaUtil, method createLogicalTypeExtractor.
/**
 * Creates a {@link ResultSetFieldExtractor} for logical types.
 *
 * <p>The JDBC UUID logical type is special-cased and read as a plain object. For every other
 * logical type the identifier is resolved as a {@link JDBCType} constant: DATE, TIME, and
 * TIMESTAMP_WITH_TIMEZONE get dedicated extractors, while any other JDBC type is extracted via
 * its base type and converted back through {@code toInputType}.
 *
 * <p>NOTE(review): {@code JDBCType.valueOf} throws {@code IllegalArgumentException} for
 * identifiers that are not JDBC type names — presumably all logical types reaching this method
 * originate from JDBC; confirm against callers.
 */
private static <InputT, BaseT> ResultSetFieldExtractor createLogicalTypeExtractor(final Schema.LogicalType<InputT, BaseT> fieldType) {
    String logicalTypeName = fieldType.getIdentifier();
    // UUID must be handled before the JDBCType lookup: "UUID" is not a JDBCType constant,
    // so valueOf() on its identifier would throw.
    if (Objects.equals(fieldType, LogicalTypes.JDBC_UUID_TYPE.getLogicalType())) {
        return OBJECT_EXTRACTOR;
    }
    JDBCType underlyingType = JDBCType.valueOf(logicalTypeName);
    switch(underlyingType) {
        case DATE:
            return DATE_EXTRACTOR;
        case TIME:
            return TIME_EXTRACTOR;
        case TIMESTAMP_WITH_TIMEZONE:
            return TIMESTAMP_EXTRACTOR;
        default:
            // Extract the raw base-type value, then map it back to the logical type's input
            // type. The cast is unchecked but matches the logical type's base type by contract.
            ResultSetFieldExtractor extractor = createFieldExtractor(fieldType.getBaseType());
            return (rs, index) -> fieldType.toInputType((BaseT) extractor.extract(rs, index));
    }
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: class SchemaUtils, method toBeamField.
/**
 * Converts a {@code ColumnSchema} into a Beam {@link Field}, honoring the column's mode:
 * NULLABLE (or unset) yields a nullable field, REQUIRED a non-nullable field, and REPEATED
 * an array field.
 *
 * @throws UnsupportedOperationException for any other mode value
 */
private static Field toBeamField(ColumnSchema column) {
  String name = column.getColumn();
  FieldType fieldType = getBeamFieldType(column);
  String mode = column.getMode();
  // An absent mode is treated the same as an explicit NULLABLE.
  if (mode == null || mode.isEmpty() || "NULLABLE".equals(mode)) {
    return Field.of(name, fieldType).withNullable(true);
  }
  if ("REQUIRED".equals(mode)) {
    return Field.of(name, fieldType).withNullable(false);
  }
  if ("REPEATED".equals(mode)) {
    return Field.of(name, FieldType.array(fieldType));
  }
  throw new UnsupportedOperationException(
      "Field mode '" + mode + "' is not supported (field '" + name + "')");
}
Aggregations