Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: the toProtoValue method of the BeamRowToStorageApiProto class.
/**
 * Converts a single Beam row-field value into the corresponding protobuf field value.
 *
 * <p>ROW values recurse into {@code messageFromBeamRow}; ARRAY and ITERABLE values convert each
 * element recursively; MAP is rejected (BigQuery has no map type); every other type is handled as
 * a scalar by {@code scalarToProtoValue}.
 *
 * @param fieldDescriptor proto descriptor of the target field
 * @param beamFieldType Beam schema type of {@code value}
 * @param value the Beam-typed value to convert
 * @return the proto-compatible representation of {@code value}
 */
private static Object toProtoValue(FieldDescriptor fieldDescriptor, FieldType beamFieldType, Object value) {
switch(beamFieldType.getTypeName()) {
case ROW:
return messageFromBeamRow(fieldDescriptor.getMessageType(), (Row) value);
case ARRAY:
case ITERABLE:
// A List is an Iterable, so both collection kinds share one conversion path.
Iterable<Object> iterable = (Iterable<Object>) value;
@Nullable FieldType elementType = beamFieldType.getCollectionElementType();
if (elementType == null) {
throw new RuntimeException("Unexpected null element type!");
}
// Convert each element recursively against the same field descriptor.
return StreamSupport.stream(iterable.spliterator(), false).map(v -> toProtoValue(fieldDescriptor, elementType, v)).collect(Collectors.toList());
case MAP:
throw new RuntimeException("Map types not supported by BigQuery.");
default:
return scalarToProtoValue(beamFieldType, value);
}
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: the testGetPreparedStatementSetCallerForLogicalTypes method of the JdbcIOTest class.
@Test
public void testGetPreparedStatementSetCallerForLogicalTypes() throws Exception {
// Verifies that each JDBC logical type's PreparedStatement setter maps the
// 0-based row-field index onto the 1-based JDBC parameter index and calls the
// appropriate typed setXxx method.
FieldType fixedLengthStringType = LogicalTypes.fixedLengthString(JDBCType.VARCHAR, 4);
// One field per logical type under test; field order drives the index pairing below.
Schema schema = Schema.builder().addField("logical_date_col", LogicalTypes.JDBC_DATE_TYPE).addField("logical_time_col", LogicalTypes.JDBC_TIME_TYPE).addField("logical_time_with_tz_col", LogicalTypes.JDBC_TIMESTAMP_WITH_TIMEZONE_TYPE).addField("logical_fixed_length_string_col", fixedLengthStringType).addField("logical_fixed_length_string_nullable_col", fixedLengthStringType.withNullable(true)).addField("logical_uuid_col", LogicalTypes.JDBC_UUID_TYPE).addField("logical_other_col", LogicalTypes.OTHER_AS_STRING_TYPE).build();
long epochMilli = 1558719710000L;
DateTime dateTime = new DateTime(epochMilli, ISOChronology.getInstanceUTC());
DateTime time = new DateTime(34567000L, /* value must be less than num millis in one day */
ISOChronology.getInstanceUTC());
// Values positionally match the schema fields; null exercises the nullable VARCHAR column.
Row row = Row.withSchema(schema).addValues(dateTime.withTimeAtStartOfDay(), time, dateTime, "Test", null, UUID.randomUUID(), "{}").build();
PreparedStatement psMocked = mock(PreparedStatement.class);
// Invoke each setter with 0-based field index i; the mock records the resulting
// 1-based JDBC parameter index (i + 1) checked by the verify calls below.
JdbcUtil.getPreparedStatementSetCaller(LogicalTypes.JDBC_DATE_TYPE).set(row, psMocked, 0, SchemaUtil.FieldWithIndex.of(schema.getField(0), 0));
JdbcUtil.getPreparedStatementSetCaller(LogicalTypes.JDBC_TIME_TYPE).set(row, psMocked, 1, SchemaUtil.FieldWithIndex.of(schema.getField(1), 1));
JdbcUtil.getPreparedStatementSetCaller(LogicalTypes.JDBC_TIMESTAMP_WITH_TIMEZONE_TYPE).set(row, psMocked, 2, SchemaUtil.FieldWithIndex.of(schema.getField(2), 2));
JdbcUtil.getPreparedStatementSetCaller(fixedLengthStringType).set(row, psMocked, 3, SchemaUtil.FieldWithIndex.of(schema.getField(3), 3));
JdbcUtil.getPreparedStatementSetCaller(fixedLengthStringType.withNullable(true)).set(row, psMocked, 4, SchemaUtil.FieldWithIndex.of(schema.getField(4), 4));
JdbcUtil.getPreparedStatementSetCaller(LogicalTypes.JDBC_UUID_TYPE).set(row, psMocked, 5, SchemaUtil.FieldWithIndex.of(schema.getField(5), 5));
JdbcUtil.getPreparedStatementSetCaller(LogicalTypes.OTHER_AS_STRING_TYPE).set(row, psMocked, 6, SchemaUtil.FieldWithIndex.of(schema.getField(6), 6));
// Each verify uses the 1-based JDBC index for the field set above.
verify(psMocked, times(1)).setDate(1, new Date(row.getDateTime(0).getMillis()));
verify(psMocked, times(1)).setTime(2, new Time(row.getDateTime(1).getMillis()));
// Timestamp-with-timezone setters take an explicit Calendar pinned to UTC.
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
cal.setTimeInMillis(epochMilli);
verify(psMocked, times(1)).setTimestamp(3, new Timestamp(cal.getTime().getTime()), cal);
verify(psMocked, times(1)).setString(4, row.getString(3));
verify(psMocked, times(1)).setString(5, row.getString(4));
verify(psMocked, times(1)).setObject(6, row.getLogicalTypeValue(5, UUID.class));
// OTHER-typed columns go through setObject with an explicit java.sql.Types hint.
verify(psMocked, times(1)).setObject(7, row.getString(6), java.sql.Types.OTHER);
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: the toBeamField method of the SchemaUtils class.
/**
 * Converts a Hive {@code FieldSchema} into a Beam {@code Schema.Field}.
 *
 * <p>Only primitive HCatalog categories are supported; the resulting Beam field is always marked
 * nullable. Complex categories (ARRAY, MAP, STRUCT) are rejected.
 *
 * @param field the Hive field schema to convert
 * @return the equivalent nullable Beam field
 * @throws UnsupportedOperationException if the conversion to HCatFieldSchema fails, if the
 *     primitive type has no Beam mapping, or if the category is not primitive
 */
private static Schema.Field toBeamField(FieldSchema field) {
String name = field.getName();
HCatFieldSchema hCatFieldSchema;
try {
hCatFieldSchema = HCatSchemaUtils.getHCatFieldSchema(field);
} catch (HCatException e) {
// Converting checked Exception to unchecked Exception.
throw new UnsupportedOperationException("Error while converting FieldSchema to HCatFieldSchema", e);
}
switch(hCatFieldSchema.getCategory()) {
case PRIMITIVE:
{
// Single lookup instead of containsKey + get; null means there is no Beam mapping.
FieldType fieldType = HCAT_TO_BEAM_TYPES_MAP.get(hCatFieldSchema.getType());
if (fieldType == null) {
throw new UnsupportedOperationException("The Primitive HCat type '" + field.getType() + "' of field '" + name + "' cannot be converted to Beam FieldType");
}
return Schema.Field.of(name, fieldType).withNullable(true);
}
// TODO: Add Support for Complex Types i.e. ARRAY, MAP, STRUCT
default:
throw new UnsupportedOperationException("The category '" + hCatFieldSchema.getCategory() + "' is not supported.");
}
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: the withUnboxPrimitiveParameter method of the DoFnSchemaInformation class.
/**
 * Registers a parameter that is a field selection (via FieldAccess) from the input schema, where
 * the parameter's Java type is a schema-less primitive such as {@code long} or {@code String}.
 * The selection is expected to produce a single-field row whose lone field is unboxed into the
 * parameter's type.
 *
 * @param inputCoder The coder for the ParDo's input elements.
 * @param selectDescriptor The descriptor describing which field to select.
 * @param selectOutputSchema The schema of the selected parameter.
 * @param elementT The type of the method's input parameter.
 * @return a copy of this DoFnSchemaInformation with the unboxing converter appended
 */
DoFnSchemaInformation withUnboxPrimitiveParameter(SchemaCoder inputCoder, FieldAccessDescriptor selectDescriptor, Schema selectOutputSchema, TypeDescriptor<?> elementT) {
// The selection must yield exactly one field, and that field must itself be primitive.
if (selectOutputSchema.getFieldCount() != 1) {
throw new RuntimeException("Parameter has no schema and the input is not a simple type.");
}
FieldType selectedFieldType = selectOutputSchema.getField(0).getType();
if (selectedFieldType.getTypeName().isCompositeType()) {
throw new RuntimeException("Parameter has no schema and the input is not a primitive type.");
}
// Append a converter that selects the field and unboxes it into the target Java type.
SerializableFunction<?, ?> unboxingConverter = UnboxingConversionFunction.of(inputCoder.getSchema(), inputCoder.getToRowFunction(), selectDescriptor, selectOutputSchema, elementT);
ImmutableList.Builder<SerializableFunction<?, ?>> allConverters = ImmutableList.builder();
allConverters.addAll(getElementConverters());
allConverters.add(unboxingConverter);
return toBuilder().setElementConverters(allConverters.build()).build();
}
Example usage of org.apache.beam.sdk.schemas.Schema.FieldType in the Apache Beam project: the testConsistentWithEqualsMapWithBytesKeyField method of the RowCoderTest class.
@Test
@Ignore
public void testConsistentWithEqualsMapWithBytesKeyField() throws Exception {
// Two rows whose map fields hold equal-content but distinct byte[] keys; the
// property check below requires the coder to treat them as equal.
FieldType bytesToIntMapType = FieldType.map(FieldType.BYTES, FieldType.INT32);
Schema schema = Schema.of(Schema.Field.of("f1", bytesToIntMapType));
RowCoder coder = RowCoder.of(schema);
byte[] firstKey = new byte[] { 1, 2, 3, 4 };
Row row1 = Row.withSchema(schema).addValue(Collections.singletonMap(firstKey, 1)).build();
byte[] secondKey = new byte[] { 1, 2, 3, 4 };
Row row2 = Row.withSchema(schema).addValue(Collections.singletonMap(secondKey, 1)).build();
// Only meaningful when the coder claims consistency with equals; skip otherwise.
Assume.assumeTrue(coder.consistentWithEquals());
CoderProperties.coderConsistentWithEquals(coder, row1, row2);
}
Aggregations