Usage of org.apache.beam.sdk.annotations.Experimental in the Apache Beam project: class ProtoByteBuddyUtils, method createBuilderCreator.
/**
 * Generates a {@link SchemaUserTypeCreator} for the given proto builder class by dynamically
 * subclassing {@link Supplier} (via ByteBuddy) so that {@code get()} produces new builder
 * instances for {@code protoClass}.
 *
 * @param protoClass the generated protobuf message class
 * @param builderClass the corresponding {@code MessageLite.Builder} class
 * @param setters field setters used to populate the builder from schema field values
 * @param schema the Beam schema describing the proto; only used in the error message here
 * @throws RuntimeException if the generated supplier class cannot be instantiated reflectively
 */
@Experimental(Kind.SCHEMAS)
static <ProtoBuilderT extends MessageLite.Builder> SchemaUserTypeCreator createBuilderCreator(
    Class<?> protoClass,
    Class<?> builderClass,
    List<FieldValueSetter<ProtoBuilderT, Object>> setters,
    Schema schema) {
  try {
    // Build a Supplier subclass whose get() is intercepted to return builders for protoClass.
    // InjectPackageStrategy places the generated class in the builder's package so it can
    // access package-private members.
    DynamicType.Builder<Supplier> builder =
        BYTE_BUDDY
            .with(new InjectPackageStrategy(builderClass))
            .subclass(Supplier.class)
            .method(ElementMatchers.named("get"))
            .intercept(new BuilderSupplier(protoClass));
    // COMPUTE_FRAMES lets ASM regenerate stack-map frames for the injected bytecode.
    Supplier supplier =
        builder
            .visit(
                new AsmVisitorWrapper.ForDeclaredMethods()
                    .writerFlags(ClassWriter.COMPUTE_FRAMES))
            .make()
            .load(ReflectHelpers.findClassLoader(), ClassLoadingStrategy.Default.INJECTION)
            .getLoaded()
            .getDeclaredConstructor()
            .newInstance();
    return new ProtoCreatorFactory<>(supplier, setters);
  } catch (InstantiationException
      | IllegalAccessException
      | NoSuchMethodException
      | InvocationTargetException e) {
    // Preserve the underlying reflective failure as the cause; previously it was dropped,
    // which made these errors nearly impossible to diagnose.
    throw new RuntimeException(
        "Unable to generate a creator for class " + builderClass + " with schema " + schema, e);
  }
}
Usage of org.apache.beam.sdk.annotations.Experimental in the Apache Beam project: class TestBigQuery, method insertRows.
/**
 * Inserts the given Beam {@link Row}s into the test table via the BigQuery tabledata
 * insertAll API and returns the service response.
 */
@Experimental(Kind.SCHEMAS)
public TableDataInsertAllResponse insertRows(Schema rowSchema, Row... rows) throws IOException {
  // Convert each Beam Row into a BigQuery insert-all payload row.
  ImmutableList.Builder<Rows> payload = ImmutableList.builder();
  for (Row row : rows) {
    payload.add(new Rows().setJson(BigQueryUtils.toTableRow(row)));
  }
  // Prefer the explicitly configured BigQuery project; fall back to the pipeline project.
  String project = pipelineOptions.getBigQueryProject();
  if (project == null) {
    project = pipelineOptions.getProject();
  }
  Bigquery client = newBigQueryClient(pipelineOptions);
  return client
      .tabledata()
      .insertAll(
          project,
          pipelineOptions.getTargetDataset(),
          table.getTableReference().getTableId(),
          new TableDataInsertAllRequest().setRows(payload.build()))
      .setPrettyPrint(false)
      .execute();
}
Usage of org.apache.beam.sdk.annotations.Experimental in the Apache Beam project: class PubsubIO, method readAvroGenericRecords.
/**
 * Returns a {@link PTransform} that continuously reads binary encoded Avro messages into the Avro
 * {@link GenericRecord} type.
 *
 * <p>Beam will infer a schema for the Avro schema. This allows the output to be used by SQL and
 * by the schema-transform library.
 */
@Experimental(Kind.SCHEMAS)
public static Read<GenericRecord> readAvroGenericRecords(org.apache.avro.Schema avroSchema) {
  TypeDescriptor<GenericRecord> recordType = TypeDescriptor.of(GenericRecord.class);
  // Beam schema inferred from the Avro schema, plus conversion functions in both directions.
  Schema beamSchema = AvroUtils.getSchema(GenericRecord.class, avroSchema);
  SchemaCoder<GenericRecord> schemaCoder =
      SchemaCoder.of(
          beamSchema,
          recordType,
          AvroUtils.getToRowFunction(GenericRecord.class, avroSchema),
          AvroUtils.getFromRowFunction(GenericRecord.class));
  // Payloads on the wire are decoded with a plain AvroCoder; the schema coder is set on output.
  AvroCoder<GenericRecord> payloadCoder = AvroCoder.of(GenericRecord.class, avroSchema);
  return Read.newBuilder(parsePayloadUsingCoder(payloadCoder)).setCoder(schemaCoder).build();
}
Usage of org.apache.beam.sdk.annotations.Experimental in the Apache Beam project: class PubsubIO, method readAvrosWithBeamSchema.
/**
 * Returns a {@link PTransform} that continuously reads binary encoded Avro messages of the
 * specific type.
 *
 * <p>Beam will infer a schema for the Avro schema. This allows the output to be used by SQL and
 * by the schema-transform library.
 */
@Experimental(Kind.SCHEMAS)
public static <T> Read<T> readAvrosWithBeamSchema(Class<T> clazz) {
  // GenericRecord has a dedicated entry point that takes an explicit Avro schema.
  if (clazz.equals(GenericRecord.class)) {
    throw new IllegalArgumentException("For GenericRecord, please call readAvroGenericRecords");
  }
  // Derive the Avro schema reflectively from the specific-record class.
  org.apache.avro.Schema avroSchema = ReflectData.get().getSchema(clazz);
  // Beam schema inferred from the class, plus Row conversion functions in both directions.
  Schema beamSchema = AvroUtils.getSchema(clazz, null);
  SchemaCoder<T> schemaCoder =
      SchemaCoder.of(
          beamSchema,
          TypeDescriptor.of(clazz),
          AvroUtils.getToRowFunction(clazz, avroSchema),
          AvroUtils.getFromRowFunction(clazz));
  AvroCoder<T> payloadCoder = AvroCoder.of(clazz);
  return Read.newBuilder(parsePayloadUsingCoder(payloadCoder)).setCoder(schemaCoder).build();
}
Usage of org.apache.beam.sdk.annotations.Experimental in the Apache Beam project: class BigQueryUtils, method fromTableFieldSchemaType.
/**
 * Get the Beam {@link FieldType} from a BigQuery type name.
 *
 * <p>Supports both standard and legacy SQL types.
 *
 * @param typeName Name of the type
 * @param nestedFields Nested fields for the given type (eg. RECORD type)
 * @return Corresponding Beam {@link FieldType}
 */
@Experimental(Kind.SCHEMAS)
private static FieldType fromTableFieldSchemaType(
    String typeName, List<TableFieldSchema> nestedFields, SchemaConversionOptions options) {
  switch (typeName) {
    case "STRING":
      return FieldType.STRING;
    case "BYTES":
      return FieldType.BYTES;
    // Legacy SQL names ("INTEGER", "FLOAT", "BOOLEAN") map to the same Beam types as the
    // standard SQL names.
    case "INTEGER":
    case "INT64":
      return FieldType.INT64;
    case "FLOAT":
    case "FLOAT64":
      return FieldType.DOUBLE;
    case "BOOLEAN":
    case "BOOL":
      return FieldType.BOOLEAN;
    case "NUMERIC":
      return FieldType.DECIMAL;
    case "TIMESTAMP":
      return FieldType.DATETIME;
    case "TIME":
      return FieldType.logicalType(SqlTypes.TIME);
    case "DATE":
      return FieldType.logicalType(SqlTypes.DATE);
    case "DATETIME":
      return FieldType.logicalType(SqlTypes.DATETIME);
    case "STRUCT":
    case "RECORD":
      if (options.getInferMaps() && nestedFields.size() == 2) {
        // A two-field struct named (key, value) is treated as a Beam map when map
        // inference is enabled.
        TableFieldSchema keyField = nestedFields.get(0);
        TableFieldSchema valueField = nestedFields.get(1);
        boolean looksLikeMap =
            BIGQUERY_MAP_KEY_FIELD_NAME.equals(keyField.getName())
                && BIGQUERY_MAP_VALUE_FIELD_NAME.equals(valueField.getName());
        if (looksLikeMap) {
          return FieldType.map(
              fromTableFieldSchemaType(keyField.getType(), keyField.getFields(), options),
              fromTableFieldSchemaType(valueField.getType(), valueField.getFields(), options));
        }
      }
      // Otherwise convert the nested fields into a row schema.
      return FieldType.row(fromTableFieldSchema(nestedFields, options));
    default:
      throw new UnsupportedOperationException(
          "Converting BigQuery type " + typeName + " to Beam type is unsupported");
  }
}
Aggregations