Use of org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable in project beam by apache.
The buildBeamSqlTable method of the KafkaTableProvider class.
@Override
public BeamSqlTable buildBeamSqlTable(Table table) {
  Schema schema = table.getSchema();
  JSONObject properties = table.getProperties();

  // If a location string is present, parse it into a broker location and a topic.
  Optional<ParsedLocation> parsedLocation = Optional.empty();
  if (!Strings.isNullOrEmpty(table.getLocation())) {
    parsedLocation = Optional.of(parseLocation(checkArgumentNotNull(table.getLocation())));
  }

  // Merge location-derived values with the "topics"/"bootstrap_servers" table properties.
  List<String> topics =
      mergeParam(parsedLocation.map(loc -> loc.topic), properties.getJSONArray("topics"));
  List<String> allBootstrapServers =
      mergeParam(
          parsedLocation.map(loc -> loc.brokerLocation),
          properties.getJSONArray("bootstrap_servers"));
  String bootstrapServers = String.join(",", allBootstrapServers);

  Optional<String> payloadFormat =
      properties.containsKey("format")
          ? Optional.of(properties.getString("format"))
          : Optional.empty();

  if (Schemas.isNestedSchema(schema)) {
    Optional<PayloadSerializer> serializer =
        payloadFormat.map(
            format ->
                PayloadSerializers.getSerializer(
                    format,
                    checkArgumentNotNull(schema.getField(PAYLOAD_FIELD).getType().getRowSchema()),
                    properties.getInnerMap()));
    return new NestedPayloadKafkaTable(schema, bootstrapServers, topics, serializer);
  } else {
    /*
     * CSV is handled separately because multiple rows can be produced from a single message,
     * which adds complexity to payload extraction. It remains here and as the default because
     * it is the historical default, but it will not be extended to support attaching extended
     * attributes to rows.
     */
    if (payloadFormat.orElse("csv").equals("csv")) {
      return new BeamKafkaCSVTable(schema, bootstrapServers, topics);
    }
    PayloadSerializer serializer =
        PayloadSerializers.getSerializer(payloadFormat.get(), schema, properties.getInnerMap());
    return new PayloadSerializerKafkaTable(schema, bootstrapServers, topics, serializer);
  }
}
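
For context, here is a minimal sketch of how this method might be exercised from user code. The schema, the table and topic names, the "format": "json" property, and the "<bootstrap servers>/<topic>" location shape are illustrative assumptions, not taken from the snippet above; the snippet only shows that the location (when present) is parsed into a broker location and a topic, and that the "format" property selects the payload serializer.

import com.alibaba.fastjson.JSONObject;
import org.apache.beam.sdk.extensions.sql.meta.BeamSqlTable;
import org.apache.beam.sdk.extensions.sql.meta.Table;
import org.apache.beam.sdk.extensions.sql.meta.provider.kafka.KafkaTableProvider;
import org.apache.beam.sdk.schemas.Schema;

public class KafkaTableProviderSketch {
  public static void main(String[] args) {
    // A flat (non-nested) value schema: each Kafka record maps to one row.
    Schema schema =
        Schema.builder().addStringField("name").addInt64Field("count").build();

    // "format": "json" routes past the historical CSV default to a
    // PayloadSerializer-backed table.
    JSONObject properties = new JSONObject();
    properties.put("format", "json");

    // Location is assumed here to be "<bootstrap servers>/<topic>"; both pieces
    // are merged with any "topics"/"bootstrap_servers" properties by mergeParam.
    Table table =
        Table.builder()
            .name("orders")
            .type("kafka")
            .schema(schema)
            .location("localhost:9092/orders_topic")
            .properties(properties)
            .build();

    // With a flat schema and format=json, this returns a PayloadSerializerKafkaTable.
    BeamSqlTable sqlTable = new KafkaTableProvider().buildBeamSqlTable(table);
  }
}

A table with a nested schema (one exposing the message payload under the PAYLOAD_FIELD row field) would instead take the NestedPayloadKafkaTable branch, and omitting "format" (or setting it to "csv") on a flat schema yields the BeamKafkaCSVTable default.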