Use of `io.confluent.ksql.api.server.InsertResult` in the project ksql by confluentinc.
The class `InsertsSubscriber`, method `createInsertsSubscriber`:
/**
 * Creates an {@code InsertsSubscriber} that serializes incoming rows for the given
 * {@code dataSource} and produces them to its Kafka topic, publishing one
 * {@link InsertResult} acknowledgement per insert to {@code acksSubscriber}.
 *
 * @param serviceContext  supplies the Kafka client supplier and the schema registry client factory
 * @param properties      per-request property overrides layered on top of {@code ksqlConfig}
 * @param dataSource      source whose topic, schema, and key/value formats drive serialization
 * @param ksqlConfig      base server configuration
 * @param context         Vert.x context on which the acks publisher delivers results
 * @param acksSubscriber  receives the acknowledgement stream
 * @param workerExecutor  executor used by the subscriber for blocking work
 * @return a subscriber ready to consume rows to insert
 */
public static InsertsSubscriber createInsertsSubscriber(final ServiceContext serviceContext, final JsonObject properties, final DataSource dataSource, final KsqlConfig ksqlConfig, final Context context, final Subscriber<InsertResult> acksSubscriber, final WorkerExecutor workerExecutor) {
  // Wire the ack pipeline first: results flow through a buffered publisher on the
  // given Vert.x context before reaching the caller-supplied subscriber.
  final BufferedPublisher<InsertResult> ackPublisher = new BufferedPublisher<>(context);
  ackPublisher.subscribe(acksSubscriber);

  // The request-level overrides are applied only to the producer's configuration.
  // NOTE(review): the serdes below are intentionally(?) built from the base
  // ksqlConfig, not the overridden copy — confirm this asymmetry is deliberate.
  final KsqlConfig producerConfig = ksqlConfig.cloneWithPropertyOverwrite(properties.getMap());
  final Producer<byte[], byte[]> topicProducer =
      serviceContext.getKafkaClientSupplier().getProducer(producerConfig.originals());

  // Resolve the physical (serialization) schema from the source's logical schema
  // plus any enabled key/value serde features.
  final PhysicalSchema serializationSchema = PhysicalSchema.from(
      dataSource.getSchema(),
      dataSource.getKsqlTopic().getKeyFormat().getFeatures(),
      dataSource.getKsqlTopic().getValueFormat().getFeatures());

  final KeySerdeFactory keyFactory = new GenericKeySerDe();
  final Serde<GenericKey> keySerde = keyFactory.create(
      dataSource.getKsqlTopic().getKeyFormat().getFormatInfo(),
      serializationSchema.keySchema(),
      ksqlConfig,
      serviceContext.getSchemaRegistryClientFactory(),
      "",
      NoopProcessingLogContext.INSTANCE,
      Optional.empty());

  final ValueSerdeFactory valueFactory = new GenericRowSerDe();
  final Serde<GenericRow> valueSerde = valueFactory.create(
      dataSource.getKsqlTopic().getValueFormat().getFormatInfo(),
      serializationSchema.valueSchema(),
      ksqlConfig,
      serviceContext.getSchemaRegistryClientFactory(),
      "",
      NoopProcessingLogContext.INSTANCE,
      Optional.empty());

  return new InsertsSubscriber(
      context,
      topicProducer,
      dataSource,
      keySerde.serializer(),
      valueSerde.serializer(),
      ackPublisher,
      workerExecutor);
}
Aggregations