Usage of io.confluent.ksql.execution.ddl.commands.KsqlTopic in the ksql project by Confluent Inc.
The following snippet is the buildOutputNode method of the LogicalPlanner class.
/**
 * Builds the output (sink) node of the logical plan for the given source node.
 *
 * <p>A statement without an {@code INTO} target is a transient query writing to
 * STDOUT; anything else is a persistent query writing to a Kafka topic sink.
 */
private OutputNode buildOutputNode(final PlanNode sourcePlanNode) {
  final LogicalSchema inputSchema = sourcePlanNode.getSchema();
  final Optional<TimestampColumn> timestampColumn = getTimestampColumn(inputSchema, analysis);

  final Optional<Into> maybeInto = analysis.getInto();

  // No INTO clause: emit a bare output node that streams results to STDOUT.
  if (!maybeInto.isPresent()) {
    return new KsqlBareOutputNode(
        new PlanNodeId("KSQL_STDOUT_NAME"),
        sourcePlanNode,
        inputSchema,
        analysis.getLimitClause(),
        timestampColumn,
        getWindowInfo()
    );
  }

  // Persistent query: resolve the sink topic (existing or newly declared) and
  // wrap the source in a structured-data output node.
  final Into into = maybeInto.get();
  final KsqlTopic sinkTopic = getSinkTopic(into, sourcePlanNode.getSchema());

  return new KsqlStructuredDataOutputNode(
      new PlanNodeId(into.getName().text()),
      sourcePlanNode,
      inputSchema,
      timestampColumn,
      sinkTopic,
      analysis.getLimitClause(),
      into.isCreate(),
      into.getName(),
      analysis.getOrReplace()
  );
}
Another usage of io.confluent.ksql.execution.ddl.commands.KsqlTopic in the ksql project by Confluent Inc.
The following snippet is the getSinkTopic method of the LogicalPlanner class.
/**
 * Resolves the {@link KsqlTopic} the query will sink into.
 *
 * <p>If the {@code INTO} target refers to a topic that already exists, it is
 * returned as-is. Otherwise a new topic definition must have been produced
 * during analysis, and its key/value formats and serde features are derived
 * from the sink schema.
 */
private KsqlTopic getSinkTopic(final Into into, final LogicalSchema schema) {
  // Reuse the topic when the statement targets one that already exists.
  final Optional<KsqlTopic> existing = into.getExistingTopic();
  if (existing.isPresent()) {
    return existing.get();
  }

  // No existing topic: analysis must have supplied a NewTopic descriptor.
  final NewTopic newTopic = into.getNewTopic().orElseThrow(IllegalStateException::new);

  final FormatInfo keyFormatInfo = getSinkKeyFormat(schema, newTopic);
  final SerdeFeatures keyFeatures =
      SerdeFeaturesFactory.buildKeyFeatures(schema, FormatFactory.of(keyFormatInfo));
  final SerdeFeatures valueFeatures = SerdeFeaturesFactory.buildValueFeatures(
      schema,
      FormatFactory.of(newTopic.getValueFormat()),
      analysis.getProperties().getValueSerdeFeatures(),
      ksqlConfig
  );

  return new KsqlTopic(
      newTopic.getTopicName(),
      KeyFormat.of(keyFormatInfo, keyFeatures, newTopic.getWindowInfo()),
      ValueFormat.of(newTopic.getValueFormat(), valueFeatures)
  );
}
A further usage of io.confluent.ksql.execution.ddl.commands.KsqlTopic in the ksql project by Confluent Inc.
The following snippet is the setUp method of the ScalablePushRegistryTest class.
@Before
public void setUp() {
// Test fixture wiring for ScalablePushRegistry: stubs the topic/consumer
// factories, replaces the executor mocks with a real thread pool, and
// constructs the registry under test.
when(ksqlTopic.getKafkaTopicName()).thenReturn(TOPIC);
when(kafkaConsumerFactory.create(any(), any(), any(), any(), any(), any())).thenReturn(kafkaConsumer);
catchupCoordinator = new TestCatchupCoordinator();
// Two latest consumers so tests can exercise a second create after the first
// (the factory stub below returns them in sequence).
latestConsumer = new TestLatestConsumer(TOPIC, false, SCHEMA, kafkaConsumer, catchupCoordinator, assignment -> {
}, ksqlConfig, Clock.systemUTC());
latestConsumer2 = new TestLatestConsumer(TOPIC, false, SCHEMA, kafkaConsumer, catchupCoordinator, assignment -> {
}, ksqlConfig, Clock.systemUTC());
catchupConsumer = new TestCatchupConsumer(TOPIC, false, SCHEMA, kafkaConsumer, () -> latestConsumer, catchupCoordinator, pushOffsetRange, Clock.systemUTC(), pq -> {
});
// consecutive calls get latestConsumer, then latestConsumer2
when(latestConsumerFactory.create(any(), anyBoolean(), any(), any(), any(), any(), any(), any())).thenReturn(latestConsumer, latestConsumer2);
when(catchupConsumerFactory.create(any(), anyBoolean(), any(), any(), any(), any(), any(), any(), anyLong(), any())).thenReturn(catchupConsumer);
when(ksqlTopic.getKeyFormat()).thenReturn(keyFormat);
when(keyFormat.isWindowed()).thenReturn(false);
// Route submissions on the mocked executors to a real pool so the submitted
// runnables actually execute; the latest-runnable is also captured so tests
// can re-invoke it directly.
realExecutorService = Executors.newFixedThreadPool(2);
doAnswer(a -> {
final Runnable runnable = a.getArgument(0);
startLatestRunnable.set(runnable);
realExecutorService.submit(runnable);
return null;
}).when(executorService).submit(any(Runnable.class));
doAnswer(a -> {
final Runnable runnable = a.getArgument(0);
realExecutorService.submit(runnable);
return null;
}).when(catchupService).submit(any(Runnable.class));
when(processingQueue.getQueryId()).thenReturn(new QueryId("q1"));
when(processingQueue2.getQueryId()).thenReturn(new QueryId("q2"));
registry = new ScalablePushRegistry(locator, SCHEMA, false, ImmutableMap.of(), ksqlTopic, serviceContext, ksqlConfig, SOURCE_APP_ID, kafkaConsumerFactory, latestConsumerFactory, catchupConsumerFactory, executorService, catchupService);
// NOTE(review): this config stub is installed AFTER the registry is
// constructed — presumably the constructor does not read it; confirm if the
// constructor ever starts consulting KSQL_QUERY_PUSH_V2_MAX_CATCHUP_CONSUMERS.
when(ksqlConfig.getInt(KsqlConfig.KSQL_QUERY_PUSH_V2_MAX_CATCHUP_CONSUMERS)).thenReturn(10);
}
Aggregations