Use of io.confluent.ksql.execution.plan.ExecutionStep in project ksql by confluentinc.
Class QueryRegistryImplTest, method givenCreate:
private PersistentQueryMetadata givenCreate(
    final QueryRegistry registry,
    final String id,
    final String source,
    final Optional<String> sink,
    KsqlConstants.PersistentQueryType persistentQueryType) {
  final QueryId queryId = new QueryId(id);
  final PersistentQueryMetadata query = mock(PersistentQueryMetadataImpl.class);
  final PersistentQueryMetadata newQuery = mock(BinPackedPersistentQueryMetadataImpl.class);
  final DataSource sinkSource = mock(DataSource.class);
  final ExecutionStep physicalPlan = mock(ExecutionStep.class);
  sink.ifPresent(s -> {
    when(sinkSource.getName()).thenReturn(SourceName.of(s));
    when(query.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
    when(newQuery.getSinkName()).thenReturn(Optional.of(SourceName.of(s)));
  });
  when(newQuery.getOverriddenProperties()).thenReturn(new HashMap<>());
  when(newQuery.getQueryId()).thenReturn(queryId);
  when(newQuery.getSink()).thenReturn(Optional.of(sinkSource));
  when(newQuery.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
  when(newQuery.getPersistentQueryType()).thenReturn(persistentQueryType);
  when(newQuery.getPhysicalPlan()).thenReturn(physicalPlan);
  final SharedKafkaStreamsRuntime runtime = mock(SharedKafkaStreamsRuntimeImpl.class);
  try {
    Field sharedRuntime =
        BinPackedPersistentQueryMetadataImpl.class.getDeclaredField("sharedKafkaStreamsRuntime");
    sharedRuntime.setAccessible(true);
    sharedRuntime.set(newQuery, runtime);
  } catch (NoSuchFieldException e) {
    e.printStackTrace();
  } catch (IllegalAccessException e) {
    e.printStackTrace();
  }
  when(runtime.getNewQueryErrorQueue()).thenReturn(mock(QueryMetadataImpl.TimeBoundedQueue.class));
  when(query.getQueryId()).thenReturn(queryId);
  when(query.getSink()).thenReturn(Optional.of(sinkSource));
  when(query.getSourceNames()).thenReturn(ImmutableSet.of(SourceName.of(source)));
  when(query.getPersistentQueryType()).thenReturn(persistentQueryType);
  when(query.getPhysicalPlan()).thenReturn(physicalPlan);
  when(queryBuilder.buildPersistentQueryInSharedRuntime(
      any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
      .thenReturn(newQuery);
  when(queryBuilder.buildPersistentQueryInDedicatedRuntime(
      any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any()))
      .thenReturn(query);
  when(config.getConfig(true)).thenReturn(ksqlConfig);
  return registry.createOrReplacePersistentQuery(
      config,
      serviceContext,
      logContext,
      metaStore,
      "sql",
      queryId,
      Optional.of(sinkSource),
      ImmutableSet.of(toSource(source)),
      mock(ExecutionStep.class),
      "plan-summary",
      persistentQueryType,
      sharedRuntimes ? Optional.of("applicationId") : Optional.empty());
}
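The test above injects the mocked SharedKafkaStreamsRuntime into the private sharedKafkaStreamsRuntime field of BinPackedPersistentQueryMetadataImpl via reflection, since no setter is exposed. Below is a minimal, self-contained sketch of that injection pattern; the Holder and Runtime classes are hypothetical stand-ins, not ksql types.

import java.lang.reflect.Field;

public class FieldInjectionSketch {

  static class Runtime {}

  static class Holder {
    private Runtime runtime; // private, no setter exposed
    Runtime runtime() { return runtime; }
  }

  public static void main(String[] args) throws Exception {
    final Holder holder = new Holder();
    final Runtime fake = new Runtime();

    // Look up the private field by name, open it for writing, and overwrite its value.
    final Field field = Holder.class.getDeclaredField("runtime");
    field.setAccessible(true);
    field.set(holder, fake);

    System.out.println(holder.runtime() == fake); // true
  }
}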
Use of io.confluent.ksql.execution.plan.ExecutionStep in project ksql by confluentinc.
Class PlanSummary, method summarize:
private StepSummary summarize(final ExecutionStep<?> step, final String indent) {
  final StringBuilder stringBuilder = new StringBuilder();
  final List<StepSummary> sourceSummaries = step.getSources().stream()
      .map(s -> summarize(s, indent + "\t"))
      .collect(Collectors.toList());
  final String opName = OP_NAME.get(step.getClass());
  if (opName == null) {
    throw new UnsupportedOperationException(
        "Unsupported step type: " + step.getClass() + ", please add a step type");
  }
  final LogicalSchema schema = getSchema(step, sourceSummaries);
  stringBuilder.append(indent)
      .append(" > [ ").append(opName).append(" ] | Schema: ")
      .append(schema.toString(FORMAT_OPTIONS))
      .append(" | Logger: ")
      .append(QueryLoggerUtil.queryLoggerName(queryId, step.getProperties().getQueryContext()))
      .append("\n");
  for (final StepSummary sourceSummary : sourceSummaries) {
    stringBuilder.append("\t").append(indent).append(sourceSummary.summary);
  }
  return new StepSummary(schema, stringBuilder.toString());
}
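summarize walks the ExecutionStep tree recursively: each source step is summarized with one extra level of indentation, and the parent emits its own line before the child summaries. A simplified, dependency-free sketch of the same recursion follows; the Step class is hypothetical and stands in for ksql's ExecutionStep.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class TreeSummarySketch {

  static class Step {
    final String name;
    final List<Step> sources;
    Step(final String name, final Step... sources) {
      this.name = name;
      this.sources = Arrays.asList(sources);
    }
  }

  // Same shape as PlanSummary.summarize: summarize the children first with a deeper
  // indent, then emit this node's line followed by the child summaries.
  static String summarize(final Step step, final String indent) {
    final List<String> children = step.sources.stream()
        .map(s -> summarize(s, indent + "\t"))
        .collect(Collectors.toList());
    final StringBuilder sb = new StringBuilder();
    sb.append(indent).append(" > [ ").append(step.name).append(" ]\n");
    children.forEach(sb::append);
    return sb.toString();
  }

  public static void main(String[] args) {
    final Step plan = new Step("PROJECT", new Step("FILTER", new Step("SOURCE")));
    System.out.print(summarize(plan, ""));
  }
}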
Use of io.confluent.ksql.execution.plan.ExecutionStep in project ksql by confluentinc.
Class SchemaKTableTest, method buildSourceStep:
private ExecutionStep buildSourceStep(final LogicalSchema schema, final KTable kTable) {
  final ExecutionStep sourceStep = mock(ExecutionStep.class);
  when(sourceStep.build(any(), eq(planInfo)))
      .thenReturn(KTableHolder.materialized(kTable, schema, executionKeyFactory, materializationBuilder));
  return sourceStep;
}
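The helper stubs a mocked ExecutionStep so that building it against the expected PlanInfo returns a pre-made KTableHolder; note that any() and eq(planInfo) are used together, since Mockito requires all arguments to be matchers once one of them is. A self-contained sketch of the same stubbing pattern, using a hypothetical Builder interface rather than ksql's types:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class StubbedBuildSketch {

  // Hypothetical stand-in for ExecutionStep#build(context, planInfo).
  interface Builder { String build(Object context, String planInfo); }

  public static void main(String[] args) {
    final String planInfo = "plan-info";

    // When one argument uses a matcher, every argument must be a matcher; eq() wraps the literal.
    final Builder sourceStep = mock(Builder.class);
    when(sourceStep.build(any(), eq(planInfo))).thenReturn("materialized-table");

    System.out.println(sourceStep.build(new Object(), planInfo)); // materialized-table
    System.out.println(sourceStep.build(new Object(), "other"));  // null (no stub matches)
  }
}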
Use of io.confluent.ksql.execution.plan.ExecutionStep in project ksql by confluentinc.
Class SchemaKSourceFactory, method buildTable:
private static SchemaKTable<?> buildTable(
    final PlanBuildContext buildContext,
    final DataSource dataSource,
    final Stacker contextStacker) {
  final KeyFormat keyFormat = dataSource.getKsqlTopic().getKeyFormat();
  if (keyFormat.isWindowed()) {
    throw new IllegalArgumentException("windowed");
  }

  final SourceStep<KTableHolder<GenericKey>> step;
  final int pseudoColumnVersionToUse = determinePseudoColumnVersionToUse(buildContext);

  // If the old query has a v1 table step, continue to use it.
  // See https://github.com/confluentinc/ksql/pull/7990
  boolean useOldExecutionStepVersion = false;
  if (buildContext.getPlanInfo().isPresent()) {
    final Set<ExecutionStep<?>> sourceSteps = buildContext.getPlanInfo().get().getSources();
    useOldExecutionStepVersion =
        sourceSteps.stream().anyMatch(executionStep -> executionStep instanceof TableSourceV1);
  }

  if (useOldExecutionStepVersion
      && pseudoColumnVersionToUse != SystemColumns.LEGACY_PSEUDOCOLUMN_VERSION_NUMBER) {
    throw new IllegalStateException("TableSourceV2 was released in conjunction with pseudocolumn "
        + "version 1. Something has gone very wrong");
  }

  if (buildContext.getKsqlConfig().getBoolean(KsqlConfig.KSQL_ROWPARTITION_ROWOFFSET_ENABLED)
      && !useOldExecutionStepVersion) {
    step = ExecutionStepFactory.tableSource(contextStacker, dataSource.getSchema(),
        dataSource.getKafkaTopicName(), Formats.from(dataSource.getKsqlTopic()),
        dataSource.getTimestampColumn(),
        InternalFormats.of(keyFormat, Formats.from(dataSource.getKsqlTopic()).getValueFormat()),
        pseudoColumnVersionToUse);
  } else {
    step = ExecutionStepFactory.tableSourceV1(contextStacker, dataSource.getSchema(),
        dataSource.getKafkaTopicName(), Formats.from(dataSource.getKsqlTopic()),
        dataSource.getTimestampColumn(), pseudoColumnVersionToUse);
  }

  return schemaKTable(
      buildContext, resolveSchema(buildContext, step, dataSource),
      dataSource.getKsqlTopic().getKeyFormat(), step);
}
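buildTable chooses between the newer tableSource step and the legacy tableSourceV1 step: the legacy step is kept whenever the existing plan already contains a TableSourceV1, and the new step is used only when the ROWPARTITION/ROWOFFSET feature flag is enabled. A condensed, dependency-free sketch of just that step-version selection; the enum and boolean flags are hypothetical simplifications, not ksql's API.

public class TableSourceVersionSketch {

  enum StepVersion { V1, V2 }

  // Mirrors the branch structure above: stay on V1 if the old plan used it,
  // otherwise pick V2 only when the feature flag is enabled.
  static StepVersion choose(final boolean oldPlanUsedV1, final boolean rowPartitionOffsetEnabled) {
    if (rowPartitionOffsetEnabled && !oldPlanUsedV1) {
      return StepVersion.V2;
    }
    return StepVersion.V1;
  }

  public static void main(String[] args) {
    System.out.println(choose(true, true));   // V1 (keep legacy step for plan compatibility)
    System.out.println(choose(false, true));  // V2
    System.out.println(choose(false, false)); // V1
  }
}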