Search in sources:

Example 1 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in the ksql project by confluentinc.

From class QueryBuilderTest, method shouldBuildCreateAsPersistentQueryCorrectly.

@Test
public void shouldBuildCreateAsPersistentQueryCorrectly() {
    // Given: a mock processing logger registered under the query's
    // uncaught-exception logger name, so the builder picks it up.
    final ProcessingLogger uncaughtProcessingLogger = mock(ProcessingLogger.class);
    final QueryContext uncaughtContext = new QueryContext.Stacker()
        .push("ksql.logger.thread.exception.uncaught")
        .getQueryContext();
    when(processingLoggerFactory.getLogger(
        QueryLoggerUtil.queryLoggerName(QUERY_ID, uncaughtContext)))
        .thenReturn(uncaughtProcessingLogger);

    // When: a CREATE_AS persistent query is built and initialized.
    final PersistentQueryMetadata queryMetadata =
        buildPersistentQuery(SOURCES, KsqlConstants.PersistentQueryType.CREATE_AS, QUERY_ID);
    queryMetadata.initialize();

    // Then: every exposed metadata field reflects the build inputs.
    assertThat(queryMetadata.getStatementString(), equalTo(STATEMENT_TEXT));
    assertThat(queryMetadata.getQueryId(), equalTo(QUERY_ID));
    assertThat(queryMetadata.getSinkName().get(), equalTo(SINK_NAME));
    assertThat(queryMetadata.getPhysicalSchema(), equalTo(SINK_PHYSICAL_SCHEMA));
    assertThat(queryMetadata.getResultTopic(), is(Optional.of(ksqlTopic)));
    assertThat(
        queryMetadata.getSourceNames(),
        equalTo(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
    assertThat(queryMetadata.getDataSourceType().get(), equalTo(DataSourceType.KSTREAM));
    assertThat(queryMetadata.getExecutionPlan(), equalTo(SUMMARY));
    assertThat(queryMetadata.getTopology(), is(topology));
    assertThat(queryMetadata.getOverriddenProperties(), equalTo(OVERRIDES));
    assertThat(queryMetadata.getStreamsProperties(), equalTo(capturedStreamsProperties()));
    // The uncaught-exception logger must be the one wired through the factory above.
    assertThat(queryMetadata.getProcessingLogger(), equalTo(uncaughtProcessingLogger));
    assertThat(
        queryMetadata.getPersistentQueryType(),
        equalTo(KsqlConstants.PersistentQueryType.CREATE_AS));
}
Also used : ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) Test(org.junit.Test)

Example 2 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in the ksql project by confluentinc.

From class QueryBuilderTest, method shouldBuildInsertPersistentQueryCorrectly.

@Test
public void shouldBuildInsertPersistentQueryCorrectly() {
    // Given: a mock processing logger registered under the query's
    // uncaught-exception logger name, so the builder picks it up.
    final ProcessingLogger uncaughtProcessingLogger = mock(ProcessingLogger.class);
    final QueryContext uncaughtContext = new QueryContext.Stacker()
        .push("ksql.logger.thread.exception.uncaught")
        .getQueryContext();
    when(processingLoggerFactory.getLogger(
        QueryLoggerUtil.queryLoggerName(QUERY_ID, uncaughtContext)))
        .thenReturn(uncaughtProcessingLogger);

    // When: an INSERT persistent query is built and initialized.
    final PersistentQueryMetadata queryMetadata =
        buildPersistentQuery(SOURCES, KsqlConstants.PersistentQueryType.INSERT, QUERY_ID);
    queryMetadata.initialize();

    // Then: every exposed metadata field reflects the build inputs.
    assertThat(queryMetadata.getStatementString(), equalTo(STATEMENT_TEXT));
    assertThat(queryMetadata.getQueryId(), equalTo(QUERY_ID));
    assertThat(queryMetadata.getSinkName().get(), equalTo(SINK_NAME));
    assertThat(queryMetadata.getPhysicalSchema(), equalTo(SINK_PHYSICAL_SCHEMA));
    assertThat(queryMetadata.getResultTopic(), is(Optional.of(ksqlTopic)));
    assertThat(
        queryMetadata.getSourceNames(),
        equalTo(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
    assertThat(queryMetadata.getDataSourceType().get(), equalTo(DataSourceType.KSTREAM));
    assertThat(queryMetadata.getExecutionPlan(), equalTo(SUMMARY));
    assertThat(queryMetadata.getTopology(), is(topology));
    assertThat(queryMetadata.getOverriddenProperties(), equalTo(OVERRIDES));
    assertThat(queryMetadata.getStreamsProperties(), equalTo(capturedStreamsProperties()));
    // The uncaught-exception logger must be the one wired through the factory above.
    assertThat(queryMetadata.getProcessingLogger(), equalTo(uncaughtProcessingLogger));
    assertThat(
        queryMetadata.getPersistentQueryType(),
        equalTo(KsqlConstants.PersistentQueryType.INSERT));
}
Also used : ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker) PersistentQueryMetadata(io.confluent.ksql.util.PersistentQueryMetadata) Test(org.junit.Test)

Example 3 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in the ksql project by confluentinc.

From class TableGroupByBuilderBase, method build.

public <K> KGroupedTableHolder build(final KTableHolder<K> table, final QueryContext queryContext, final Formats formats, final List<Expression> groupByExpressions) {
    // Compile the GROUP BY expressions against the source table's schema.
    final LogicalSchema schema = table.getSchema();
    final List<CompiledExpression> compiledGroupBy = CodeGenRunner.compileExpressions(
        groupByExpressions.stream(),
        "Group By",
        schema,
        buildContext.getKsqlConfig(),
        buildContext.getFunctionRegistry());

    // Build the grouping params, routing evaluation errors to the query's processing logger.
    final ProcessingLogger processingLogger = buildContext.getProcessingLogger(queryContext);
    final GroupByParams groupByParams = paramsFactory.build(schema, compiledGroupBy, processingLogger);

    // Serdes for the re-keyed grouped table, built from the post-group-by schema.
    final PhysicalSchema physicalSchema = PhysicalSchema.from(
        groupByParams.getSchema(), formats.getKeyFeatures(), formats.getValueFeatures());
    final Serde<GenericKey> keySerde =
        buildContext.buildKeySerde(formats.getKeyFormat(), physicalSchema, queryContext);
    final Serde<GenericRow> valueSerde =
        buildContext.buildValueSerde(formats.getValueFormat(), physicalSchema, queryContext);
    final Grouped<GenericKey, GenericRow> grouped =
        groupedFactory.create(StreamsUtil.buildOpName(queryContext), keySerde, valueSerde);

    // Drop null rows before grouping, then re-key by the GROUP BY mapper's result.
    final KGroupedTable<GenericKey, GenericRow> groupedTable = table.getTable()
        .filter((key, value) -> value != null)
        .groupBy(new TableKeyValueMapper<>(groupByParams.getMapper()), grouped);

    return KGroupedTableHolder.of(groupedTable, groupByParams.getSchema());
}
Also used : PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) KeyValueMapper(org.apache.kafka.streams.kstream.KeyValueMapper) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) Expression(io.confluent.ksql.execution.expression.tree.Expression) QueryContext(io.confluent.ksql.execution.context.QueryContext) KeyValue(org.apache.kafka.streams.KeyValue) CompiledExpression(io.confluent.ksql.execution.codegen.CompiledExpression) Formats(io.confluent.ksql.execution.plan.Formats) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Function(java.util.function.Function) CodeGenRunner(io.confluent.ksql.execution.codegen.CodeGenRunner) Grouped(org.apache.kafka.streams.kstream.Grouped) KGroupedTableHolder(io.confluent.ksql.execution.plan.KGroupedTableHolder) KTableHolder(io.confluent.ksql.execution.plan.KTableHolder) List(java.util.List) KGroupedTable(org.apache.kafka.streams.kstream.KGroupedTable) GenericRow(io.confluent.ksql.GenericRow) Serde(org.apache.kafka.common.serialization.Serde) Objects.requireNonNull(java.util.Objects.requireNonNull) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) GenericKey(io.confluent.ksql.GenericKey) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) CompiledExpression(io.confluent.ksql.execution.codegen.CompiledExpression) GenericRow(io.confluent.ksql.GenericRow) PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) GenericKey(io.confluent.ksql.GenericKey)

Example 4 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in the ksql project by confluentinc.

From class TableSelectBuilder, method build.

@SuppressWarnings("unchecked")
public static <K> KTableHolder<K> build(final KTableHolder<K> table, final TableSelect<K> step, final RuntimeBuildContext buildContext, final Optional<Formats> formats, final MaterializedFactory materializedFactory) {
    final LogicalSchema sourceSchema = table.getSchema();
    final QueryContext queryContext = step.getProperties().getQueryContext();

    // Compile the projection (SELECT expressions + key columns) into a value mapper.
    final Selection<K> selection = Selection.of(
        sourceSchema,
        step.getKeyColumnNames(),
        step.getSelectExpressions(),
        buildContext.getKsqlConfig(),
        buildContext.getFunctionRegistry());
    final SelectValueMapper<K> selectMapper = selection.getMapper();
    final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
    final Named selectName = Named.as(StreamsUtil.buildOpName(queryContext));

    // If there is no upstream materialization builder, this step must materialize itself.
    final Optional<MaterializationInfo.Builder> matBuilder = table.getMaterializationBuilder();
    final boolean forceMaterialize = !matBuilder.isPresent();

    final Serde<K> keySerde;
    final Serde<GenericRow> valSerde;
    if (formats.isPresent()) {
        final Formats materializationFormat = formats.get();
        final PhysicalSchema physicalSchema = PhysicalSchema.from(
            selection.getSchema(),
            materializationFormat.getKeyFeatures(),
            materializationFormat.getValueFeatures());
        keySerde = (Serde<K>) buildContext.buildKeySerde(
            materializationFormat.getKeyFormat(), physicalSchema, queryContext);
        valSerde = buildContext.buildValueSerde(
            materializationFormat.getValueFormat(), physicalSchema, queryContext);

        if (forceMaterialize) {
            // Materialize into a named state store and return early with a fresh
            // materialization builder rooted at that store.
            final Stacker stacker = Stacker.of(step.getProperties().getQueryContext());
            final String stateStoreName =
                StreamsUtil.buildOpName(stacker.push(PROJECT_OP).getQueryContext());
            final Materialized<K, GenericRow, KeyValueStore<Bytes, byte[]>> materialized =
                materializedFactory.create(keySerde, valSerde, stateStoreName);
            final KTable<K, GenericRow> transformed = table.getTable().transformValues(
                () -> new KsTransformer<>(selectMapper.getTransformer(logger)),
                materialized);
            return KTableHolder.materialized(
                transformed,
                selection.getSchema(),
                table.getExecutionKeyFactory(),
                MaterializationInfo.builder(stateStoreName, selection.getSchema()));
        }
    } else {
        keySerde = null;
        valSerde = null;
    }

    // Non-forced path: apply the projection and thread it through the existing
    // materialization builder (if any) so downstream pull queries see the new schema.
    final KTable<K, GenericRow> transformed = table.getTable().transformValues(
        () -> new KsTransformer<>(selectMapper.getTransformer(logger)),
        Materialized.with(keySerde, valSerde),
        selectName);
    final Optional<MaterializationInfo.Builder> materialization = matBuilder.map(b -> b.map(
        pl -> (KsqlTransformer<Object, GenericRow>) selectMapper.getTransformer(pl),
        selection.getSchema(),
        queryContext));
    return table.withTable(transformed, selection.getSchema()).withMaterialization(materialization);
}
Also used : TableSelect(io.confluent.ksql.execution.plan.TableSelect) PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) KTable(org.apache.kafka.streams.kstream.KTable) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) KsqlTransformer(io.confluent.ksql.execution.transform.KsqlTransformer) QueryContext(io.confluent.ksql.execution.context.QueryContext) MaterializationInfo(io.confluent.ksql.execution.materialization.MaterializationInfo) Formats(io.confluent.ksql.execution.plan.Formats) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Bytes(org.apache.kafka.common.utils.Bytes) KTableHolder(io.confluent.ksql.execution.plan.KTableHolder) SelectValueMapper(io.confluent.ksql.execution.transform.select.SelectValueMapper) KsTransformer(io.confluent.ksql.execution.streams.transform.KsTransformer) GenericRow(io.confluent.ksql.GenericRow) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker) Serde(org.apache.kafka.common.serialization.Serde) Named(org.apache.kafka.streams.kstream.Named) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Materialized(org.apache.kafka.streams.kstream.Materialized) Optional(java.util.Optional) Selection(io.confluent.ksql.execution.transform.select.Selection) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Named(org.apache.kafka.streams.kstream.Named) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) QueryContext(io.confluent.ksql.execution.context.QueryContext) Formats(io.confluent.ksql.execution.plan.Formats) GenericRow(io.confluent.ksql.GenericRow) PhysicalSchema(io.confluent.ksql.schema.ksql.PhysicalSchema) KsqlTransformer(io.confluent.ksql.execution.transform.KsqlTransformer)

Example 5 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in the ksql project by confluentinc.

From class TableFilterBuilder, method build.

static <K> KTableHolder<K> build(final KTableHolder<K> table, final TableFilter<K> step, final RuntimeBuildContext buildContext, final SqlPredicateFactory sqlPredicateFactory) {
    // Compile the filter expression into a predicate over the table's schema.
    final SqlPredicate predicate = sqlPredicateFactory.create(
        step.getFilterExpression(),
        table.getSchema(),
        buildContext.getKsqlConfig(),
        buildContext.getFunctionRegistry());
    final ProcessingLogger processingLogger =
        buildContext.getProcessingLogger(step.getProperties().getQueryContext());
    final Stacker stacker = Stacker.of(step.getProperties().getQueryContext());

    // Stage 1: evaluate the predicate per row; failures/errors surface as empty Optionals
    // (and are reported via the processing logger).
    final KTable<K, Optional<GenericRow>> evaluated = table.getTable().transformValues(
        () -> new KsTransformer<>(predicate.getTransformer(processingLogger)),
        Named.as(StreamsUtil.buildOpName(stacker.push(PRE_PROCESS_OP).getQueryContext())));
    // Stage 2: keep only rows whose predicate evaluated to a present value.
    final KTable<K, Optional<GenericRow>> passing = evaluated.filter(
        (key, value) -> value.isPresent(),
        Named.as(StreamsUtil.buildOpName(stacker.push(FILTER_OP).getQueryContext())));
    // Stage 3: unwrap the surviving Optionals back to plain rows.
    final KTable<K, GenericRow> filtered = passing.mapValues(
        Optional::get,
        Named.as(StreamsUtil.buildOpName(stacker.push(POST_PROCESS_OP).getQueryContext())));

    // Schema is unchanged by a filter; thread the predicate into any existing
    // materialization builder so pull queries apply the same filtering.
    return table
        .withTable(filtered, table.getSchema())
        .withMaterialization(table.getMaterializationBuilder()
            .map(b -> b.filter(predicate::getTransformer, step.getProperties().getQueryContext())));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) SqlPredicate(io.confluent.ksql.execution.transform.sqlpredicate.SqlPredicate) KTableHolder(io.confluent.ksql.execution.plan.KTableHolder) TableFilter(io.confluent.ksql.execution.plan.TableFilter) KTable(org.apache.kafka.streams.kstream.KTable) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) KsTransformer(io.confluent.ksql.execution.streams.transform.KsTransformer) GenericRow(io.confluent.ksql.GenericRow) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker) Named(org.apache.kafka.streams.kstream.Named) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Optional(java.util.Optional) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Optional(java.util.Optional) SqlPredicate(io.confluent.ksql.execution.transform.sqlpredicate.SqlPredicate) Stacker(io.confluent.ksql.execution.context.QueryContext.Stacker)

Aggregations

ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger)21 GenericRow (io.confluent.ksql.GenericRow)11 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)10 QueryContext (io.confluent.ksql.execution.context.QueryContext)7 CodeGenRunner (io.confluent.ksql.execution.codegen.CodeGenRunner)6 Test (org.junit.Test)6 CompiledExpression (io.confluent.ksql.execution.codegen.CompiledExpression)5 Expression (io.confluent.ksql.execution.expression.tree.Expression)5 RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext)5 GenericKey (io.confluent.ksql.GenericKey)4 Stacker (io.confluent.ksql.execution.context.QueryContext.Stacker)4 Formats (io.confluent.ksql.execution.plan.Formats)4 PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema)4 KStreamHolder (io.confluent.ksql.execution.plan.KStreamHolder)3 KTableHolder (io.confluent.ksql.execution.plan.KTableHolder)3 KsTransformer (io.confluent.ksql.execution.streams.transform.KsTransformer)3 LoggingDeserializer (io.confluent.ksql.logging.processing.LoggingDeserializer)3 List (java.util.List)3 Serde (org.apache.kafka.common.serialization.Serde)3 WrapperSerde (org.apache.kafka.common.serialization.Serdes.WrapperSerde)3