Search in sources:

Example 6 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

From the class GenericSerdeFactory, the method wrapInLoggingSerde:

// Part of injected API
@SuppressWarnings("MethodMayBeStatic")
<T> Serde<T> wrapInLoggingSerde(
        final Serde<T> formatSerde,
        final String loggerNamePrefix,
        final ProcessingLogContext processingLogContext) {
    // Create separate processing loggers for the serializer and deserializer paths.
    final ProcessingLogger serializerProcessingLogger = processingLogContext.getLoggerFactory()
        .getLogger(join(loggerNamePrefix, SERIALIZER_LOGGER_NAME));
    final ProcessingLogger deserializerProcessingLogger = processingLogContext.getLoggerFactory()
        .getLogger(join(loggerNamePrefix, DESERIALIZER_LOGGER_NAME));
    // Wrap both halves of the format serde so (de)serialization failures are reported
    // to the processing log.
    return Serdes.serdeFrom(
        new LoggingSerializer<>(formatSerde.serializer(), serializerProcessingLogger),
        new LoggingDeserializer<>(formatSerde.deserializer(), deserializerProcessingLogger));
}
Also used : ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger)
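LoggingSerializer and LoggingDeserializer here presumably just delegate to the format's own serializer and deserializer and report failures to the ProcessingLogger they were constructed with. A minimal stand-alone sketch of that delegation pattern, using only the plain Kafka Serializer interface and a Consumer<String> callback in place of ksql's ProcessingLogger (the class name, callback shape, and rethrow policy below are illustrative assumptions, not ksql's actual implementation):

import java.util.function.Consumer;
import org.apache.kafka.common.serialization.Serializer;

// Illustrative wrapper: delegate serialization and report failures to a logger-like
// callback before rethrowing. Consumer<String> stands in for ksql's ProcessingLogger,
// whose real API differs.
final class ErrorReportingSerializer<T> implements Serializer<T> {

    private final Serializer<T> delegate;
    private final Consumer<String> errorLogger;

    ErrorReportingSerializer(final Serializer<T> delegate, final Consumer<String> errorLogger) {
        this.delegate = delegate;
        this.errorLogger = errorLogger;
    }

    @Override
    public byte[] serialize(final String topic, final T data) {
        try {
            return delegate.serialize(topic, data);
        } catch (final RuntimeException e) {
            errorLogger.accept("Serialization failed for topic " + topic + ": " + e.getMessage());
            throw e;
        }
    }
}

The deserializer side would mirror this, wrapping Deserializer<T> in the same way.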

Example 7 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

From the class PartitionByParamsFactory, the method build:

public static <K> PartitionByParams<K> build(
        final LogicalSchema sourceSchema,
        final ExecutionKeyFactory<K> serdeFactory,
        final List<Expression> partitionBys,
        final KsqlConfig ksqlConfig,
        final FunctionRegistry functionRegistry,
        final ProcessingLogger logger) {
    final List<PartitionByColumn> partitionByCols =
        getPartitionByColumnName(sourceSchema, partitionBys);
    final LogicalSchema resultSchema =
        buildSchema(sourceSchema, partitionBys, functionRegistry, partitionByCols);
    final Mapper<K> mapper;
    if (isPartitionByNull(partitionBys)) {
        // In the case of PARTITION BY NULL it is sufficient to set the new key to null,
        // as the old key is already present in the current value.
        mapper = (k, v) -> new KeyValue<>(null, v);
    } else {
        // Compile one evaluator per PARTITION BY expression, noting whether the
        // expression references key columns only.
        final List<PartitionByExpressionEvaluator> evaluators = partitionBys.stream()
            .map(pby -> {
                final Set<? extends ColumnReferenceExp> sourceColsInPartitionBy =
                    ColumnExtractor.extractColumns(pby);
                final boolean partitionByInvolvesKeyColsOnly = sourceColsInPartitionBy.stream()
                    .map(ColumnReferenceExp::getColumnName)
                    .allMatch(sourceSchema::isKeyColumn);
                return buildExpressionEvaluator(
                    sourceSchema, pby, ksqlConfig, functionRegistry, logger,
                    partitionByInvolvesKeyColsOnly);
            })
            .collect(Collectors.toList());
        mapper = buildMapper(partitionByCols, evaluators, serdeFactory);
    }
    return new PartitionByParams<>(resultSchema, mapper);
}
Also used : ColumnExtractor(io.confluent.ksql.execution.util.ColumnExtractor) ColumnName(io.confluent.ksql.name.ColumnName) CompiledExpression(io.confluent.ksql.execution.codegen.CompiledExpression) KeyUtil(io.confluent.ksql.execution.util.KeyUtil) Supplier(java.util.function.Supplier) CodeGenRunner(io.confluent.ksql.execution.codegen.CodeGenRunner) ExpressionTypeManager(io.confluent.ksql.execution.util.ExpressionTypeManager) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) NullLiteral(io.confluent.ksql.execution.expression.tree.NullLiteral) ColumnReferenceExp(io.confluent.ksql.execution.expression.tree.ColumnReferenceExp) Mapper(io.confluent.ksql.execution.streams.PartitionByParams.Mapper) SqlType(io.confluent.ksql.schema.ksql.types.SqlType) ColumnNames(io.confluent.ksql.schema.ksql.ColumnNames) Expression(io.confluent.ksql.execution.expression.tree.Expression) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KeyValue(org.apache.kafka.streams.KeyValue) Set(java.util.Set) KsqlConfig(io.confluent.ksql.util.KsqlConfig) ExecutionKeyFactory(io.confluent.ksql.execution.plan.ExecutionKeyFactory) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) Builder(io.confluent.ksql.schema.ksql.LogicalSchema.Builder) Objects(java.util.Objects) List(java.util.List) ColumnAliasGenerator(io.confluent.ksql.schema.ksql.ColumnAliasGenerator) Stream(java.util.stream.Stream) GenericRow(io.confluent.ksql.GenericRow) KsqlException(io.confluent.ksql.util.KsqlException) GenericKey(io.confluent.ksql.GenericKey) Column(io.confluent.ksql.schema.ksql.Column)
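The interesting part of build is the choice of Mapper: PARTITION BY NULL simply nulls out the key and keeps the value, while any other PARTITION BY list is compiled into per-expression evaluators whose results presumably form the new key. A rough stand-alone sketch of those two mapper shapes, using plain java.util.function evaluators and a generic key builder in place of ksql's PartitionByExpressionEvaluator and GenericKey plumbing (all names below are illustrative):

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KeyValueMapper;

// Illustrative mappers mirroring the two branches above; the evaluators and key builder
// are stand-ins for ksql's compiled expressions and key construction.
final class RekeyMappers {

    // PARTITION BY NULL: drop the key, keep the value untouched.
    static <K, V> KeyValueMapper<K, V, KeyValue<K, V>> partitionByNull() {
        return (k, v) -> new KeyValue<>(null, v);
    }

    // PARTITION BY <expressions>: evaluate each expression against the value and
    // assemble the results into the new key.
    static <K, V, NK> KeyValueMapper<K, V, KeyValue<NK, V>> partitionByExpressions(
            final List<Function<V, Object>> evaluators,
            final Function<List<Object>, NK> keyBuilder) {
        return (k, v) -> {
            final List<Object> keyParts = evaluators.stream()
                .map(evaluator -> evaluator.apply(v))
                .collect(Collectors.toList());
            return new KeyValue<>(keyBuilder.apply(keyParts), v);
        };
    }
}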

Example 8 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

From the class StreamSelectBuilder, the method build:

public static <K> KStreamHolder<K> build(
        final KStreamHolder<K> stream,
        final StreamSelect<K> step,
        final RuntimeBuildContext buildContext) {
    final QueryContext queryContext = step.getProperties().getQueryContext();
    final LogicalSchema sourceSchema = stream.getSchema();
    // Compile the SELECT projection for this step.
    final Selection<K> selection = Selection.of(
        sourceSchema,
        step.getKeyColumnNames(),
        step.getSelectExpressions(),
        buildContext.getKsqlConfig(),
        buildContext.getFunctionRegistry());
    final SelectValueMapper<K> selectMapper = selection.getMapper();
    final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
    final Named selectName = Named.as(StreamsUtil.buildOpName(queryContext));
    // Apply the projection per record via a value transformer, handing it the
    // processing logger for per-row error reporting.
    return stream.withStream(
        stream.getStream().transformValues(
            () -> new KsTransformer<>(selectMapper.getTransformer(logger)), selectName),
        selection.getSchema());
}
Also used : ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) Named(org.apache.kafka.streams.kstream.Named) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KsTransformer(io.confluent.ksql.execution.streams.transform.KsTransformer) QueryContext(io.confluent.ksql.execution.context.QueryContext)
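The compiled projection runs per record inside Kafka Streams' transformValues, and the ProcessingLogger obtained for this query context is handed to the transformer, presumably so row-level failures can be reported rather than failing the streams task. A rough sketch of that kind of adapter, written directly against Kafka Streams' ValueTransformerWithKey, with a BiFunction mapper and a Consumer<String> standing in for ksql's KsqlTransformer and ProcessingLogger (the names and the drop-on-error policy are assumptions):

import java.util.function.BiFunction;
import java.util.function.Consumer;
import org.apache.kafka.streams.kstream.ValueTransformerWithKey;
import org.apache.kafka.streams.processor.ProcessorContext;

// Illustrative per-record transformer: apply a mapping function and route failures to a
// logger-like callback instead of throwing from the streams task.
final class MappingTransformer<K, V, R> implements ValueTransformerWithKey<K, V, R> {

    private final BiFunction<K, V, R> mapper;
    private final Consumer<String> errorLogger;

    MappingTransformer(final BiFunction<K, V, R> mapper, final Consumer<String> errorLogger) {
        this.mapper = mapper;
        this.errorLogger = errorLogger;
    }

    @Override
    public void init(final ProcessorContext context) {
        // No per-task state needed in this sketch.
    }

    @Override
    public R transform(final K readOnlyKey, final V value) {
        try {
            return mapper.apply(readOnlyKey, value);
        } catch (final RuntimeException e) {
            errorLogger.accept("Row transform failed: " + e.getMessage());
            return null; // drop the row rather than fail the task (assumed policy)
        }
    }

    @Override
    public void close() {
    }
}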

Example 9 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

From the class StreamSelectKeyBuilderV1, the method build:

public static KStreamHolder<GenericKey> build(
        final KStreamHolder<?> stream,
        final StreamSelectKeyV1 selectKey,
        final RuntimeBuildContext buildContext) {
    final LogicalSchema sourceSchema = stream.getSchema();
    final CompiledExpression expression =
        buildExpressionEvaluator(selectKey, buildContext, sourceSchema);
    final ProcessingLogger processingLogger =
        buildContext.getProcessingLogger(selectKey.getProperties().getQueryContext());
    final String errorMsg =
        "Error extracting new key using expression " + selectKey.getKeyExpression();
    // Evaluation failures are routed to the processing logger using the message above.
    final Function<GenericRow, Object> evaluator =
        val -> expression.evaluate(val, null, processingLogger, () -> errorMsg);
    final LogicalSchema resultSchema =
        new StepSchemaResolver(buildContext.getKsqlConfig(), buildContext.getFunctionRegistry())
            .resolve(selectKey, sourceSchema);
    final KStream<?, GenericRow> kstream = stream.getStream();
    // Drop rows with a null value or a null computed key, then rekey on the evaluated expression.
    final KStream<GenericKey, GenericRow> rekeyed = kstream
        .filter((key, val) -> val != null && evaluator.apply(val) != null)
        .selectKey((key, val) -> GenericKey.genericKey(evaluator.apply(val)));
    return new KStreamHolder<>(rekeyed, resultSchema, ExecutionKeyFactory.unwindowed(buildContext));
}
Also used : KStreamHolder(io.confluent.ksql.execution.plan.KStreamHolder) StreamSelectKeyV1(io.confluent.ksql.execution.plan.StreamSelectKeyV1) RuntimeBuildContext(io.confluent.ksql.execution.runtime.RuntimeBuildContext) GenericRow(io.confluent.ksql.GenericRow) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) CompiledExpression(io.confluent.ksql.execution.codegen.CompiledExpression) GenericKey(io.confluent.ksql.GenericKey) KStream(org.apache.kafka.streams.kstream.KStream) ExecutionKeyFactory(io.confluent.ksql.execution.plan.ExecutionKeyFactory) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Function(java.util.function.Function) CodeGenRunner(io.confluent.ksql.execution.codegen.CodeGenRunner)
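The last two stream operations are the standard Kafka Streams rekey idiom: rows with a null value or a null computed key are filtered out first, then selectKey installs the evaluated key (which marks the stream for repartitioning downstream). A minimal sketch of that idiom with a plain Function standing in for the compiled expression evaluator (the helper name and simplified types are illustrative):

import java.util.function.Function;
import org.apache.kafka.streams.kstream.KStream;

// Illustrative rekey helper mirroring the filter + selectKey pair above: drop rows whose
// new key would be null, then replace the key with the evaluated value.
final class Rekey {

    static <K, V, NK> KStream<NK, V> rekeyDroppingNullKeys(
            final KStream<K, V> input,
            final Function<V, NK> extractKey) {
        return input
            .filter((key, value) -> value != null && extractKey.apply(value) != null)
            .selectKey((key, value) -> extractKey.apply(value));
    }
}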

Example 10 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

From the class TableSelectKeyBuilder, the method build:

@VisibleForTesting
static <K> KTableHolder<K> build(
        final KTableHolder<K> table,
        final TableSelectKey<K> selectKey,
        final RuntimeBuildContext buildContext,
        final MaterializedFactory materializedFactory,
        final PartitionByParamsBuilder paramsBuilder) {
    final LogicalSchema sourceSchema = table.getSchema();
    final QueryContext queryContext = selectKey.getProperties().getQueryContext();
    final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
    final PartitionByParams<K> params = paramsBuilder.build(
        sourceSchema,
        table.getExecutionKeyFactory(),
        selectKey.getKeyExpressions(),
        buildContext.getKsqlConfig(),
        buildContext.getFunctionRegistry(),
        logger);
    final Mapper<K> mapper = params.getMapper();
    final KTable<K, GenericRow> kTable = table.getTable();
    final Materialized<K, GenericRow, KeyValueStore<Bytes, byte[]>> materialized =
        MaterializationUtil.buildMaterialized(
            selectKey,
            params.getSchema(),
            selectKey.getInternalFormats(),
            buildContext,
            materializedFactory,
            table.getExecutionKeyFactory());
    // A KTable cannot be rekeyed in place: go through a stream, remap the key,
    // and materialize the result back into a table.
    final KTable<K, GenericRow> reKeyed = kTable.toStream()
        .map(mapper, Named.as(queryContext.formatContext() + "-SelectKey-Mapper"))
        .toTable(Named.as(queryContext.formatContext() + "-SelectKey"), materialized);
    final MaterializationInfo.Builder materializationBuilder = MaterializationInfo.builder(
        StreamsUtil.buildOpName(MaterializationUtil.materializeContext(selectKey)),
        params.getSchema());
    return KTableHolder.materialized(
        reKeyed,
        params.getSchema(),
        table.getExecutionKeyFactory().withQueryBuilder(buildContext),
        materializationBuilder);
}
Also used : GenericRow(io.confluent.ksql.GenericRow) ProcessingLogger(io.confluent.ksql.logging.processing.ProcessingLogger) MaterializationInfo(io.confluent.ksql.execution.materialization.MaterializationInfo) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KeyValueStore(org.apache.kafka.streams.state.KeyValueStore) QueryContext(io.confluent.ksql.execution.context.QueryContext) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
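A KTable cannot be rekeyed directly, so the builder detours through a stream: toStream, map with the PartitionByParams mapper, then toTable with an explicit Materialized so the rekeyed result is backed by a state store again. A minimal sketch of that detour using only Kafka Streams types, with a BiFunction key mapper and ad-hoc operator and store names standing in for ksql's mapper and MaterializationUtil naming (all names below are illustrative):

import java.util.function.BiFunction;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.state.KeyValueStore;

// Illustrative table rekey: convert to a stream, remap the key, and materialize the
// result back into a table. Operator and store names are placeholders.
final class TableRekey {

    static <K, V, NK> KTable<NK, V> rekeyTable(
            final KTable<K, V> table,
            final BiFunction<K, V, NK> newKey,
            final String opName) {
        final KeyValueMapper<K, V, KeyValue<NK, V>> toNewKey =
            (k, v) -> new KeyValue<>(newKey.apply(k, v), v);
        return table.toStream()
            .map(toNewKey, Named.as(opName + "-Mapper"))
            .toTable(
                Named.as(opName),
                Materialized.<NK, V, KeyValueStore<Bytes, byte[]>>as(opName + "-Store"));
    }
}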

Aggregations

ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger): 21
GenericRow (io.confluent.ksql.GenericRow): 11
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 10
QueryContext (io.confluent.ksql.execution.context.QueryContext): 7
CodeGenRunner (io.confluent.ksql.execution.codegen.CodeGenRunner): 6
Test (org.junit.Test): 6
CompiledExpression (io.confluent.ksql.execution.codegen.CompiledExpression): 5
Expression (io.confluent.ksql.execution.expression.tree.Expression): 5
RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext): 5
GenericKey (io.confluent.ksql.GenericKey): 4
Stacker (io.confluent.ksql.execution.context.QueryContext.Stacker): 4
Formats (io.confluent.ksql.execution.plan.Formats): 4
PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema): 4
KStreamHolder (io.confluent.ksql.execution.plan.KStreamHolder): 3
KTableHolder (io.confluent.ksql.execution.plan.KTableHolder): 3
KsTransformer (io.confluent.ksql.execution.streams.transform.KsTransformer): 3
LoggingDeserializer (io.confluent.ksql.logging.processing.LoggingDeserializer): 3
List (java.util.List): 3
Serde (org.apache.kafka.common.serialization.Serde): 3
WrapperSerde (org.apache.kafka.common.serialization.Serdes.WrapperSerde): 3