use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.
From the class PullPhysicalPlanBuilder, method translateFilterNode:
private SelectOperator translateFilterNode(final QueryFilterNode logicalNode) {
  lookupConstraints = logicalNode.getLookupConstraints();
  final ProcessingLogger logger = processingLogContext.getLoggerFactory().getLogger(
      QueryLoggerUtil.queryLoggerName(
          QueryType.PULL_QUERY,
          contextStacker.push("SELECT").getQueryContext()));
  return new SelectOperator(logicalNode, logger);
}
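For a unit test of this path, the logger lookup chain shown above can be stubbed with Mockito. A minimal wiring sketch, assuming local mocks (the variable names are illustrative, not taken from the ksql test suite):

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.confluent.ksql.logging.processing.ProcessingLogContext;
import io.confluent.ksql.logging.processing.ProcessingLogger;
import io.confluent.ksql.logging.processing.ProcessingLoggerFactory;

// Illustrative mock wiring: whatever name is derived from the query context,
// return a mock ProcessingLogger so the test can assert it reaches SelectOperator.
final ProcessingLogContext processingLogContext = mock(ProcessingLogContext.class);
final ProcessingLoggerFactory loggerFactory = mock(ProcessingLoggerFactory.class);
final ProcessingLogger logger = mock(ProcessingLogger.class);
when(processingLogContext.getLoggerFactory()).thenReturn(loggerFactory);
when(loggerFactory.getLogger(anyString())).thenReturn(logger);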
use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.
From the class StaticTopicSerdeTest, method shouldTrySourceTopicAndCallCallbackOnDeserializationFailureWithLoggingDeserializer:
@Test
public void shouldTrySourceTopicAndCallCallbackOnDeserializationFailureWithLoggingDeserializer() {
  // Given:
  when(delegateD.deserialize(Mockito.eq(STATIC_TOPIC), Mockito.any()))
      .thenThrow(new RuntimeException());
  final ProcessingLogger logger = mock(ProcessingLogger.class);
  final LoggingDeserializer<Object> loggingDelegate = new LoggingDeserializer<>(delegateD, logger);
  final Serde<Object> delegate = new WrapperSerde<>(delegateS, loggingDelegate);
  staticSerde = StaticTopicSerde.wrap(STATIC_TOPIC, delegate, callback);

  // When:
  final Object deserialized = staticSerde.deserializer().deserialize(SOURCE_TOPIC, SOME_BYTES);

  // Then:
  verifyNoMoreInteractions(logger);
  verify(callback).onDeserializationFailure(SOURCE_TOPIC, STATIC_TOPIC, SOME_BYTES);
  assertThat(deserialized, is(SOME_OBJECT));
}
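What the test exercises can be read off the stubbing: the delegate throws for STATIC_TOPIC, yet the call still returns SOME_OBJECT and the callback fires, so the serde evidently falls back to the source topic. A simplified sketch of that fallback, inferred from the test (not the actual StaticTopicSerde implementation, which additionally avoids writing a processing-log error for the first failed attempt, hence the verifyNoMoreInteractions(logger) assertion):

// Simplified fallback sketch, inferred from the test above; not the real code.
Object deserialize(final String sourceTopic, final byte[] bytes) {
  try {
    // First try the static topic (the reason this serde exists).
    return delegate.deserializer().deserialize(staticTopic, bytes);
  } catch (final RuntimeException e) {
    // On failure, retry with the source topic and notify the callback.
    final Object value = delegate.deserializer().deserialize(sourceTopic, bytes);
    callback.onDeserializationFailure(sourceTopic, staticTopic, bytes);
    return value;
  }
}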
use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.
From the class StreamSelectKeyBuilder, method build:
@VisibleForTesting
static <K> KStreamHolder<K> build(
    final KStreamHolder<K> stream,
    final StreamSelectKey<K> selectKey,
    final RuntimeBuildContext buildContext,
    final PartitionByParamsBuilder paramsBuilder) {
  final LogicalSchema sourceSchema = stream.getSchema();
  final QueryContext queryContext = selectKey.getProperties().getQueryContext();
  final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
  final PartitionByParams<K> params = paramsBuilder.build(
      sourceSchema, stream.getExecutionKeyFactory(), selectKey.getKeyExpressions(),
      buildContext.getKsqlConfig(), buildContext.getFunctionRegistry(), logger);
  final Mapper<K> mapper = params.getMapper();
  final KStream<K, GenericRow> kStream = stream.getStream();
  final KStream<K, GenericRow> reKeyed =
      kStream.map(mapper, Named.as(queryContext.formatContext() + "-SelectKey"));
  return new KStreamHolder<>(
      reKeyed, params.getSchema(), stream.getExecutionKeyFactory().withQueryBuilder(buildContext));
}
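Note that the builder itself never writes to the ProcessingLogger: the logger resolved for the select-key step's QueryContext is handed to the PartitionByParamsBuilder, presumably so that errors raised while evaluating the new key expressions are attributed to this step in the processing log. The re-keyed stream also keeps the step's context in its processor name via Named.as(queryContext.formatContext() + "-SelectKey").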
use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.
From the class StreamGroupByBuilderBase, method build:
public <K> KGroupedStreamHolder build(
    final KStreamHolder<K> stream,
    final QueryContext queryContext,
    final Formats formats,
    final List<Expression> groupByExpressions) {
  final LogicalSchema sourceSchema = stream.getSchema();
  final List<CompiledExpression> groupBy = CodeGenRunner.compileExpressions(
      groupByExpressions.stream(), "Group By", sourceSchema,
      buildContext.getKsqlConfig(), buildContext.getFunctionRegistry());
  final ProcessingLogger logger = buildContext.getProcessingLogger(queryContext);
  final GroupByParams params = paramsFactory.build(sourceSchema, groupBy, logger);
  final Grouped<GenericKey, GenericRow> grouped = buildGrouped(
      formats, params.getSchema(), queryContext, buildContext, groupedFactory);
  final KGroupedStream<GenericKey, GenericRow> groupedStream = stream.getStream()
      .filter((k, v) -> v != null)
      .groupBy((k, v) -> params.getMapper().apply(v), grouped);
  return KGroupedStreamHolder.of(groupedStream, params.getSchema());
}
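At runtime it is the mapper from GroupByParams, not this builder, that touches the ProcessingLogger: each row is mapped to its grouping key by evaluating the compiled GROUP BY expressions, and evaluation failures are presumably reported through the logger passed to paramsFactory.build. A minimal usage sketch, with the mapper's type inferred from params.getMapper().apply(v) and the KGroupedStream<GenericKey, GenericRow> above:

// Illustrative only; the exact mapper type is an inference, and `row` stands in
// for an incoming GenericRow value.
final java.util.function.Function<GenericRow, GenericKey> mapper = params.getMapper();
final GenericKey key = mapper.apply(row);  // failures here surface via the ProcessingLogger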
use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.
From the class StreamFilterBuilder, method build:
static <K> KStreamHolder<K> build(
    final KStreamHolder<K> stream, final StreamFilter<K> step,
    final RuntimeBuildContext buildContext, final SqlPredicateFactory predicateFactory) {
  final SqlPredicate predicate = predicateFactory.create(step.getFilterExpression(),
      stream.getSchema(), buildContext.getKsqlConfig(), buildContext.getFunctionRegistry());
  final ProcessingLogger processingLogger =
      buildContext.getProcessingLogger(step.getProperties().getQueryContext());
  final KStream<K, GenericRow> filtered = stream.getStream().flatTransformValues(
      () -> toFlatMapTransformer(predicate.getTransformer(processingLogger)),
      Named.as(StreamsUtil.buildOpName(step.getProperties().getQueryContext())));
  return stream.withStream(filtered, stream.getSchema());
}
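The filter is applied with flatTransformValues rather than a plain KStream#filter: the transformer obtained from predicate.getTransformer(processingLogger) can emit zero rows for a record, which allows a row whose predicate evaluation fails to be reported to this step's ProcessingLogger and dropped, rather than failing the task. (That the logger is wired for this purpose is an inference from the call above, not something stated in the snippet itself.)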