
Example 11 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

The class StreamFlatMapBuilder, method build.

public static <K> KStreamHolder<K> build(
        final KStreamHolder<K> stream,
        final StreamFlatMap<K> step,
        final RuntimeBuildContext buildContext) {
    final List<FunctionCall> tableFunctions = step.getTableFunctions();
    final LogicalSchema schema = stream.getSchema();
    final Builder<TableFunctionApplier> tableFunctionAppliersBuilder = ImmutableList.builder();
    final CodeGenRunner codeGenRunner = new CodeGenRunner(schema, buildContext.getKsqlConfig(), buildContext.getFunctionRegistry());
    for (final FunctionCall functionCall : tableFunctions) {
        final List<CompiledExpression> compiledExpressionList = new ArrayList<>(functionCall.getArguments().size());
        for (final Expression expression : functionCall.getArguments()) {
            final CompiledExpression compiledExpression = codeGenRunner.buildCodeGenFromParseTree(expression, "Table function");
            compiledExpressionList.add(compiledExpression);
        }
        final KsqlTableFunction tableFunction = UdtfUtil.resolveTableFunction(buildContext.getFunctionRegistry(), functionCall, schema);
        final TableFunctionApplier tableFunctionApplier = new TableFunctionApplier(tableFunction, compiledExpressionList);
        tableFunctionAppliersBuilder.add(tableFunctionApplier);
    }
    final QueryContext queryContext = step.getProperties().getQueryContext();
    final ProcessingLogger processingLogger = buildContext.getProcessingLogger(queryContext);
    final ImmutableList<TableFunctionApplier> tableFunctionAppliers = tableFunctionAppliersBuilder.build();
    final KStream<K, GenericRow> mapped = stream.getStream().flatTransformValues(
        () -> new KsTransformer<>(new KudtfFlatMapper<>(tableFunctionAppliers, processingLogger)),
        Named.as(StreamsUtil.buildOpName(queryContext)));
    return stream.withStream(
        mapped,
        buildSchema(stream.getSchema(), step.getTableFunctions(), buildContext.getFunctionRegistry()));
}
Also used: ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger), CodeGenRunner (io.confluent.ksql.execution.codegen.CodeGenRunner), ArrayList (java.util.ArrayList), LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema), QueryContext (io.confluent.ksql.execution.context.QueryContext), CompiledExpression (io.confluent.ksql.execution.codegen.CompiledExpression), GenericRow (io.confluent.ksql.GenericRow), Expression (io.confluent.ksql.execution.expression.tree.Expression), KsqlTableFunction (io.confluent.ksql.function.KsqlTableFunction), TableFunctionApplier (io.confluent.ksql.execution.function.udtf.TableFunctionApplier), FunctionCall (io.confluent.ksql.execution.expression.tree.FunctionCall), KudtfFlatMapper (io.confluent.ksql.execution.function.udtf.KudtfFlatMapper)
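
For reference, the ProcessingLogger passed to KudtfFlatMapper above is resolved per query step via buildContext.getProcessingLogger(queryContext). Below is a minimal, hypothetical sketch (not ksql source) of that lookup pattern, using only the ProcessingLogContext.getLoggerFactory().getLogger(name) call that also appears in Example 12; the class and method names are illustrative.

import io.confluent.ksql.logging.processing.ProcessingLogContext;
import io.confluent.ksql.logging.processing.ProcessingLogger;

public final class ProcessingLoggerLookupSketch {

    private ProcessingLoggerLookupSketch() {
    }

    // Resolves a ProcessingLogger by name, e.g. a name derived from the step's query context,
    // mirroring the lookup shown in Example 12. Hypothetical helper, not part of ksql.
    public static ProcessingLogger loggerFor(final ProcessingLogContext context, final String loggerName) {
        return context.getLoggerFactory().getLogger(loggerName);
    }
}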

Example 12 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

The class QueryBuilder, method buildStreamsProperties.

public static Map<String, Object> buildStreamsProperties(
        final String applicationId,
        final Optional<QueryId> queryId,
        final MetricCollectors metricCollectors,
        final KsqlConfig config,
        final ProcessingLogContext processingLogContext) {
    final Map<String, Object> newStreamsProperties = new HashMap<>(config.getKsqlStreamConfigProps(applicationId));
    newStreamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
    // Resolve the processing logger: per-query when a query id is present, otherwise per-application.
    final ProcessingLogger logger;
    if (queryId.isPresent()) {
        logger = processingLogContext.getLoggerFactory().getLogger(queryId.get().toString());
    } else {
        logger = processingLogContext.getLoggerFactory().getLogger(applicationId);
    }
    newStreamsProperties.put(ProductionExceptionHandlerUtil.KSQL_PRODUCTION_ERROR_LOGGER, logger);
    updateListProperty(newStreamsProperties, StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG), ConsumerCollector.class.getCanonicalName());
    updateListProperty(newStreamsProperties, StreamsConfig.producerPrefix(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG), ProducerCollector.class.getCanonicalName());
    updateListProperty(newStreamsProperties, StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG, RocksDBMetricsCollector.class.getName());
    updateListProperty(newStreamsProperties, StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG, StorageUtilizationMetricsReporter.class.getName());
    if (config.getBoolean(KsqlConfig.KSQL_SHARED_RUNTIME_ENABLED)) {
        newStreamsProperties.put(
            StreamsConfig.InternalConfig.TOPIC_PREFIX_ALTERNATIVE,
            ReservedInternalTopics.KSQL_INTERNAL_TOPIC_PREFIX
                + config.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG)
                + "query");
    }
    // Passing shared state into managed components
    newStreamsProperties.put(KsqlConfig.KSQL_INTERNAL_METRIC_COLLECTORS_CONFIG, metricCollectors);
    newStreamsProperties.put(KsqlConfig.KSQL_CUSTOM_METRICS_TAGS, config.getString(KsqlConfig.KSQL_CUSTOM_METRICS_TAGS));
    newStreamsProperties.put(KsqlConfig.KSQL_INTERNAL_METRICS_CONFIG, metricCollectors.getMetrics());
    newStreamsProperties.put(KsqlConfig.KSQL_INTERNAL_STREAMS_ERROR_COLLECTOR_CONFIG, StreamsErrorCollector.create(applicationId, metricCollectors));
    return newStreamsProperties;
}
Also used: ConsumerCollector (io.confluent.ksql.metrics.ConsumerCollector), ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger), ProducerCollector (io.confluent.ksql.metrics.ProducerCollector), HashMap (java.util.HashMap), StorageUtilizationMetricsReporter (io.confluent.ksql.internal.StorageUtilizationMetricsReporter), RocksDBMetricsCollector (io.confluent.ksql.execution.streams.metrics.RocksDBMetricsCollector)
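
The logger stored under ProductionExceptionHandlerUtil.KSQL_PRODUCTION_ERROR_LOGGER above is later read back out of the streams configuration (Example 15 asserts exactly this mapping). Below is a hedged sketch of reading it back from a properties map; the class and variable names are made up, and the config key is taken as a parameter so the sketch does not have to guess ProductionExceptionHandlerUtil's package.

import io.confluent.ksql.logging.processing.ProcessingLogger;
import java.util.Map;

public final class ProducerErrorLoggerSketch {

    private ProducerErrorLoggerSketch() {
    }

    // Reads the producer-error ProcessingLogger back out of a streams properties map.
    // Pass ProductionExceptionHandlerUtil.KSQL_PRODUCTION_ERROR_LOGGER as the key, the same
    // constant used in Examples 12 and 15.
    public static ProcessingLogger producerErrorLogger(
            final Map<String, Object> streamsProperties,
            final String productionErrorLoggerKey) {
        return (ProcessingLogger) streamsProperties.get(productionErrorLoggerKey);
    }
}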

Example 13 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

The class StaticTopicSerdeTest, method shouldLogOriginalFailureIfBothFail.

@Test
public void shouldLogOriginalFailureIfBothFail() {
    // Given:
    when(delegateD.deserialize(Mockito.any(), Mockito.any())).thenThrow(new RuntimeException());
    final ProcessingLogger logger = mock(ProcessingLogger.class);
    final LoggingDeserializer<Object> loggingDelegate = new LoggingDeserializer<>(delegateD, logger);
    final Serde<Object> delegate = new WrapperSerde<>(delegateS, loggingDelegate);
    staticSerde = StaticTopicSerde.wrap(STATIC_TOPIC, delegate, callback);
    // When:
    final RuntimeException err = assertThrows(RuntimeException.class, () -> staticSerde.deserializer().deserialize(SOURCE_TOPIC, SOME_BYTES));
    // Then:
    verify(logger).error(new DeserializationError(err, Optional.of(SOME_BYTES), STATIC_TOPIC, false));
    verifyNoMoreInteractions(callback);
}
Also used: ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger), WrapperSerde (org.apache.kafka.common.serialization.Serdes.WrapperSerde), LoggingDeserializer (io.confluent.ksql.logging.processing.LoggingDeserializer), DeserializationError (io.confluent.ksql.logging.processing.DeserializationError), Test (org.junit.Test)
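
The verify call above builds a second DeserializationError from the same arguments, so it relies on DeserializationError comparing equal by value. Below is a standalone, hypothetical sketch of that error-message contract, using only the constructor shape shown in the test; the payload, topic, and class name are illustrative.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import io.confluent.ksql.logging.processing.DeserializationError;
import io.confluent.ksql.logging.processing.ProcessingLogger;
import java.nio.charset.StandardCharsets;
import java.util.Optional;

public final class DeserializationErrorSketch {

    public static void main(final String[] args) {
        final ProcessingLogger logger = mock(ProcessingLogger.class);
        final byte[] payload = "bad-record".getBytes(StandardCharsets.UTF_8);
        final RuntimeException cause = new RuntimeException("boom");

        // Same argument shape as Example 13: cause, optional record bytes, topic, isKey flag.
        logger.error(new DeserializationError(cause, Optional.of(payload), "some-topic", false));

        // Passes only if DeserializationError has value-based equals, which Example 13's verify also assumes.
        verify(logger).error(new DeserializationError(cause, Optional.of(payload), "some-topic", false));
    }
}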

Example 14 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

The class StaticTopicSerdeTest, method shouldUseDelegateLoggingDeserializerWithStaticTopic.

@Test
public void shouldUseDelegateLoggingDeserializerWithStaticTopic() {
    // Given:
    final ProcessingLogger logger = mock(ProcessingLogger.class);
    final LoggingDeserializer<Object> loggingDelegate = new LoggingDeserializer<>(delegateD, logger);
    final Serde<Object> delegate = new WrapperSerde<>(delegateS, loggingDelegate);
    staticSerde = StaticTopicSerde.wrap(STATIC_TOPIC, delegate, callback);
    // When:
    final Object deserialized = staticSerde.deserializer().deserialize(SOURCE_TOPIC, SOME_BYTES);
    // Then:
    verify(delegateD).deserialize(STATIC_TOPIC, SOME_BYTES);
    assertThat(deserialized, is(SOME_OBJECT));
    verifyNoMoreInteractions(callback);
    verifyNoMoreInteractions(logger);
}
Also used: ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger), WrapperSerde (org.apache.kafka.common.serialization.Serdes.WrapperSerde), LoggingDeserializer (io.confluent.ksql.logging.processing.LoggingDeserializer), Test (org.junit.Test)
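
For contrast with the static-topic wrapping above, here is a hedged sketch of the same deserializer used without StaticTopicSerde.wrap. It assumes LoggingDeserializer simply delegates deserialize(topic, bytes) unchanged and leaves the ProcessingLogger untouched on the success path, which is what Example 14 implies; all names are illustrative.

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;

import io.confluent.ksql.logging.processing.LoggingDeserializer;
import io.confluent.ksql.logging.processing.ProcessingLogger;
import org.apache.kafka.common.serialization.Deserializer;

public final class LoggingDeserializerSketch {

    @SuppressWarnings("unchecked")
    public static void main(final String[] args) {
        final Deserializer<Object> delegate = mock(Deserializer.class);
        final ProcessingLogger logger = mock(ProcessingLogger.class);
        final byte[] bytes = new byte[] { 1, 2, 3 };
        final Object expected = new Object();
        when(delegate.deserialize("source-topic", bytes)).thenReturn(expected);

        final LoggingDeserializer<Object> deserializer = new LoggingDeserializer<>(delegate, logger);

        // Without StaticTopicSerde.wrap, the delegate sees the record's own topic.
        final Object result = deserializer.deserialize("source-topic", bytes);

        verify(delegate).deserialize("source-topic", bytes);
        assertThat(result, is(expected));
        verifyNoInteractions(logger); // assumption: nothing is logged when deserialization succeeds
    }
}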

Example 15 with ProcessingLogger

Use of io.confluent.ksql.logging.processing.ProcessingLogger in project ksql by confluentinc.

The class QueryBuilderTest, method shouldConfigureProducerErrorHandler.

@Test
public void shouldConfigureProducerErrorHandler() {
    // Given:
    final ProcessingLogger logger = mock(ProcessingLogger.class);
    when(processingLoggerFactory.getLogger(QUERY_ID.toString())).thenReturn(logger);
    // When:
    buildPersistentQuery(SOURCES, KsqlConstants.PersistentQueryType.CREATE_AS, QUERY_ID).initialize();
    // Then:
    final Map<String, Object> streamsProps = capturedStreamsProperties();
    assertThat(streamsProps.get(ProductionExceptionHandlerUtil.KSQL_PRODUCTION_ERROR_LOGGER), is(logger));
}
Also used: ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger), ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString), Test (org.junit.Test)
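
The stubbing above pins the logger to QUERY_ID.toString(), which matches the queryId-present branch of buildStreamsProperties in Example 12. As a tiny hedged sketch of that name-selection logic in isolation (hypothetical helper, not ksql source):

import java.util.Optional;

public final class ProcessingLoggerNameSketch {

    private ProcessingLoggerNameSketch() {
    }

    // Mirrors the branch in Example 12: use the query id as the logger name when present,
    // otherwise fall back to the streams application id.
    public static String processingLoggerName(final Optional<String> queryId, final String applicationId) {
        return queryId.orElse(applicationId);
    }
}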

Aggregations

ProcessingLogger (io.confluent.ksql.logging.processing.ProcessingLogger): 21 usages
GenericRow (io.confluent.ksql.GenericRow): 11 usages
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 10 usages
QueryContext (io.confluent.ksql.execution.context.QueryContext): 7 usages
CodeGenRunner (io.confluent.ksql.execution.codegen.CodeGenRunner): 6 usages
Test (org.junit.Test): 6 usages
CompiledExpression (io.confluent.ksql.execution.codegen.CompiledExpression): 5 usages
Expression (io.confluent.ksql.execution.expression.tree.Expression): 5 usages
RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext): 5 usages
GenericKey (io.confluent.ksql.GenericKey): 4 usages
Stacker (io.confluent.ksql.execution.context.QueryContext.Stacker): 4 usages
Formats (io.confluent.ksql.execution.plan.Formats): 4 usages
PhysicalSchema (io.confluent.ksql.schema.ksql.PhysicalSchema): 4 usages
KStreamHolder (io.confluent.ksql.execution.plan.KStreamHolder): 3 usages
KTableHolder (io.confluent.ksql.execution.plan.KTableHolder): 3 usages
KsTransformer (io.confluent.ksql.execution.streams.transform.KsTransformer): 3 usages
LoggingDeserializer (io.confluent.ksql.logging.processing.LoggingDeserializer): 3 usages
List (java.util.List): 3 usages
Serde (org.apache.kafka.common.serialization.Serde): 3 usages
WrapperSerde (org.apache.kafka.common.serialization.Serdes.WrapperSerde): 3 usages