
Example 1 with MetricCollectors

Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.

From the class QueryBuilderTest, the method shouldBuildTransientQueryCorrectly:

@Test
public void shouldBuildTransientQueryCorrectly() {
    // Given:
    givenTransientQuery();
    // When:
    final TransientQueryMetadata queryMetadata = queryBuilder.buildTransientQuery(
        STATEMENT_TEXT, QUERY_ID,
        SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet()),
        physicalPlan, SUMMARY, TRANSIENT_SINK_SCHEMA, LIMIT, Optional.empty(),
        false, queryListener, streamsBuilder, Optional.empty(), new MetricCollectors());
    queryMetadata.initialize();
    // Then:
    assertThat(queryMetadata.getStatementString(), equalTo(STATEMENT_TEXT));
    assertThat(queryMetadata.getSourceNames(), equalTo(SOURCES.stream().map(DataSource::getName).collect(Collectors.toSet())));
    assertThat(queryMetadata.getExecutionPlan(), equalTo(SUMMARY));
    assertThat(queryMetadata.getTopology(), is(topology));
    assertThat(queryMetadata.getOverriddenProperties(), equalTo(OVERRIDES));
    verify(kafkaStreamsBuilder).build(any(), propertyCaptor.capture());
    assertThat(queryMetadata.getStreamsProperties(), equalTo(propertyCaptor.getValue()));
}
Also used : MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) TransientQueryMetadata(io.confluent.ksql.util.TransientQueryMetadata) DataSource(io.confluent.ksql.metastore.model.DataSource) Test(org.junit.Test)
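
For context, here is a minimal sketch, not taken from the ksql sources, of how a test can keep a reference to the collector it passes in and assert against its metric registry. It assumes MetricCollectors exposes a getMetrics() accessor returning the underlying Kafka Metrics object, and it uses org.hamcrest.CoreMatchers.notNullValue for the assertion.

@Test
public void shouldExposeMetricsRegistryFromFreshCollector() {
    // Given: a fresh per-test collector, as passed to buildTransientQuery above
    final MetricCollectors metricCollectors = new MetricCollectors();
    // Then: the underlying registry is available for metric assertions
    // (getMetrics() is an assumed accessor, not confirmed by the snippet above)
    assertThat(metricCollectors.getMetrics(), notNullValue());
}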

Example 2 with MetricCollectors

Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.

From the class KsqlServerMain, the method main:

public static void main(final String[] args) {
    try {
        final ServerOptions serverOptions = ServerOptions.parse(args);
        if (serverOptions == null) {
            return;
        }
        final Map<String, String> properties = PropertiesUtil.applyOverrides(
            PropertiesUtil.loadProperties(serverOptions.getPropertiesFile()),
            System.getProperties());
        final String installDir = properties.getOrDefault("ksql.server.install.dir", "");
        final KsqlConfig ksqlConfig = new KsqlConfig(properties);
        validateConfig(ksqlConfig);
        QueryLogger.configure(ksqlConfig);
        final Optional<String> queriesFile = serverOptions.getQueriesFile(properties);
        final MetricCollectors metricCollectors = new MetricCollectors();
        final Executable executable = createExecutable(properties, queriesFile, installDir, ksqlConfig, metricCollectors);
        new KsqlServerMain(executable, r -> Runtime.getRuntime().addShutdownHook(new Thread(r))).tryStartApp();
    } catch (final Exception e) {
        log.error("Failed to start KSQL", e);
        System.exit(-1);
    }
}
Also used : StreamsConfig(org.apache.kafka.streams.StreamsConfig) Logger(org.slf4j.Logger) PropertiesUtil(io.confluent.ksql.properties.PropertiesUtil) FormatFactory(io.confluent.ksql.serde.FormatFactory) Executor(java.util.concurrent.Executor) LoggerFactory(org.slf4j.LoggerFactory) IOException(java.io.IOException) KsqlConfig(io.confluent.ksql.util.KsqlConfig) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) File(java.io.File) Objects(java.util.Objects) CountDownLatch(java.util.concurrent.CountDownLatch) KsqlServerException(io.confluent.ksql.util.KsqlServerException) Map(java.util.Map) QueryLogger(io.confluent.ksql.logging.query.QueryLogger) KsqlException(io.confluent.ksql.util.KsqlException) Optional(java.util.Optional) VisibleForTesting(com.google.common.annotations.VisibleForTesting)
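
The point worth noting in this example is that main constructs exactly one MetricCollectors and hands it, together with the KsqlConfig, to createExecutable, so the server components record into a single shared registry. A rough sketch of that wiring pattern follows; MetricAwareComponent and wireServer are hypothetical names used only for illustration, not part of KsqlServerMain.

final class MetricAwareComponent {
    private final KsqlConfig ksqlConfig;
    private final MetricCollectors metricCollectors;

    // Hypothetical stand-in for the Executable produced by createExecutable
    MetricAwareComponent(final KsqlConfig ksqlConfig, final MetricCollectors metricCollectors) {
        this.ksqlConfig = Objects.requireNonNull(ksqlConfig, "ksqlConfig");
        this.metricCollectors = Objects.requireNonNull(metricCollectors, "metricCollectors");
    }
}

static MetricAwareComponent wireServer(final Map<String, String> properties) {
    final KsqlConfig ksqlConfig = new KsqlConfig(properties);
    // One shared collectors instance, mirroring KsqlServerMain.main above
    final MetricCollectors metricCollectors = new MetricCollectors();
    return new MetricAwareComponent(ksqlConfig, metricCollectors);
}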

Example 3 with MetricCollectors

Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.

From the class KsqlResourceTest, the method shouldDescribeStreamsExtended:

@Test
public void shouldDescribeStreamsExtended() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
        .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
        .build();
    givenSource(DataSourceType.KSTREAM, "new_stream", "new_topic", schema);
    // When:
    final SourceDescriptionList descriptionList = makeSingleRequest("DESCRIBE STREAMS EXTENDED;", SourceDescriptionList.class);
    // Then:
    assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("TEST_STREAM")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("KAFKA_TOPIC_2")),
            Collections.emptyList(), Collections.emptyList(), new MetricCollectors()),
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("new_stream")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("new_topic")),
            Collections.emptyList(), Collections.emptyList(), new MetricCollectors())));
}
Also used : SourceDescriptionList(io.confluent.ksql.rest.entity.SourceDescriptionList) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Test(org.junit.Test)

Example 4 with MetricCollectors

Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.

From the class KsqlResourceTest, the method shouldDescribeTablesExtended:

@Test
public void shouldDescribeTablesExtended() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
        .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
        .build();
    givenSource(DataSourceType.KTABLE, "new_table", "new_topic", schema, ImmutableSet.of(SourceName.of("TEST_TABLE")));
    // When:
    final SourceDescriptionList descriptionList = makeSingleRequest("DESCRIBE TABLES EXTENDED;", SourceDescriptionList.class);
    // Then:
    assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("TEST_TABLE")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("KAFKA_TOPIC_1")),
            Collections.emptyList(), ImmutableList.of("new_table"), new MetricCollectors()),
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("new_table")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("new_topic")),
            Collections.emptyList(), Collections.emptyList(), new MetricCollectors())));
}
Also used : SourceDescriptionList(io.confluent.ksql.rest.entity.SourceDescriptionList) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Test(org.junit.Test)

Example 5 with MetricCollectors

Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.

From the class KsqlResourceTest, the method shouldShowTablesExtended:

@Test
public void shouldShowTablesExtended() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
        .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
        .build();
    givenSource(DataSourceType.KTABLE, "new_table", "new_topic", schema, ImmutableSet.of(SourceName.of("TEST_TABLE")));
    // When:
    final SourceDescriptionList descriptionList = makeSingleRequest("SHOW TABLES EXTENDED;", SourceDescriptionList.class);
    // Then:
    assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("TEST_TABLE")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("KAFKA_TOPIC_1")),
            Collections.emptyList(), ImmutableList.of("new_table"), new MetricCollectors()),
        SourceDescriptionFactory.create(
            ksqlEngine.getMetaStore().getSource(SourceName.of("new_table")),
            true, Collections.emptyList(), Collections.emptyList(),
            Optional.of(kafkaTopicClient.describeTopic("new_topic")),
            Collections.emptyList(), Collections.emptyList(), new MetricCollectors())));
}
Also used : SourceDescriptionList(io.confluent.ksql.rest.entity.SourceDescriptionList) MetricCollectors(io.confluent.ksql.metrics.MetricCollectors) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Test(org.junit.Test)
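
Examples 3, 4, and 5 build their expected results with the same eight-argument SourceDescriptionFactory.create call. A small helper along the following lines, not present in KsqlResourceTest and shown only as a sketch, could factor that out; it assumes create returns a SourceDescription and reuses the ksqlEngine and kafkaTopicClient fields the tests already reference.

// Hypothetical test helper: builds the expected extended description for one source,
// passing a fresh MetricCollectors exactly as the assertions above do
private SourceDescription expectedDescription(
    final String sourceName,
    final String topicName,
    final List<String> sourceConstraints) {
    return SourceDescriptionFactory.create(
        ksqlEngine.getMetaStore().getSource(SourceName.of(sourceName)),
        true,
        Collections.emptyList(),
        Collections.emptyList(),
        Optional.of(kafkaTopicClient.describeTopic(topicName)),
        Collections.emptyList(),
        sourceConstraints,
        new MetricCollectors());
}

With such a helper, the assertion in shouldShowTablesExtended would reduce to containsInAnyOrder(expectedDescription("TEST_TABLE", "KAFKA_TOPIC_1", ImmutableList.of("new_table")), expectedDescription("new_table", "new_topic", Collections.emptyList())).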

Aggregations

MetricCollectors (io.confluent.ksql.metrics.MetricCollectors): 46 usages
Test (org.junit.Test): 29
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 15
SourceDescriptionList (io.confluent.ksql.rest.entity.SourceDescriptionList): 13
Before (org.junit.Before): 13
InternalFunctionRegistry (io.confluent.ksql.function.InternalFunctionRegistry): 12
MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl): 10
DataSource (io.confluent.ksql.metastore.model.DataSource): 10
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 10
KsqlEngine (io.confluent.ksql.engine.KsqlEngine): 9
ServiceContext (io.confluent.ksql.services.ServiceContext): 9
SequentialQueryIdGenerator (io.confluent.ksql.query.id.SequentialQueryIdGenerator): 8
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement): 8
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString): 8
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 8
SourceDescriptionEntity (io.confluent.ksql.rest.entity.SourceDescriptionEntity): 7
TestServiceContext (io.confluent.ksql.services.TestServiceContext): 7
Collections (java.util.Collections): 7
Map (java.util.Map): 7
Optional (java.util.Optional): 7