Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From class ListSourceExecutorTest, method shouldAddWarningOnClientExceptionForDescription:
@Test
public void shouldAddWarningOnClientExceptionForDescription() {
  // Given:
  final KsqlStream<?> stream1 = engine.givenSource(DataSourceType.KSTREAM, "STREAM1");
  final ServiceContext serviceContext = engine.getServiceContext();
  serviceContext.getTopicClient().deleteTopics(ImmutableList.of("STREAM1"));

  // When:
  final KsqlEntity entity = CUSTOM_EXECUTORS.showColumns().execute(
      (ConfiguredStatement<ShowColumns>) engine.configure("DESCRIBE STREAM1 EXTENDED;"),
      SESSION_PROPERTIES,
      engine.getEngine(),
      serviceContext
  ).getEntity().orElseThrow(IllegalStateException::new);

  // Then:
  assertThat(entity, instanceOf(SourceDescriptionEntity.class));
  final SourceDescriptionEntity description = (SourceDescriptionEntity) entity;
  assertThat(description.getSourceDescription(), equalTo(SourceDescriptionFactory.create(
      stream1, true, ImmutableList.of(), ImmutableList.of(), Optional.empty(),
      ImmutableList.of(), ImmutableList.of(), new MetricCollectors())));
  assertThat(description.getWarnings(),
      contains(new KsqlWarning("Error from Kafka: unknown topic: STREAM1")));
}
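The same expectation-building pattern recurs in the remaining ListSourceExecutorTest and SourceDescriptionFactoryTest examples: a freshly constructed MetricCollectors is passed as the final argument to SourceDescriptionFactory.create, so the expected SourceDescription carries no recorded topic statistics. A minimal sketch of that call shape follows; the dataSource variable is hypothetical and the per-parameter comments are inferred from the tests above rather than taken from the ksql sources.

  // Sketch only: "dataSource" is a hypothetical DataSource. The empty lists and
  // Optional mirror the values used in the tests above, and the fresh
  // MetricCollectors has recorded no consumer or producer activity, so the
  // resulting description has blank statistics.
  final SourceDescription expected = SourceDescriptionFactory.create(
      dataSource,               // the source being described
      true,                     // extended output (DESCRIBE/LIST ... EXTENDED)
      ImmutableList.of(),       // assumed: queries reading from the source
      ImmutableList.of(),       // assumed: queries writing to the source
      Optional.empty(),         // Kafka topic description, when the topic is reachable
      ImmutableList.of(),       // assumed: per-query offset summaries
      ImmutableList.of(),       // assumed: names of sources constraining this one
      new MetricCollectors());  // no collected metrics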
From class ListSourceExecutorTest, method shouldShowTablesExtended:
@Test
public void shouldShowTablesExtended() {
  // Given:
  final KsqlTable<?> table1 = engine.givenSource(DataSourceType.KTABLE, "table1");
  final KsqlTable<?> table2 = engine.givenSource(
      DataSourceType.KTABLE, "table2", ImmutableSet.of(SourceName.of("table1")));
  engine.givenSource(DataSourceType.KSTREAM, "stream");

  // When:
  final SourceDescriptionList descriptionList = (SourceDescriptionList) CUSTOM_EXECUTORS.listTables().execute(
      (ConfiguredStatement<ListTables>) engine.configure("LIST TABLES EXTENDED;"),
      SESSION_PROPERTIES,
      engine.getEngine(),
      engine.getServiceContext()
  ).getEntity().orElseThrow(IllegalStateException::new);

  // Then:
  final KafkaTopicClient client = engine.getServiceContext().getTopicClient();
  assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
      SourceDescriptionFactory.create(table1, true, ImmutableList.of(), ImmutableList.of(),
          Optional.of(client.describeTopic(table1.getKafkaTopicName())),
          ImmutableList.of(), ImmutableList.of("table2"), new MetricCollectors()),
      SourceDescriptionFactory.create(table2, true, ImmutableList.of(), ImmutableList.of(),
          Optional.of(client.describeTopic(table1.getKafkaTopicName())),
          ImmutableList.of(), ImmutableList.of(), new MetricCollectors())));
}
From class KsqlRestApplicationTest, method shouldAddConfigurableMetricsReportersIfPresentInKsqlConfig:
@Test
public void shouldAddConfigurableMetricsReportersIfPresentInKsqlConfig() {
  // When:
  final MetricsReporter mockReporter = mock(MetricsReporter.class);
  when(ksqlConfig.getConfiguredInstances(anyString(), any(), any()))
      .thenReturn(Collections.singletonList(mockReporter));
  final MetricCollectors metricCollectors = new MetricCollectors();
  givenAppWithRestConfig(Collections.emptyMap(), metricCollectors);

  // Then:
  final List<MetricsReporter> reporters = metricCollectors.getMetrics().reporters();
  assertThat(reporters, hasItem(mockReporter));
}
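The assertion above leans on MetricCollectors#getMetrics() exposing the underlying org.apache.kafka.common.metrics.Metrics registry, whose reporters() list holds every registered MetricsReporter. Below is a minimal standalone sketch of that relationship, using the Kafka-clients JmxReporter (org.apache.kafka.common.metrics.JmxReporter) instead of the mock from the test; this is assumed usage for illustration, not code from the ksql sources.

  // Sketch only: add a reporter directly to the registry wrapped by MetricCollectors
  // and observe it through reporters(), the same accessor the test asserts against.
  final MetricCollectors collectors = new MetricCollectors();
  final Metrics metrics = collectors.getMetrics();
  metrics.addReporter(new JmxReporter());   // JmxReporter ships with kafka-clients
  assertThat(metrics.reporters(), hasItem(instanceOf(JmxReporter.class)));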
From class KsqlRestApplicationTest, method shouldConfigureIQWithInterNodeListenerIfSet:
@Test
public void shouldConfigureIQWithInterNodeListenerIfSet() {
  // Given:
  givenAppWithRestConfig(
      ImmutableMap.of(
          KsqlRestConfig.LISTENERS_CONFIG, "http://localhost:0",
          KsqlRestConfig.ADVERTISED_LISTENER_CONFIG, "https://some.host:12345"),
      new MetricCollectors());

  // When:
  final KsqlConfig ksqlConfig = app.buildConfigWithPort();

  // Then:
  assertThat(
      ksqlConfig.getKsqlStreamConfigProps().get(StreamsConfig.APPLICATION_SERVER_CONFIG),
      is("https://some.host:12345"));
}
From class SourceDescriptionFactoryTest, method shouldReturnEmptyTimestampColumn:
@Test
public void shouldReturnEmptyTimestampColumn() {
  // Given:
  final String kafkaTopicName = "kafka";
  final DataSource dataSource = buildDataSource(kafkaTopicName, Optional.empty());

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource, true, Collections.emptyList(), Collections.emptyList(),
      Optional.empty(), Collections.emptyList(), Collections.emptyList(),
      new MetricCollectors());

  // Then:
  assertThat(sourceDescription.getTimestamp(), is(""));
}