Usage of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc:
class SourceNodeTest, method shouldBuildFromDataSource.
@Test
public void shouldBuildFromDataSource() {
  // Given: a mocked table source with a windowed AVRO key format and DELIMITED values.
  final LogicalSchema schema = LogicalSchema.builder()
      .valueColumn(ColumnName.of("bob"), SqlTypes.BIGINT)
      .build();

  final KeyFormat keyFormat = KeyFormat.windowed(
      FormatInfo.of("AVRO", ImmutableMap.of("some", "prop")),
      SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES),
      WindowInfo.of(WindowType.HOPPING, Optional.of(Duration.ofMillis(10))));
  final ValueFormat valueFormat = ValueFormat.of(
      FormatInfo.of("DELIMITED", ImmutableMap.of("some1", "prop1")),
      SerdeFeatures.of(SerdeFeature.WRAP_SINGLES));

  final KsqlTopic topic = mock(KsqlTopic.class);
  when(topic.getKeyFormat()).thenReturn(keyFormat);
  when(topic.getValueFormat()).thenReturn(valueFormat);

  final DataSource source = mock(DataSource.class);
  when(source.getName()).thenReturn(SourceName.of("the Name"));
  when(source.getDataSourceType()).thenReturn(DataSourceType.KTABLE);
  when(source.getSchema()).thenReturn(schema);
  when(source.getKsqlTopic()).thenReturn(topic);

  // When:
  final SourceNode sourceNode = SourceNode.fromDataSource(source);

  // Then: every attribute of the source is mirrored onto the node.
  final SourceNode expected = new SourceNode(
      "the Name",
      "TABLE",
      Optional.of(schema.toString()),
      Optional.of(new KeyFormatNode(
          Optional.of("AVRO"),
          Optional.of(WindowType.HOPPING),
          Optional.of(10L))),
      Optional.of("DELIMITED"),
      Optional.of(ImmutableSet.of(SerdeFeature.UNWRAP_SINGLES)),
      Optional.of(ImmutableSet.of(SerdeFeature.WRAP_SINGLES)),
      Optional.of(false));
  assertThat(sourceNode, is(expected));
}
Usage of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc:
class MetaStoreImplTest, method shouldBeThreadSafe.
@Test
public void shouldBeThreadSafe() {
  // Exercise the full put/get/copy/delete lifecycle from many threads concurrently.
  IntStream.range(0, 1_000).parallel().forEach(idx -> {
    final SourceName name = SourceName.of("source" + idx);
    final DataSource source = mock(DataSource.class);
    when(source.getName()).thenReturn(name);

    metaStore.putSource(source, false);
    metaStore.getSource(name);
    metaStore.getAllDataSources();
    metaStore.copy();
    metaStore.deleteSource(name);
  });

  // Every source was deleted again, so the store must end up empty.
  assertThat(metaStore.getAllDataSources().keySet(), is(empty()));
}
Usage of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc:
class SourceDescriptionFactoryTest, method shouldReturnTimestampColumnIfPresent.
@Test
public void shouldReturnTimestampColumnIfPresent() {
  // Given: a source that declares "foo" as its explicit timestamp column.
  final Optional<TimestampColumn> timestampColumn =
      Optional.of(new TimestampColumn(ColumnName.of("foo"), Optional.empty()));
  final DataSource dataSource = buildDataSource("kafka", timestampColumn);

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource,
      true,
      Collections.emptyList(),
      Collections.emptyList(),
      Optional.empty(),
      Collections.emptyList(),
      Collections.emptyList(),
      new MetricCollectors());

  // Then: the declared column name is surfaced on the description.
  assertThat(sourceDescription.getTimestamp(), is("foo"));
}
Usage of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc:
class SourceDescriptionFactoryTest, method shouldReturnLocalStatsBasedOnKafkaTopic.
@Test
public void shouldReturnLocalStatsBasedOnKafkaTopic() {
  // Given: a metrics collector stubbed to return canned stats for the source's topic.
  final DataSource dataSource = buildDataSource("kafka", Optional.empty());

  final MetricCollectors collectors = Mockito.mock(MetricCollectors.class);
  Mockito.when(collectors.getAndFormatStatsFor(anyString(), anyBoolean()))
      .thenReturn(mockStringStat);
  Mockito.when(collectors.getStatsFor(dataSource.getKafkaTopicName(), true))
      .thenReturn(errorStats);
  Mockito.when(collectors.getStatsFor(dataSource.getKafkaTopicName(), false))
      .thenReturn(stats);

  final KsqlHostInfo localhost = new KsqlHostInfo("myhost", 10);

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource,
      true,
      Collections.emptyList(),
      Collections.emptyList(),
      Optional.empty(),
      Collections.emptyList(),
      Collections.emptyList(),
      Stream.empty(),
      Stream.empty(),
      localhost,
      collectors);

  // Then: the flat (legacy) string stats come from the formatted collector output.
  // TODO deprecate and remove
  assertThat(sourceDescription.getStatistics(), containsString(mockStringStat));
  assertThat(sourceDescription.getErrorStats(), containsString(mockStringStat));

  // The host's own stats must also appear in the cluster-wide views.
  final KsqlHostInfoEntity localEntity = new KsqlHostInfoEntity(localhost);
  final List<QueryHostStat> localStats = stats.stream()
      .map(s -> QueryHostStat.fromStat(s, localEntity))
      .collect(Collectors.toList());
  assertThat(localStats, everyItem(isIn(sourceDescription.getClusterStatistics())));

  final List<QueryHostStat> localErrors = errorStats.stream()
      .map(s -> QueryHostStat.fromStat(s, localEntity))
      .collect(Collectors.toList());
  assertThat(localErrors, everyItem(isIn(sourceDescription.getClusterErrorStats())));
}
Usage of io.confluent.ksql.metastore.model.DataSource in project ksql by confluentinc:
class SourceDescriptionFactoryTest, method testShouldIncludeRemoteStatsIfProvided.
@Test
public void testShouldIncludeRemoteStatsIfProvided() {
  // Given: stats and errors reported by a remote host, plus canned local stats.
  final List<QueryHostStat> remoteStats = IntStream.range(0, 5).boxed()
      .map(x -> new QueryHostStat(
          new KsqlHostInfoEntity("otherhost:1090"),
          ConsumerCollector.CONSUMER_MESSAGES_PER_SEC,
          x,
          x))
      .collect(Collectors.toList());
  final List<QueryHostStat> remoteErrors = IntStream.range(0, 5).boxed()
      .map(x -> new QueryHostStat(
          new KsqlHostInfoEntity("otherhost:1090"),
          StreamsErrorCollector.CONSUMER_FAILED_MESSAGES_PER_SEC,
          x,
          x))
      .collect(Collectors.toList());

  final DataSource dataSource = buildDataSource("kafka", Optional.empty());

  final MetricCollectors collectors = Mockito.mock(MetricCollectors.class);
  Mockito.when(collectors.getAndFormatStatsFor(anyString(), anyBoolean()))
      .thenReturn(mockStringStat);
  Mockito.when(collectors.getStatsFor(dataSource.getKafkaTopicName(), true))
      .thenReturn(errorStats);
  Mockito.when(collectors.getStatsFor(dataSource.getKafkaTopicName(), false))
      .thenReturn(stats);

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource,
      true,
      Collections.emptyList(),
      Collections.emptyList(),
      Optional.empty(),
      Collections.emptyList(),
      Collections.emptyList(),
      remoteStats.stream(),
      remoteErrors.stream(),
      new KsqlHostInfo("myhost", 10),
      collectors);

  // Then: every remote stat and error is surfaced in the cluster-wide views.
  assertThat(remoteStats, everyItem(isIn(sourceDescription.getClusterStatistics())));
  assertThat(remoteErrors, everyItem(isIn(sourceDescription.getClusterErrorStats())));
}
Aggregations