Use of io.confluent.ksql.util.KsqlHostInfo in the ksql project by confluentinc.
Example: the method shouldReturnLocalStatsBasedOnKafkaTopic of class SourceDescriptionFactoryTest.
@Test
public void shouldReturnLocalStatsBasedOnKafkaTopic() {
  // Given: a data source backed by a Kafka topic, and a metrics collector
  // stubbed to return canned stats / error stats for that topic.
  final String topicName = "kafka";
  final DataSource source = buildDataSource(topicName, Optional.empty());
  final MetricCollectors collectors = Mockito.mock(MetricCollectors.class);
  Mockito.when(collectors.getAndFormatStatsFor(anyString(), anyBoolean()))
      .thenReturn(mockStringStat);
  Mockito.when(collectors.getStatsFor(source.getKafkaTopicName(), false)).thenReturn(stats);
  Mockito.when(collectors.getStatsFor(source.getKafkaTopicName(), true)).thenReturn(errorStats);
  final KsqlHostInfo localhost = new KsqlHostInfo("myhost", 10);
  // When: building the description with no remote stats supplied.
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      source,
      true,
      Collections.emptyList(),
      Collections.emptyList(),
      Optional.empty(),
      Collections.emptyList(),
      Collections.emptyList(),
      Stream.empty(),
      Stream.empty(),
      localhost,
      collectors);
  // Then: the legacy formatted-string stats fields are populated.
  // TODO deprecate and remove
  assertThat(sourceDescription.getStatistics(), containsString(mockStringStat));
  assertThat(sourceDescription.getErrorStats(), containsString(mockStringStat));
  // And the node's own stats are attributed to it in the cluster-wide view.
  final KsqlHostInfoEntity localEntity = new KsqlHostInfoEntity(localhost);
  final List<QueryHostStat> expectedStats = stats.stream()
      .map(s -> QueryHostStat.fromStat(s, localEntity))
      .collect(Collectors.toList());
  assertThat(expectedStats, everyItem(isIn(sourceDescription.getClusterStatistics())));
  final List<QueryHostStat> expectedErrors = errorStats.stream()
      .map(s -> QueryHostStat.fromStat(s, localEntity))
      .collect(Collectors.toList());
  assertThat(expectedErrors, everyItem(isIn(sourceDescription.getClusterErrorStats())));
}
Use of io.confluent.ksql.util.KsqlHostInfo in the ksql project by confluentinc.
Example: the method testShouldIncludeRemoteStatsIfProvided of class SourceDescriptionFactoryTest.
@Test
public void testShouldIncludeRemoteStatsIfProvided() {
  // Given: a data source with local stats stubbed, plus a set of stats and
  // error stats attributed to a different (remote) host.
  final KsqlHostInfoEntity remoteHost = new KsqlHostInfoEntity("otherhost:1090");
  final List<QueryHostStat> remoteStats = IntStream.range(0, 5)
      .boxed()
      .map(x -> new QueryHostStat(remoteHost, ConsumerCollector.CONSUMER_MESSAGES_PER_SEC, x, x))
      .collect(Collectors.toList());
  final List<QueryHostStat> remoteErrors = IntStream.range(0, 5)
      .boxed()
      .map(x -> new QueryHostStat(
          remoteHost, StreamsErrorCollector.CONSUMER_FAILED_MESSAGES_PER_SEC, x, x))
      .collect(Collectors.toList());
  final String topicName = "kafka";
  final DataSource source = buildDataSource(topicName, Optional.empty());
  final MetricCollectors collectors = Mockito.mock(MetricCollectors.class);
  Mockito.when(collectors.getAndFormatStatsFor(anyString(), anyBoolean()))
      .thenReturn(mockStringStat);
  Mockito.when(collectors.getStatsFor(source.getKafkaTopicName(), false)).thenReturn(stats);
  Mockito.when(collectors.getStatsFor(source.getKafkaTopicName(), true)).thenReturn(errorStats);
  // When: building the description with the remote stats streams supplied.
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      source,
      true,
      Collections.emptyList(),
      Collections.emptyList(),
      Optional.empty(),
      Collections.emptyList(),
      Collections.emptyList(),
      remoteStats.stream(),
      remoteErrors.stream(),
      new KsqlHostInfo("myhost", 10),
      collectors);
  // Then: every remote entry appears in the cluster-wide stats views.
  assertThat(remoteStats, everyItem(isIn(sourceDescription.getClusterStatistics())));
  assertThat(remoteErrors, everyItem(isIn(sourceDescription.getClusterErrorStats())));
}
Use of io.confluent.ksql.util.KsqlHostInfo in the ksql project by confluentinc.
Example: the setUp method of class ActiveHostFilterTest.
@Before
public void setUp() {
  // Fixture hosts: one active, one standby; the filter is built around the
  // active host's Streams HostInfo.
  final HostInfo activeStreamsHost = new HostInfo("activeHost", 2345);
  activeHost = new KsqlHostInfo("activeHost", 2345);
  standByHost = new KsqlHostInfo("standby1", 1234);
  activeHostFilter = new ActiveHostFilter(activeStreamsHost);
}
Use of io.confluent.ksql.util.KsqlHostInfo in the ksql project by confluentinc.
Example: the configure method of class KsqlResource.
@Override
public void configure(final KsqlConfig config) {
  // The Kafka Streams application-server config is mandatory: it identifies
  // this node's advertised host:port within the cluster.
  if (!config.getKsqlStreamConfigProps().containsKey(StreamsConfig.APPLICATION_SERVER_CONFIG)) {
    throw new IllegalArgumentException("Need KS application server set");
  }
  final String applicationServer =
      (String) config.getKsqlStreamConfigProps().get(StreamsConfig.APPLICATION_SERVER_CONFIG);
  final HostInfo hostInfo = ServerUtil.parseHostInfo(applicationServer);
  this.localHost = new KsqlHostInfo(hostInfo.host(), hostInfo.port());
  try {
    this.localUrl = new URL(applicationServer);
  } catch (final Exception e) {
    // Fix: the message previously printed host twice (host:host) instead of
    // host:port, and dropped the underlying cause. Chain `e` so the real
    // MalformedURLException detail is not lost.
    throw new IllegalStateException(
        "Failed to convert remote host info to URL."
            + " remoteInfo: " + localHost.host() + ":" + localHost.port(),
        e);
  }
  this.validator = new RequestValidator(
      CustomValidators.VALIDATOR_MAP,
      injectorFactory,
      ksqlEngine::createSandbox,
      new ValidatedCommandFactory());
  this.handler = new RequestHandler(
      customExecutors.EXECUTOR_MAP,
      new DistributingExecutor(
          config,
          commandRunner.getCommandQueue(),
          distributedCmdResponseTimeout,
          injectorFactory,
          authorizationValidator,
          new ValidatedCommandFactory(),
          errorHandler,
          commandRunnerWarning),
      ksqlEngine,
      new DefaultCommandQueueSync(
          commandRunner.getCommandQueue(),
          this::shouldSynchronize,
          distributedCmdResponseTimeout));
}
Use of io.confluent.ksql.util.KsqlHostInfo in the ksql project by confluentinc.
Example: the method shouldReturnRemoteOwnerForDifferentPort of class KsLocatorTest.
@Test
public void shouldReturnRemoteOwnerForDifferentPort() {
  // Given: a host whose name matches the local URL but whose port differs by
  // one, so it must be classified as remote.
  final HostInfo differentPortHost =
      new HostInfo(LOCAL_HOST_URL.getHost(), LOCAL_HOST_URL.getPort() + 1);
  final KsqlHostInfo candidate = locator.asKsqlHost(differentPortHost);
  getActiveAndStandbyMetadata(differentPortHost);
  when(activeFilter.filter(eq(candidate))).thenReturn(Host.include(candidate));
  when(livenessFilter.filter(eq(candidate))).thenReturn(Host.include(candidate));
  // When: locating the partition owner for the key.
  final List<KsqlPartitionLocation> result =
      locator.locate(ImmutableList.of(KEY), routingOptions, routingFilterFactoryActive, false);
  // Then: the first node returned is not considered local.
  final List<KsqlNode> nodes = result.get(0).getNodes();
  assertThat(nodes.stream().findFirst().map(KsqlNode::isLocal), is(Optional.of(false)));
}
Aggregations