Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From the class SourceDescriptionFactoryTest, method shouldReturnSourceConstraints:
@Test
public void shouldReturnSourceConstraints() {
  // Given:
  final String kafkaTopicName = "kafka";
  final DataSource dataSource = buildDataSource(kafkaTopicName, Optional.empty());

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource, true, Collections.emptyList(), Collections.emptyList(),
      Optional.empty(), Collections.emptyList(),
      ImmutableList.of("s1", "s2"), new MetricCollectors());

  // Then:
  assertThat(sourceDescription.getSourceConstraints(), hasItems("s1", "s2"));
}
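A natural companion check, shown here only as a hedged sketch: calling the same SourceDescriptionFactory.create overload with an empty constraint list should produce an empty getSourceConstraints() result. The test name is illustrative and not part of the original class; is(empty()) is a standard Hamcrest matcher, and buildDataSource is the same helper used above.

// Hypothetical companion test; reuses only the factory call shown above.
@Test
public void shouldReturnNoSourceConstraintsWhenNoneGiven() {
  // Given:
  final DataSource dataSource = buildDataSource("kafka", Optional.empty());

  // When:
  final SourceDescription sourceDescription = SourceDescriptionFactory.create(
      dataSource, true, Collections.emptyList(), Collections.emptyList(),
      Optional.empty(), Collections.emptyList(),
      Collections.emptyList(), new MetricCollectors());

  // Then:
  assertThat(sourceDescription.getSourceConstraints(), is(empty()));
}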
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From the class StandaloneExecutorFunctionalTest, method setUp:
@SuppressWarnings("unchecked")
@Before
public void setUp() throws Exception {
  queryFile = TMP.newFile().toPath();
  final Map<String, Object> properties = ImmutableMap.<String, Object>builder()
      .putAll(KsqlConfigTestUtil.baseTestConfig())
      .put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, TEST_HARNESS.kafkaBootstrapServers())
      .put(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY, "http://foo:8080")
      .build();
  final Function<KsqlConfig, ServiceContext> serviceContextFactory = config -> TestServiceContext.create(
      new KsqlConfig(properties), TEST_HARNESS.getServiceContext().getSchemaRegistryClientFactory());
  standalone = StandaloneExecutorFactory.create(
      (Map) properties, queryFile.toString(), ".", serviceContextFactory, KafkaConfigStore::new,
      activeQuerySupplier -> versionChecker, StandaloneExecutor::new, new MetricCollectors());
  s1 = KsqlIdentifierTestUtil.uniqueIdentifierName("S1");
  s2 = KsqlIdentifierTestUtil.uniqueIdentifierName("S2");
  t1 = KsqlIdentifierTestUtil.uniqueIdentifierName("T1");
}
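The queryFile created above is what the standalone executor reads its statements from. The snippet below is only a sketch of how a test body might populate it before starting the executor: the CREATE STREAM text is illustrative, and it assumes java.nio.file.Files and java.nio.charset.StandardCharsets are imported.

// Illustrative only: write a statement for the unique stream name s1 into the query file.
Files.write(queryFile, (
    "CREATE STREAM " + s1 + " (VAL STRING) "
        + "WITH (kafka_topic='" + s1 + "', value_format='JSON');"
).getBytes(StandardCharsets.UTF_8));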
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From the class TemporaryEngine, method before:
@Override
protected void before() {
  final InternalFunctionRegistry functionRegistry = new InternalFunctionRegistry();
  metaStore = new MetaStoreImpl(functionRegistry);
  serviceContext = TestServiceContext.create();
  engine = KsqlEngineTestUtil.createKsqlEngine(getServiceContext(), metaStore, new MetricCollectors());
  ksqlConfig = KsqlConfigTestUtil.create("localhost:9092", ImmutableMap.<String, Object>builder()
      .putAll(configs)
      .put("ksql.command.topic.suffix", "commands")
      .put(KsqlRestConfig.LISTENERS_CONFIG, "http://localhost:8088")
      .build());
  final SqlTypeParser typeParser = SqlTypeParser.create(TypeRegistry.EMPTY);
  final Optional<Metrics> noMetrics = Optional.empty();
  final UdfLoader udfLoader = new UdfLoader(functionRegistry, noMetrics, typeParser, true);
  udfLoader.loadUdfFromClass(TestUdf1.class, "test");
  final UdtfLoader udtfLoader = new UdtfLoader(functionRegistry, noMetrics, typeParser, true);
  udtfLoader.loadUdtfFromClass(TestUdtf1.class, "whatever");
  udtfLoader.loadUdtfFromClass(TestUdtf2.class, "whatever");
}
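A rule initialised this way would normally also release those resources in its teardown. The after() below is only a sketch, under the assumption that the engine and the service context created in before() are closed there; it is not part of the original snippet.

// Sketch of the matching teardown (assumed, not shown in the original snippet).
@Override
protected void after() {
  engine.close();
  serviceContext.close();
}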
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From the class RecoveryTest, method createKsqlEngine:
private KsqlEngine createKsqlEngine(final QueryIdGenerator queryIdGenerator) {
  final KsqlEngineMetrics engineMetrics = mock(KsqlEngineMetrics.class);
  when(engineMetrics.getQueryEventListener()).thenReturn(mock(QueryEventListener.class));
  return KsqlEngineTestUtil.createKsqlEngine(
      serviceContext, new MetaStoreImpl(new InternalFunctionRegistry()), ignored -> engineMetrics,
      queryIdGenerator, ksqlConfig, new MetricCollectors());
}
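How RecoveryTest invokes this helper is not shown here; as a hedged illustration only, it could be called with the SpecificQueryIdGenerator that appears elsewhere on this page, since that class is a QueryIdGenerator.

// Illustrative call site (an assumption): build an engine with a fresh query-ID generator.
final KsqlEngine engine = createKsqlEngine(new SpecificQueryIdGenerator());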
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
From the class InteractiveStatementExecutorTest, method setUp:
@Before
public void setUp() {
  ksqlConfig = KsqlConfigTestUtil.create(
      CLUSTER, ImmutableMap.of(StreamsConfig.APPLICATION_SERVER_CONFIG, "http://host:1234"));
  final FakeKafkaTopicClient fakeKafkaTopicClient = new FakeKafkaTopicClient();
  fakeKafkaTopicClient.createTopic("pageview_topic", 1, (short) 1, emptyMap());
  fakeKafkaTopicClient.createTopic("foo", 1, (short) 1, emptyMap());
  fakeKafkaTopicClient.createTopic("pageview_topic_json", 1, (short) 1, emptyMap());
  serviceContext = TestServiceContext.create(fakeKafkaTopicClient);
  final SpecificQueryIdGenerator hybridQueryIdGenerator = new SpecificQueryIdGenerator();
  final MetricCollectors metricCollectors = new MetricCollectors();
  ksqlEngine = KsqlEngineTestUtil.createKsqlEngine(
      serviceContext,
      new MetaStoreImpl(new InternalFunctionRegistry()),
      (engine) -> new KsqlEngineMetrics("", engine, Collections.emptyMap(), Optional.empty(), metricCollectors),
      hybridQueryIdGenerator,
      ksqlConfig,
      metricCollectors);
  statementParser = new StatementParser(ksqlEngine);
  statementExecutor = new InteractiveStatementExecutor(
      serviceContext, ksqlEngine, statementParser, hybridQueryIdGenerator,
      InternalTopicSerdes.deserializer(Command.class));
  statementExecutorWithMocks = new InteractiveStatementExecutor(
      serviceContext, mockEngine, mockParser, mockQueryIdGenerator, commandDeserializer);
  statementExecutor.configure(ksqlConfig);
  statementExecutorWithMocks.configure(ksqlConfig);
  plannedCommand = new Command(
      CREATE_STREAM_FOO_STATEMENT,
      emptyMap(),
      ksqlConfig.getAllConfigPropsWithSecretsObfuscated(),
      Optional.of(plan));
}
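For contrast with plannedCommand, a command carrying no prepared plan can be built with the same constructor by passing Optional.empty() as the last argument; the variable name below is illustrative and not part of the original setup.

// Hypothetical companion to plannedCommand above: same constructor, no prepared plan.
final Command unplannedCommand = new Command(
    CREATE_STREAM_FOO_STATEMENT,
    emptyMap(),
    ksqlConfig.getAllConfigPropsWithSecretsObfuscated(),
    Optional.empty());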