Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
The class KsqlResourceTest, method shouldDescribeTables.
@Test
public void shouldDescribeTables() {
  // Given:
  final LogicalSchema schema = LogicalSchema.builder()
      .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
      .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
      .build();
  givenSource(DataSourceType.KTABLE, "new_table", "new_topic", schema, ImmutableSet.of(SourceName.of("TEST_TABLE")));

  // When:
  final SourceDescriptionList descriptionList =
      makeSingleRequest("DESCRIBE TABLES;", SourceDescriptionList.class);

  // Then:
  assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
      SourceDescriptionFactory.create(
          ksqlEngine.getMetaStore().getSource(SourceName.of("TEST_TABLE")),
          false, Collections.emptyList(), Collections.emptyList(),
          Optional.of(kafkaTopicClient.describeTopic("KAFKA_TOPIC_1")),
          Collections.emptyList(), ImmutableList.of("new_table"), new MetricCollectors()),
      SourceDescriptionFactory.create(
          ksqlEngine.getMetaStore().getSource(SourceName.of("new_table")),
          false, Collections.emptyList(), Collections.emptyList(),
          Optional.of(kafkaTopicClient.describeTopic("new_topic")),
          Collections.emptyList(), Collections.emptyList(), new MetricCollectors())));
}
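The assertion above relies on Hamcrest's containsInAnyOrder, which succeeds when the actual list contains exactly the expected items, ignoring order. A minimal, self-contained sketch of that matcher on plain strings (the test class and values here are illustrative, not part of the ksql test):

// Self-contained illustration of the containsInAnyOrder matcher used above;
// the class name and values are placeholders, not ksql objects.
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;

import java.util.Arrays;
import java.util.List;
import org.junit.Test;

public class ContainsInAnyOrderSketchTest {

  @Test
  public void shouldMatchRegardlessOfOrder() {
    final List<String> names = Arrays.asList("new_table", "TEST_TABLE");
    // Passes: both expected items are present; ordering is ignored.
    assertThat(names, containsInAnyOrder("TEST_TABLE", "new_table"));
  }
}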
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
The class KsqlResourceTest, method shouldDescribeStreams.
@Test
public void shouldDescribeStreams() {
  // Given:
  final LogicalSchema schema = LogicalSchema.builder()
      .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
      .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
      .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
      .build();
  givenSource(DataSourceType.KSTREAM, "new_stream", "new_topic", schema);

  // When:
  final SourceDescriptionList descriptionList =
      makeSingleRequest("DESCRIBE STREAMS;", SourceDescriptionList.class);

  // Then:
  assertThat(descriptionList.getSourceDescriptions(), containsInAnyOrder(
      SourceDescriptionFactory.create(
          ksqlEngine.getMetaStore().getSource(SourceName.of("TEST_STREAM")),
          false, Collections.emptyList(), Collections.emptyList(),
          Optional.of(kafkaTopicClient.describeTopic("KAFKA_TOPIC_2")),
          Collections.emptyList(), Collections.emptyList(), new MetricCollectors()),
      SourceDescriptionFactory.create(
          ksqlEngine.getMetaStore().getSource(SourceName.of("new_stream")),
          false, Collections.emptyList(), Collections.emptyList(),
          Optional.of(kafkaTopicClient.describeTopic("new_topic")),
          Collections.emptyList(), Collections.emptyList(), new MetricCollectors())));
}
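Both tests build the same two-field schema with the LogicalSchema builder. A minimal sketch of factoring that construction into a helper, using only the builder calls already shown above; the class and method names are hypothetical, and the import paths are assumed to follow the usual ksql package layout:

// Hypothetical helper; it reuses only the builder calls shown in the two tests above.
import io.confluent.ksql.name.ColumnName;
import io.confluent.ksql.schema.ksql.LogicalSchema;
import io.confluent.ksql.schema.ksql.SystemColumns;
import io.confluent.ksql.schema.ksql.types.SqlTypes;

final class TestSchemas {

  private TestSchemas() {
  }

  // Builds the STRING-keyed schema with FIELD1 (BOOLEAN) and FIELD2 (STRING)
  // shared by shouldDescribeTables and shouldDescribeStreams.
  static LogicalSchema twoFieldStringKeySchema() {
    return LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("FIELD1"), SqlTypes.BOOLEAN)
        .valueColumn(ColumnName.of("FIELD2"), SqlTypes.STRING)
        .build();
  }
}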
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
The class KsqlContextTestUtil, method create.
public static KsqlContext create(
    final KsqlConfig ksqlConfig,
    final SchemaRegistryClient schemaRegistryClient,
    final FunctionRegistry functionRegistry
) {
  final KafkaClientSupplier clientSupplier = new DefaultKafkaClientSupplier();
  final Admin adminClient = clientSupplier.getAdmin(ksqlConfig.getKsqlAdminClientConfigProps());
  final KafkaTopicClient kafkaTopicClient = new KafkaTopicClientImpl(() -> adminClient);
  final ServiceContext serviceContext = TestServiceContext.create(
      clientSupplier,
      adminClient,
      kafkaTopicClient,
      () -> schemaRegistryClient,
      new DefaultConnectClientFactory(ksqlConfig).get(Optional.empty(), Collections.emptyList(), Optional.empty()));
  final String metricsPrefix = "instance-" + COUNTER.getAndIncrement() + "-";
  final KsqlEngine engine = new KsqlEngine(
      serviceContext,
      ProcessingLogContext.create(),
      functionRegistry,
      ServiceInfo.create(ksqlConfig, metricsPrefix),
      new SequentialQueryIdGenerator(),
      ksqlConfig,
      Collections.emptyList(),
      new MetricCollectors());
  return new KsqlContext(serviceContext, ksqlConfig, engine, Injectors.DEFAULT);
}
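A minimal sketch of how this factory might be called from a test. The config map, the sql() statement, and the close() handling are assumptions for illustration; MockSchemaRegistryClient and TestFunctionRegistry are taken from the other examples on this page:

// Hypothetical caller; the config contents, the sql() statement and the
// explicit close() are assumptions, not taken from the ksql test suite.
final KsqlConfig ksqlConfig = new KsqlConfig(
    Collections.singletonMap("bootstrap.servers", "localhost:9092"));
final KsqlContext ksqlContext = KsqlContextTestUtil.create(
    ksqlConfig,
    new MockSchemaRegistryClient(),
    TestFunctionRegistry.INSTANCE.get());
try {
  ksqlContext.sql("CREATE STREAM s (id INT KEY, v STRING) "
      + "WITH (kafka_topic='t', value_format='JSON');");
} finally {
  ksqlContext.close();
}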
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
The class KsqlTesterTest, method setUp.
@Before
public void setUp() {
  final MockSchemaRegistryClient srClient = new MockSchemaRegistryClient();
  this.topicClient = new FakeKafkaTopicClient();
  this.serviceContext = TestServiceContext.create(topicClient, () -> srClient);
  this.config = new KsqlConfig(BASE_CONFIG);
  this.formatInjector = new DefaultFormatInjector();
  final MetaStoreImpl metaStore = new MetaStoreImpl(TestFunctionRegistry.INSTANCE.get());
  final MetricCollectors metricCollectors = new MetricCollectors();
  this.engine = new KsqlEngine(
      serviceContext,
      NoopProcessingLogContext.INSTANCE,
      metaStore,
      ServiceInfo.create(config),
      new SequentialQueryIdGenerator(),
      this.config,
      Collections.singletonList(new QueryEventListener() {
        @Override
        public void onDeregister(QueryMetadata query) {
          final DriverAndProperties driverAndProperties = drivers.get(query.getQueryId());
          closeDriver(driverAndProperties.driver, driverAndProperties.properties, false);
        }
      }),
      metricCollectors);
  this.expectedException = null;
  this.expectedMessage = null;
  this.overrides = new HashMap<>();
  this.driverPipeline = new TestDriverPipeline();
}
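The anonymous QueryEventListener above overrides only onDeregister, which suggests the interface's other callbacks have default implementations. A minimal sketch of the same cleanup logic as a named inner class of KsqlTesterTest (the class name is hypothetical; the fields and helpers it touches are those shown in setUp above):

// Hypothetical named equivalent of the anonymous listener registered in setUp();
// it assumes QueryEventListener supplies defaults for the callbacks not overridden here.
private final class DriverCleanupListener implements QueryEventListener {

  @Override
  public void onDeregister(final QueryMetadata query) {
    // Close the topology test driver associated with the query being removed.
    final DriverAndProperties driverAndProperties = drivers.get(query.getQueryId());
    closeDriver(driverAndProperties.driver, driverAndProperties.properties, false);
  }
}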
Use of io.confluent.ksql.metrics.MetricCollectors in project ksql by confluentinc.
The class TestExecutor, method getKsqlEngine.
static KsqlEngine getKsqlEngine(final ServiceContext serviceContext, final Optional<String> extensionDir) {
  final FunctionRegistry functionRegistry;
  if (extensionDir.isPresent()) {
    final MutableFunctionRegistry mutable = new InternalFunctionRegistry();
    UdfLoaderUtil.load(mutable, extensionDir.get());
    functionRegistry = mutable;
  } else {
    functionRegistry = TestFunctionRegistry.INSTANCE.get();
  }
  final MutableMetaStore metaStore = new MetaStoreImpl(functionRegistry);
  final MetricCollectors metricCollectors = new MetricCollectors();
  return new KsqlEngine(
      serviceContext,
      ProcessingLogContext.create(),
      "test_instance_",
      metaStore,
      (engine) -> new KsqlEngineMetrics("", engine, Collections.emptyMap(), Optional.empty(), metricCollectors),
      new SequentialQueryIdGenerator(),
      KsqlConfig.empty(),
      Collections.emptyList(),
      metricCollectors);
}
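A minimal sketch of how this factory might be used. The no-argument TestServiceContext.create() overload, the try-with-resources handling of KsqlEngine, and the serviceContext.close() call are assumptions:

// Hypothetical usage; the no-arg TestServiceContext.create() and the
// Closeable handling of KsqlEngine and ServiceContext are assumptions.
final ServiceContext serviceContext = TestServiceContext.create();
try (KsqlEngine engine = getKsqlEngine(serviceContext, Optional.empty())) {
  // The engine is wired with its own MetricCollectors instance, as shown above.
} finally {
  serviceContext.close();
}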