Use of io.confluent.ksql.services.FakeKafkaTopicClient in project ksql by confluentinc.
From the class KsqlTesterTest, method setUp:
@Before
public void setUp() {
  final MockSchemaRegistryClient srClient = new MockSchemaRegistryClient();
  this.topicClient = new FakeKafkaTopicClient();
  this.serviceContext = TestServiceContext.create(topicClient, () -> srClient);
  this.config = new KsqlConfig(BASE_CONFIG);
  this.formatInjector = new DefaultFormatInjector();

  final MetaStoreImpl metaStore = new MetaStoreImpl(TestFunctionRegistry.INSTANCE.get());
  final MetricCollectors metricCollectors = new MetricCollectors();
  this.engine = new KsqlEngine(
      serviceContext,
      NoopProcessingLogContext.INSTANCE,
      metaStore,
      ServiceInfo.create(config),
      new SequentialQueryIdGenerator(),
      this.config,
      Collections.singletonList(new QueryEventListener() {
        @Override
        public void onDeregister(QueryMetadata query) {
          final DriverAndProperties driverAndProperties = drivers.get(query.getQueryId());
          closeDriver(driverAndProperties.driver, driverAndProperties.properties, false);
        }
      }),
      metricCollectors);

  this.expectedException = null;
  this.expectedMessage = null;
  this.overrides = new HashMap<>();
  this.driverPipeline = new TestDriverPipeline();
}
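FakeKafkaTopicClient is an in-memory implementation of the KafkaTopicClient interface, which is why this setup can register topics and build a full KsqlEngine without a running broker. Below is a minimal sketch of driving the fake client on its own; the createTopic overload is the one used in these snippets, while isTopicExists is assumed here from the KafkaTopicClient interface:

import io.confluent.ksql.services.FakeKafkaTopicClient;
import java.util.Collections;

// Sketch only: exercise the in-memory topic client outside of an engine.
final FakeKafkaTopicClient fakeClient = new FakeKafkaTopicClient();

// Same overload as above: topic name, partitions, replication factor, topic configs.
fakeClient.createTopic("orders", 1, (short) 1, Collections.emptyMap());

// isTopicExists(...) is assumed from KafkaTopicClient; the fake answers from its in-memory state.
if (!fakeClient.isTopicExists("orders")) {
  throw new AssertionError("topic should be visible in the fake client");
}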
Use of io.confluent.ksql.services.FakeKafkaTopicClient in project ksql by confluentinc.
From the class InteractiveStatementExecutorTest, method setUp:
@Before
public void setUp() {
  ksqlConfig = KsqlConfigTestUtil.create(
      CLUSTER, ImmutableMap.of(StreamsConfig.APPLICATION_SERVER_CONFIG, "http://host:1234"));

  final FakeKafkaTopicClient fakeKafkaTopicClient = new FakeKafkaTopicClient();
  fakeKafkaTopicClient.createTopic("pageview_topic", 1, (short) 1, emptyMap());
  fakeKafkaTopicClient.createTopic("foo", 1, (short) 1, emptyMap());
  fakeKafkaTopicClient.createTopic("pageview_topic_json", 1, (short) 1, emptyMap());
  serviceContext = TestServiceContext.create(fakeKafkaTopicClient);

  final SpecificQueryIdGenerator hybridQueryIdGenerator = new SpecificQueryIdGenerator();
  final MetricCollectors metricCollectors = new MetricCollectors();
  ksqlEngine = KsqlEngineTestUtil.createKsqlEngine(
      serviceContext,
      new MetaStoreImpl(new InternalFunctionRegistry()),
      (engine) -> new KsqlEngineMetrics("", engine, Collections.emptyMap(), Optional.empty(), metricCollectors),
      hybridQueryIdGenerator,
      ksqlConfig,
      metricCollectors);

  statementParser = new StatementParser(ksqlEngine);
  statementExecutor = new InteractiveStatementExecutor(
      serviceContext, ksqlEngine, statementParser, hybridQueryIdGenerator,
      InternalTopicSerdes.deserializer(Command.class));
  statementExecutorWithMocks = new InteractiveStatementExecutor(
      serviceContext, mockEngine, mockParser, mockQueryIdGenerator, commandDeserializer);
  statementExecutor.configure(ksqlConfig);
  statementExecutorWithMocks.configure(ksqlConfig);

  plannedCommand = new Command(
      CREATE_STREAM_FOO_STATEMENT, emptyMap(),
      ksqlConfig.getAllConfigPropsWithSecretsObfuscated(), Optional.of(plan));
}
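The three topics are created before the engine handles any statements, so commands that reference them (for example a CREATE STREAM over pageview_topic) pass their topic-existence checks against the fake client. A short sketch of verifying that wiring, assuming ServiceContext.getTopicClient() hands back the same client passed to TestServiceContext.create:

// Sketch only: the engine sees the pre-created topics through the service context.
// getTopicClient() and isTopicExists(...) are assumed from the ksql service interfaces.
final KafkaTopicClient engineView = serviceContext.getTopicClient();
if (!engineView.isTopicExists("pageview_topic")) {
  throw new AssertionError("expected the pre-created topic to be visible to the engine");
}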
Use of io.confluent.ksql.services.FakeKafkaTopicClient in project ksql by confluentinc.
From the class KsqlResourceTest, method setUp:
@Before
public void setUp() throws IOException, RestClientException {
  commandStatus = new QueuedCommandStatus(
      0, new CommandStatusFuture(new CommandId(TOPIC, "whateva", CREATE)));
  commandStatus1 = new QueuedCommandStatus(
      1, new CommandStatusFuture(new CommandId(TABLE, "something", DROP)));
  final QueuedCommandStatus commandStatus2 = new QueuedCommandStatus(
      2, new CommandStatusFuture(new CommandId(STREAM, "something", EXECUTE)));

  kafkaTopicClient = new FakeKafkaTopicClient();
  kafkaConsumerGroupClient = new FakeKafkaConsumerGroupClient();
  serviceContext = TestServiceContext.create(kafkaTopicClient, kafkaConsumerGroupClient);
  schemaRegistryClient = serviceContext.getSchemaRegistryClient();
  registerValueSchema(schemaRegistryClient);

  ksqlRestConfig = new KsqlRestConfig(getDefaultKsqlConfig());
  ksqlConfig = new KsqlConfig(ksqlRestConfig.getKsqlConfigProperties());

  final KsqlExecutionContext.ExecuteResult result = mock(KsqlExecutionContext.ExecuteResult.class);
  when(sandbox.execute(any(), any(ConfiguredKsqlPlan.class))).thenReturn(result);
  when(result.getQuery()).thenReturn(Optional.empty());

  MutableFunctionRegistry fnRegistry = new InternalFunctionRegistry();
  final Metrics metrics = new Metrics();
  UserFunctionLoader.newInstance(ksqlConfig, fnRegistry, ".", metrics).load();
  metaStore = new MetaStoreImpl(fnRegistry);

  final MetricCollectors metricCollectors = new MetricCollectors(metrics);
  realEngine = KsqlEngineTestUtil.createKsqlEngine(
      serviceContext,
      metaStore,
      (engine) -> new KsqlEngineMetrics("", engine, Collections.emptyMap(), Optional.empty(), metricCollectors),
      new SequentialQueryIdGenerator(),
      ksqlConfig,
      metricCollectors);

  securityContext = new KsqlSecurityContext(Optional.empty(), serviceContext);

  when(commandRunner.getCommandQueue()).thenReturn(commandStore);
  when(commandRunnerWarning.get()).thenReturn("");
  when(commandStore.createTransactionalProducer()).thenReturn(transactionalProducer);

  ksqlEngine = realEngine;
  when(sandbox.getMetaStore()).thenAnswer(inv -> metaStore.copy());

  addTestTopicAndSources();

  when(commandStore.enqueueCommand(any(), any(), any(Producer.class)))
      .thenReturn(commandStatus)
      .thenReturn(commandStatus1)
      .thenReturn(commandStatus2);

  streamName = KsqlIdentifierTestUtil.uniqueIdentifierName();

  when(schemaInjectorFactory.apply(any())).thenReturn(sandboxSchemaInjector);
  when(schemaInjectorFactory.apply(serviceContext)).thenReturn(schemaInjector);
  when(topicInjectorFactory.apply(any())).thenReturn(sandboxTopicInjector);
  when(topicInjectorFactory.apply(ksqlEngine)).thenReturn(topicInjector);

  when(sandboxSchemaInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));
  when(schemaInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));
  when(sandboxTopicInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));
  when(topicInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));

  when(errorsHandler.generateResponse(any(), any())).thenAnswer(new Answer<EndpointResponse>() {
    @Override
    public EndpointResponse answer(final InvocationOnMock invocation) throws Throwable {
      final Object[] args = invocation.getArguments();
      return (EndpointResponse) args[1];
    }
  });

  setUpKsqlResource();
}
Use of io.confluent.ksql.services.FakeKafkaTopicClient in project ksql by confluentinc.
From the class TransientQueryCleanupServiceTest, method setUp:
@Before
public void setUp() {
  service.setQueryRegistry(registry);
  when(q1.getQueryId()).thenReturn(new QueryId(APP_ID_1));
  when(q2.getQueryId()).thenReturn(new QueryId(APP_ID_2));
  when(q3.getQueryId()).thenReturn(new QueryId(APP_ID_3));
  when(q4.getQueryId()).thenReturn(new QueryId(APP_ID_4));
  when(q5.getQueryId()).thenReturn(new QueryId(APP_ID_5));

  FakeKafkaTopicClient client = new FakeKafkaTopicClient();
  ALL_APP_IDS.forEach(id -> {
    client.createTopic(id + "-KafkaTopic_Right-Reduce-changelog", 1, (short) 1);
    client.createTopic(id + "-Join-repartition", 1, (short) 1);
  });
  when(service.getTopicClient()).thenReturn(client);
}
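Here the fake client is seeded with Streams-internal topic names (one changelog and one repartition topic per application id) so the cleanup service has leftover topics to detect. A sketch of inspecting the seeded state, assuming listTopicNames() from the KafkaTopicClient interface:

// Sketch only: with two topics per app id, the fake client should now hold
// 2 * ALL_APP_IDS.size() topic names. listTopicNames() is assumed from KafkaTopicClient.
final java.util.Set<String> seeded = client.listTopicNames();
seeded.stream()
    .filter(name -> name.endsWith("-Join-repartition"))
    .forEach(name -> System.out.println("seeded repartition topic: " + name));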