Use of io.confluent.ksql.function.InternalFunctionRegistry in project ksql by confluentinc.
From the class TestExecutor, method getKsqlEngine.
static KsqlEngine getKsqlEngine(final ServiceContext serviceContext, final Optional<String> extensionDir) {
  final FunctionRegistry functionRegistry;
  if (extensionDir.isPresent()) {
    final MutableFunctionRegistry mutable = new InternalFunctionRegistry();
    UdfLoaderUtil.load(mutable, extensionDir.get());
    functionRegistry = mutable;
  } else {
    functionRegistry = TestFunctionRegistry.INSTANCE.get();
  }
  final MutableMetaStore metaStore = new MetaStoreImpl(functionRegistry);
  final MetricCollectors metricCollectors = new MetricCollectors();
  return new KsqlEngine(
      serviceContext,
      ProcessingLogContext.create(),
      "test_instance_",
      metaStore,
      (engine) -> new KsqlEngineMetrics("", engine, Collections.emptyMap(), Optional.empty(), metricCollectors),
      new SequentialQueryIdGenerator(),
      KsqlConfig.empty(),
      Collections.emptyList(),
      metricCollectors);
}
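The helper either reuses the shared, built-ins-only TestFunctionRegistry or builds a fresh InternalFunctionRegistry and fills it from the extension directory via UdfLoaderUtil. A minimal sketch of how a test might call it; the extension path and the use of TestServiceContext here are assumptions, not part of TestExecutor:

// Sketch only: one engine that sees built-in functions plus UDF jars from a
// hypothetical test directory, and one backed by the shared registry.
final ServiceContext serviceContext = TestServiceContext.create();
final KsqlEngine extendedEngine = getKsqlEngine(serviceContext, Optional.of("src/test/resources/udf-example"));
final KsqlEngine defaultEngine = getKsqlEngine(serviceContext, Optional.empty());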
Use of io.confluent.ksql.function.InternalFunctionRegistry in project ksql by confluentinc.
From the class JsonFormatTest, method before.
@Before
public void before() {
  streamName = "STREAM_" + COUNTER.getAndIncrement();
  ksqlConfig = KsqlConfigTestUtil.create(TEST_HARNESS.kafkaBootstrapServers());
  serviceContext = ServiceContextFactory.create(ksqlConfig, DisabledKsqlClient::instance);
  functionRegistry = new InternalFunctionRegistry();
  UserFunctionLoader.newInstance(ksqlConfig, functionRegistry, ".", new Metrics()).load();
  ksqlEngine = new KsqlEngine(
      serviceContext,
      ProcessingLogContext.create(),
      functionRegistry,
      ServiceInfo.create(ksqlConfig),
      new SequentialQueryIdGenerator(),
      ksqlConfig,
      Collections.emptyList(),
      new MetricCollectors());
  topicClient = serviceContext.getTopicClient();
  metaStore = ksqlEngine.getMetaStore();
  createInitTopics();
  produceInitData();
  execInitCreateStreamQueries();
}
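Here the test builds its own InternalFunctionRegistry, populates it with UserFunctionLoader, and only then hands it to the engine. A matching teardown is sketched below; it assumes, as is typical for these integration tests, that both the engine and the service context are closeable:

@After
public void after() {
  // Assumed cleanup mirror of before(): release the engine first, then the service context.
  ksqlEngine.close();
  serviceContext.close();
}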
Use of io.confluent.ksql.function.InternalFunctionRegistry in project ksql by confluentinc.
From the class RequestValidatorTest, method setUp.
@Before
public void setUp() {
  metaStore = new MetaStoreImpl(new InternalFunctionRegistry());
  when(sandboxEngine.prepare(any(), any()))
      .thenAnswer(invocation -> KSQL_PARSER.prepare(invocation.getArgument(0), metaStore));
  when(sandboxEngine.getKsqlConfig()).thenReturn(ksqlConfig);
  executionContext = sandboxEngine;
  serviceContext = SandboxedServiceContext.create(TestServiceContext.create());
  when(ksqlConfig.getInt(KsqlConfig.KSQL_ACTIVE_PERSISTENT_QUERY_LIMIT_CONFIG))
      .thenReturn(Integer.MAX_VALUE);
  when(schemaInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));
  when(topicInjector.inject(any())).thenAnswer(inv -> inv.getArgument(0));
  final KsqlStream<?> source = mock(KsqlStream.class);
  when(source.getName()).thenReturn(SourceName.of("SOURCE"));
  final KsqlStream<?> sink = mock(KsqlStream.class);
  when(sink.getName()).thenReturn(SourceName.of("SINK"));
  metaStore.putSource(source, false);
  metaStore.putSource(sink, false);
  givenRequestValidator(ImmutableMap.of());
}
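A MetaStoreImpl built over an InternalFunctionRegistry starts out empty, which is why setUp registers the SOURCE and SINK streams explicitly before the validator runs. A standalone sketch of that registration pattern; the ORDERS name and the Hamcrest assertion are illustrative only:

// Sketch only: register a mocked stream in a fresh metastore and read it back.
final MutableMetaStore store = new MetaStoreImpl(new InternalFunctionRegistry());
final KsqlStream<?> orders = mock(KsqlStream.class);
when(orders.getName()).thenReturn(SourceName.of("ORDERS"));
store.putSource(orders, false);
assertThat(store.getSource(SourceName.of("ORDERS")), is(orders));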
Use of io.confluent.ksql.function.InternalFunctionRegistry in project ksql by confluentinc.
From the class SchemaKStreamTest, method init.
@Before
@SuppressWarnings("rawtypes")
public void init() {
  functionRegistry = new InternalFunctionRegistry();
  schemaResolver = new StepSchemaResolver(ksqlConfig, functionRegistry);
  ksqlStream = (KsqlStream) metaStore.getSource(SourceName.of("TEST1"));
  final KsqlTable<?> ksqlTable = (KsqlTable) metaStore.getSource(SourceName.of("TEST2"));
  schemaKTable = new SchemaKTable(
      tableSourceStep, ksqlTable.getSchema(), keyFormat, ksqlConfig, functionRegistry);
  schemaKStream = new SchemaKStream(
      streamSourceStep, ksqlStream.getSchema(), keyFormat, ksqlConfig, functionRegistry);
}
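Both wrappers are constructed with the same InternalFunctionRegistry instance that feeds the StepSchemaResolver, so function lookups during schema resolution and stream building are consistent. A hypothetical follow-on test, assuming SchemaKStream exposes the schema it was built with:

@Test
public void shouldCarrySourceSchema() {
  // Assumption: SchemaKStream exposes getSchema(); the wrapper should surface
  // the same logical schema as the metastore source it was created from.
  assertThat(schemaKStream.getSchema(), is(ksqlStream.getSchema()));
}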
Use of io.confluent.ksql.function.InternalFunctionRegistry in project ksql by confluentinc.
From the class SourceTopicsExtractorTest, method setUp.
@Before
public void setUp() {
  metaStore = new MetaStoreImpl(new InternalFunctionRegistry());
  ksqlEngine = KsqlEngineTestUtil.createKsqlEngine(serviceContext, metaStore, new MetricCollectors());
  extractor = new SourceTopicsExtractor(metaStore);
  givenStreamWithTopic(STREAM_TOPIC_1, TOPIC_1);
  givenStreamWithTopic(STREAM_TOPIC_2, TOPIC_2);
}
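The extractor walks a parsed statement and collects the Kafka topics behind every source it references, using the metastore populated by givenStreamWithTopic. A rough sketch of the exercise step; givenStatement is a hypothetical parsing helper, and the process/getSourceTopics calls reflect the usual AST-visitor shape rather than a confirmed signature:

// Sketch only: walk a query over the first registered stream and inspect the topics found.
final Statement statement = givenStatement("SELECT * FROM " + STREAM_TOPIC_1 + ";"); // hypothetical helper
extractor.process(statement, null);
assertThat(extractor.getSourceTopics(), hasSize(1));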