
Example 1 with ServiceContext

use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

In the class PushRouting, the method connectToHosts:

/**
 * Connects to all of the hosts provided.
 * @return A future for a PushConnectionsHandle, which can be used to terminate connections.
 */
@SuppressWarnings("checkstyle:ParameterNumber")
private CompletableFuture<PushConnectionsHandle> connectToHosts(
        final ServiceContext serviceContext,
        final PushPhysicalPlanManager pushPhysicalPlanManager,
        final ConfiguredStatement<Query> statement,
        final Collection<KsqlNode> hosts,
        final LogicalSchema outputSchema,
        final TransientQueryQueue transientQueryQueue,
        final PushConnectionsHandle pushConnectionsHandle,
        final boolean dynamicallyAddedNode,
        final Optional<ScalablePushQueryMetrics> scalablePushQueryMetrics,
        final Set<KsqlNode> catchupHosts,
        final PushRoutingOptions pushRoutingOptions,
        final String thisHostName) {
    final Map<KsqlNode, CompletableFuture<RoutingResult>> futureMap = new LinkedHashMap<>();
    for (final KsqlNode node : hosts) {
        pushConnectionsHandle.add(node, new RoutingResult(RoutingResultStatus.IN_PROGRESS, () -> {
        }));
        final CompletableFuture<Void> callback = new CompletableFuture<>();
        callback.handle((v, t) -> {
            if (t == null) {
                pushConnectionsHandle.get(node).ifPresent(result -> {
                    result.close();
                    result.updateStatus(RoutingResultStatus.COMPLETE);
                });
                LOG.info("Host {} completed request {}.", node, pushPhysicalPlanManager.getQueryId());
            } else if (t instanceof GapFoundException) {
                pushConnectionsHandle.get(node).ifPresent(result -> {
                    result.close();
                    result.updateStatus(RoutingResultStatus.OFFSET_GAP_FOUND);
                });
            } else {
                pushConnectionsHandle.completeExceptionally(t);
            }
            return null;
        });
        futureMap.put(node, executeOrRouteQuery(
            node, statement, serviceContext, pushPhysicalPlanManager, outputSchema,
            transientQueryQueue, callback, scalablePushQueryMetrics,
            pushConnectionsHandle.getOffsetsTracker(), catchupHosts.contains(node),
            pushRoutingOptions, thisHostName));
    }
    return CompletableFuture.allOf(futureMap.values().toArray(new CompletableFuture[0])).thenApply(v -> {
        for (final KsqlNode node : hosts) {
            final CompletableFuture<RoutingResult> future = futureMap.get(node);
            final RoutingResult routingResult = future.join();
            pushConnectionsHandle.add(node, routingResult);
        }
        return pushConnectionsHandle;
    }).exceptionally(t -> {
        final KsqlNode node = futureMap.entrySet().stream()
            .filter(e -> e.getValue().isCompletedExceptionally())
            .map(Entry::getKey)
            .findFirst()
            .orElse(null);
        for (KsqlNode n : hosts) {
            final CompletableFuture<RoutingResult> future = futureMap.get(n);
            // Take whatever completed exceptionally and mark it as failed
            if (future.isCompletedExceptionally()) {
                pushConnectionsHandle.get(n).ifPresent(result -> result.updateStatus(RoutingResultStatus.FAILED));
            } else {
                final RoutingResult routingResult = future.join();
                pushConnectionsHandle.add(n, routingResult);
            }
        }
        LOG.warn("Error routing query {} id {} to host {} at timestamp {} with exception {}",
            statement.getStatementText(), pushPhysicalPlanManager.getQueryId(), node,
            System.currentTimeMillis(), t.getCause());
        // If the node was added dynamically, allow retries and don't fail the original
        // request; only fail the overall handle for the initial connection attempt.
        if (!dynamicallyAddedNode) {
            pushConnectionsHandle.completeExceptionally(new KsqlException(String.format(
                "Unable to execute push query \"%s\". %s",
                statement.getStatementText(), t.getCause().getMessage())));
        }
        return pushConnectionsHandle;
    }).exceptionally(t -> {
        LOG.error("Unexpected error handling exception", t);
        return pushConnectionsHandle;
    });
}
Also used : Query(io.confluent.ksql.parser.tree.Query) LoadingCache(com.google.common.cache.LoadingCache) KeyValue(io.confluent.ksql.util.KeyValue) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) RowMetadata(io.confluent.ksql.util.RowMetadata) DataRow(io.confluent.ksql.rest.entity.StreamedRow.DataRow) RestResponse(io.confluent.ksql.rest.client.RestResponse) Context(io.vertx.core.Context) KsqlErrorMessage(io.confluent.ksql.rest.entity.KsqlErrorMessage) PushContinuationToken(io.confluent.ksql.rest.entity.PushContinuationToken) QueryRow(io.confluent.ksql.physical.common.QueryRow) Map(java.util.Map) QueryId(io.confluent.ksql.query.QueryId) KsqlRequestConfig(io.confluent.ksql.util.KsqlRequestConfig) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ScalablePushQueryMetrics(io.confluent.ksql.internal.ScalablePushQueryMetrics) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) BufferedPublisher(io.confluent.ksql.reactive.BufferedPublisher) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) CacheLoader(com.google.common.cache.CacheLoader) KsqlNode(io.confluent.ksql.physical.scalablepush.locator.PushLocator.KsqlNode) BaseSubscriber(io.confluent.ksql.reactive.BaseSubscriber) List(java.util.List) VertxUtils(io.confluent.ksql.util.VertxUtils) Entry(java.util.Map.Entry) KsqlException(io.confluent.ksql.util.KsqlException) Optional(java.util.Optional) TransientQueryQueue(io.confluent.ksql.query.TransientQueryQueue) CacheBuilder(com.google.common.cache.CacheBuilder) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) KeyValueMetadata(io.confluent.ksql.util.KeyValueMetadata) PushOffsetRange(io.confluent.ksql.util.PushOffsetRange) OffsetVector(io.confluent.ksql.util.OffsetVector) BiConsumer(java.util.function.BiConsumer) Logger(org.slf4j.Logger) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) GenericRow(io.confluent.ksql.GenericRow) Subscription(org.reactivestreams.Subscription) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) PushOffsetVector(io.confluent.ksql.util.PushOffsetVector) Collections(java.util.Collections)
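The heart of connectToHosts is a fan-out/gather over CompletableFutures: one future per host, CompletableFuture.allOf to wait for all of them, thenApply for the all-success path and exceptionally for the mixed-failure path. Below is a minimal, self-contained sketch of that pattern using only java.util.concurrent; the host-name strings, the FanOutSketch class and the connect stub are hypothetical stand-ins, not ksqlDB types, and this is not the project's implementation.

// Fan-out per-host futures, gather with allOf, then handle success and failure paths.
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

public final class FanOutSketch {

    // Placeholder for a per-host connection attempt; fails for the "bad" host.
    static CompletableFuture<String> connect(final String host) {
        return CompletableFuture.supplyAsync(() -> {
            if (host.contains("bad")) {
                throw new IllegalStateException("connection refused: " + host);
            }
            return "SUCCESS";
        });
    }

    public static void main(final String[] args) {
        final List<String> hosts = List.of("node-1", "node-2", "bad-node");
        final Map<String, CompletableFuture<String>> futureMap = new LinkedHashMap<>();
        for (final String host : hosts) {
            futureMap.put(host, connect(host));
        }

        CompletableFuture.allOf(futureMap.values().toArray(new CompletableFuture[0]))
            .thenApply(v -> {
                // All-success path: every per-host future completed normally.
                futureMap.forEach((host, f) -> System.out.println(host + " -> " + f.join()));
                return null;
            })
            .exceptionally(t -> {
                // Mixed path: allOf completed exceptionally, but every component future is
                // already done, so the successful ones can still be read with join().
                futureMap.forEach((host, f) -> System.out.println(
                    host + " -> " + (f.isCompletedExceptionally() ? "FAILED" : f.join())));
                return null;
            })
            .join();
    }
}

As in connectToHosts, the exceptionally branch can safely inspect the other futures because allOf only completes once every component future has finished one way or the other.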

Example 2 with ServiceContext

use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

In the class PushRouting, the method checkForNewHosts:

private void checkForNewHosts(
        final ServiceContext serviceContext,
        final PushPhysicalPlanManager pushPhysicalPlanManager,
        final ConfiguredStatement<Query> statement,
        final LogicalSchema outputSchema,
        final TransientQueryQueue transientQueryQueue,
        final PushConnectionsHandle pushConnectionsHandle,
        final Optional<ScalablePushQueryMetrics> scalablePushQueryMetrics,
        final PushRoutingOptions pushRoutingOptions,
        final String thisHostName) {
    VertxUtils.checkContext(pushPhysicalPlanManager.getContext());
    if (pushConnectionsHandle.isClosed()) {
        return;
    }
    final Set<KsqlNode> updatedHosts = registryToNodes.apply(pushPhysicalPlanManager.getScalablePushRegistry());
    final Set<KsqlNode> hosts = pushConnectionsHandle.getActiveHosts();
    final Set<KsqlNode> newHosts = Sets.difference(updatedHosts, hosts).stream()
        .filter(node -> pushConnectionsHandle.get(node)
            .map(routingResult -> routingResult.getStatus() != RoutingResultStatus.IN_PROGRESS)
            .orElse(true))
        .collect(Collectors.toSet());
    final Set<KsqlNode> removedHosts = Sets.difference(hosts, updatedHosts);
    if (newHosts.size() > 0) {
        LOG.info("Dynamically adding new hosts {} for {}", newHosts, pushPhysicalPlanManager.getQueryId());
        final Set<KsqlNode> catchupHosts = newHosts.stream()
            .filter(node -> pushConnectionsHandle.get(node)
                .map(routingResult -> routingResult.getStatus() == RoutingResultStatus.OFFSET_GAP_FOUND)
                .orElse(false))
            .collect(Collectors.toSet());
        connectToHosts(serviceContext, pushPhysicalPlanManager, statement, newHosts, outputSchema,
            transientQueryQueue, pushConnectionsHandle, true, scalablePushQueryMetrics,
            catchupHosts, pushRoutingOptions, thisHostName);
    }
    if (removedHosts.size() > 0) {
        LOG.info("Dynamically removing hosts {} for {}", removedHosts, pushPhysicalPlanManager.getQueryId());
        for (final KsqlNode node : removedHosts) {
            final RoutingResult result = pushConnectionsHandle.remove(node);
            result.close();
            result.updateStatus(RoutingResultStatus.REMOVED);
        }
    }
    pushPhysicalPlanManager.getContext().owner().setTimer(clusterCheckInterval, timerId ->
        checkForNewHosts(serviceContext, pushPhysicalPlanManager, statement, outputSchema,
            transientQueryQueue, pushConnectionsHandle, scalablePushQueryMetrics,
            pushRoutingOptions, thisHostName));
}
Also used : Query(io.confluent.ksql.parser.tree.Query) LoadingCache(com.google.common.cache.LoadingCache) KeyValue(io.confluent.ksql.util.KeyValue) ServiceContext(io.confluent.ksql.services.ServiceContext) LoggerFactory(org.slf4j.LoggerFactory) RowMetadata(io.confluent.ksql.util.RowMetadata) DataRow(io.confluent.ksql.rest.entity.StreamedRow.DataRow) RestResponse(io.confluent.ksql.rest.client.RestResponse) Context(io.vertx.core.Context) KsqlErrorMessage(io.confluent.ksql.rest.entity.KsqlErrorMessage) PushContinuationToken(io.confluent.ksql.rest.entity.PushContinuationToken) QueryRow(io.confluent.ksql.physical.common.QueryRow) Map(java.util.Map) QueryId(io.confluent.ksql.query.QueryId) KsqlRequestConfig(io.confluent.ksql.util.KsqlRequestConfig) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) ScalablePushQueryMetrics(io.confluent.ksql.internal.ScalablePushQueryMetrics) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) BufferedPublisher(io.confluent.ksql.reactive.BufferedPublisher) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) CacheLoader(com.google.common.cache.CacheLoader) KsqlNode(io.confluent.ksql.physical.scalablepush.locator.PushLocator.KsqlNode) BaseSubscriber(io.confluent.ksql.reactive.BaseSubscriber) List(java.util.List) VertxUtils(io.confluent.ksql.util.VertxUtils) Entry(java.util.Map.Entry) KsqlException(io.confluent.ksql.util.KsqlException) Optional(java.util.Optional) TransientQueryQueue(io.confluent.ksql.query.TransientQueryQueue) CacheBuilder(com.google.common.cache.CacheBuilder) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) StreamedRow(io.confluent.ksql.rest.entity.StreamedRow) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) KeyValueMetadata(io.confluent.ksql.util.KeyValueMetadata) PushOffsetRange(io.confluent.ksql.util.PushOffsetRange) OffsetVector(io.confluent.ksql.util.OffsetVector) BiConsumer(java.util.function.BiConsumer) Logger(org.slf4j.Logger) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) GenericRow(io.confluent.ksql.GenericRow) Subscription(org.reactivestreams.Subscription) Preconditions(com.google.common.base.Preconditions) VisibleForTesting(com.google.common.annotations.VisibleForTesting) PushOffsetVector(io.confluent.ksql.util.PushOffsetVector) Collections(java.util.Collections)
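checkForNewHosts re-arms a one-shot Vert.x timer at the end of every pass, which is how the query keeps polling the registry for added or removed hosts without a dedicated thread. Here is a hedged sketch of that re-arming pattern against the plain Vert.x core API; the MembershipPoller class, the fetchHosts stub and the one-second interval are invented for illustration and are not part of ksqlDB.

// Re-arming one-shot timer: each pass schedules the next check, mirroring owner().setTimer(...).
import io.vertx.core.Vertx;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

public final class MembershipPoller {

    private static final long CHECK_INTERVAL_MS = 1000;

    private final Vertx vertx = Vertx.vertx();
    private final AtomicReference<Set<String>> knownHosts = new AtomicReference<>(Set.of());

    void start() {
        checkForNewHosts();
    }

    private void checkForNewHosts() {
        final Set<String> updated = fetchHosts(); // hypothetical discovery call
        final Set<String> previous = knownHosts.getAndSet(updated);
        updated.stream().filter(h -> !previous.contains(h))
            .forEach(h -> System.out.println("dynamically adding host " + h));
        previous.stream().filter(h -> !updated.contains(h))
            .forEach(h -> System.out.println("dynamically removing host " + h));
        // setTimer fires once, so explicitly schedule the next check.
        vertx.setTimer(CHECK_INTERVAL_MS, timerId -> checkForNewHosts());
    }

    private Set<String> fetchHosts() {
        // Stand-in for querying a cluster registry.
        return Math.random() < 0.5 ? Set.of("node-1") : Set.of("node-1", "node-2");
    }

    public static void main(final String[] args) {
        new MembershipPoller().start();
    }
}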

Example 3 with ServiceContext

use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

In the class JoinNodeTest, the method setUp:

@Before
public void setUp() {
    builder = new StreamsBuilder();
    final ServiceContext serviceContext = mock(ServiceContext.class);
    when(serviceContext.getTopicClient()).thenReturn(mockKafkaTopicClient);
    when(planBuildContext.getKsqlConfig()).thenReturn(ksqlConfig);
    when(planBuildContext.getServiceContext()).thenReturn(serviceContext);
    when(planBuildContext.withKsqlConfig(any())).thenReturn(planBuildContext);
    when(planBuildContext.getFunctionRegistry()).thenReturn(functionRegistry);
    when(planBuildContext.buildNodeContext(any())).thenAnswer(inv -> new QueryContext.Stacker().push(inv.getArgument(0).toString()));
    when(executeContext.getKsqlConfig()).thenReturn(ksqlConfig);
    when(executeContext.getStreamsBuilder()).thenReturn(builder);
    when(executeContext.getFunctionRegistry()).thenReturn(functionRegistry);
    when(executeContext.getProcessingLogger(any())).thenReturn(processLogger);
    when(left.getSchema()).thenReturn(LEFT_NODE_SCHEMA);
    when(right.getSchema()).thenReturn(RIGHT_NODE_SCHEMA);
    when(right2.getSchema()).thenReturn(RIGHT2_NODE_SCHEMA);
    when(left.getPartitions(mockKafkaTopicClient)).thenReturn(2);
    when(right.getPartitions(mockKafkaTopicClient)).thenReturn(2);
    when(left.getSourceName()).thenReturn(Optional.of(LEFT_ALIAS));
    when(right.getSourceName()).thenReturn(Optional.of(RIGHT_ALIAS));
    when(joinKey.resolveKeyName(any(), any())).thenReturn(SYNTH_KEY);
    setUpSource(left, VALUE_FORMAT, leftSourceNode, leftSource);
    setUpSource(right, OTHER_FORMAT, rightSourceNode, rightSource);
    setUpSource(right2, OTHER_FORMAT, right2SourceNode, rightSource);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) ServiceContext(io.confluent.ksql.services.ServiceContext) Before(org.junit.Before)
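The setUp method is plain Mockito wiring: when/thenReturn for collaborators with fixed answers, and thenAnswer where the stub must derive its result from the invocation arguments (buildNodeContext). Here is a small, self-contained sketch of the same stubbing style against hypothetical TopicClient and BuildContext interfaces rather than the real ksqlDB planner types.

// Mockito stubbing sketch: fixed returns plus an argument-derived answer.
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public final class StubbingSketch {

    interface TopicClient {
        int partitionCount(String topic);
    }

    interface BuildContext {
        TopicClient topicClient();
        String nodeContext(Object step);
    }

    public static void main(final String[] args) {
        final TopicClient topicClient = mock(TopicClient.class);
        final BuildContext buildContext = mock(BuildContext.class);

        // Fixed return values, as with getTopicClient()/getKsqlConfig() in the test above.
        when(topicClient.partitionCount(any())).thenReturn(2);
        when(buildContext.topicClient()).thenReturn(topicClient);

        // Derive the answer from the invocation argument, as with buildNodeContext(any()).
        when(buildContext.nodeContext(any()))
            .thenAnswer(inv -> "ctx/" + inv.getArgument(0).toString());

        System.out.println(buildContext.topicClient().partitionCount("orders")); // 2
        System.out.println(buildContext.nodeContext("join"));                    // ctx/join
    }
}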

Example 4 with ServiceContext

use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

In the class ListConnectorsExecutor, the method execute:

@SuppressWarnings("OptionalGetWithoutIsPresent")
public StatementExecutorResponse execute(
        final ConfiguredStatement<ListConnectors> configuredStatement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext ksqlExecutionContext,
        final ServiceContext serviceContext) {
    final ConnectClient connectClient = serviceContext.getConnectClient();
    final ConnectResponse<List<String>> connectors = connectClient.connectors();
    if (connectors.error().isPresent()) {
        return StatementExecutorResponse.handled(connectErrorHandler.handle(configuredStatement, connectors));
    }
    final List<SimpleConnectorInfo> infos = new ArrayList<>();
    final List<KsqlWarning> warnings = new ArrayList<>();
    final Scope scope = configuredStatement.getStatement().getScope();
    for (final String name : connectors.datum().get()) {
        final ConnectResponse<ConnectorInfo> response = connectClient.describe(name);
        if (response.datum().filter(i -> inScope(i.type(), scope)).isPresent()) {
            final ConnectResponse<ConnectorStateInfo> status = connectClient.status(name);
            infos.add(fromConnectorInfoResponse(name, response, status));
        } else if (response.error().isPresent()) {
            if (scope == Scope.ALL) {
                infos.add(new SimpleConnectorInfo(name, ConnectorType.UNKNOWN, null, null));
            }
            warnings.add(new KsqlWarning(String.format("Could not describe connector %s: %s", name, response.error().get())));
        }
    }
    return StatementExecutorResponse.handled(Optional.of(new ConnectorList(configuredStatement.getStatementText(), warnings, infos)));
}
Also used : SessionProperties(io.confluent.ksql.rest.SessionProperties) Scope(io.confluent.ksql.parser.tree.ListConnectors.Scope) ConnectClient(io.confluent.ksql.services.ConnectClient) ServiceContext(io.confluent.ksql.services.ServiceContext) SimpleConnectorInfo(io.confluent.ksql.rest.entity.SimpleConnectorInfo) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) ArrayList(java.util.ArrayList) ConnectorStateInfo(org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo) List(java.util.List) ConnectorInfo(org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo) ConnectorList(io.confluent.ksql.rest.entity.ConnectorList) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) State(org.apache.kafka.connect.runtime.AbstractStatus.State) ConnectResponse(io.confluent.ksql.services.ConnectClient.ConnectResponse) Optional(java.util.Optional) ListConnectors(io.confluent.ksql.parser.tree.ListConnectors) AbstractState(org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo.AbstractState) ConnectorType(org.apache.kafka.connect.runtime.rest.entities.ConnectorType) KsqlWarning(io.confluent.ksql.rest.entity.KsqlWarning) ConnectorConfig(org.apache.kafka.connect.runtime.ConnectorConfig)
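The executor deliberately keeps going when a single connector cannot be described: the failure is downgraded to a KsqlWarning while the rest of the listing succeeds. A minimal sketch of that per-item describe-and-downgrade pattern follows; the Client and Response types are hypothetical stand-ins, not the ksqlDB ConnectClient API.

// Per-item lookup: collect successes as infos, downgrade failures to warnings.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public final class DescribeAllSketch {

    record Response<T>(Optional<T> datum, Optional<String> error) { }

    interface Client {
        List<String> names();
        Response<String> describe(String name);
    }

    public static void main(final String[] args) {
        final Map<String, String> known = Map.of("sink-a", "SinkConnector");
        final Client client = new Client() {
            public List<String> names() { return List.of("sink-a", "broken-b"); }
            public Response<String> describe(final String name) {
                return known.containsKey(name)
                    ? new Response<String>(Optional.of(known.get(name)), Optional.empty())
                    : new Response<String>(Optional.empty(), Optional.of("404 not found"));
            }
        };

        final List<String> infos = new ArrayList<>();
        final List<String> warnings = new ArrayList<>();
        for (final String name : client.names()) {
            final Response<String> response = client.describe(name);
            if (response.datum().isPresent()) {
                infos.add(name + ": " + response.datum().get());
            } else {
                // One bad item should not fail the whole listing.
                warnings.add(String.format("Could not describe connector %s: %s",
                    name, response.error().orElse("unknown error")));
            }
        }
        System.out.println("infos=" + infos);
        System.out.println("warnings=" + warnings);
    }
}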

Example 5 with ServiceContext

use of io.confluent.ksql.services.ServiceContext in project ksql by confluentinc.

In the class ListTopicsExecutor, the method execute:

public static StatementExecutorResponse execute(
        final ConfiguredStatement<ListTopics> statement,
        final SessionProperties sessionProperties,
        final KsqlExecutionContext executionContext,
        final ServiceContext serviceContext) {
    final KafkaTopicClient client = serviceContext.getTopicClient();
    final Map<String, TopicDescription> topicDescriptions = listTopics(client, statement);
    if (statement.getStatement().getShowExtended()) {
        final KafkaConsumerGroupClient consumerGroupClient = new KafkaConsumerGroupClientImpl(serviceContext::getAdminClient);
        final Map<String, List<Integer>> topicConsumersAndGroupCount = getTopicConsumerAndGroupCounts(consumerGroupClient);
        final List<KafkaTopicInfoExtended> topicInfoExtendedList = topicDescriptions.values().stream()
            .map(desc -> topicDescriptionToTopicInfoExtended(desc, topicConsumersAndGroupCount))
            .collect(Collectors.toList());
        return StatementExecutorResponse.handled(Optional.of(new KafkaTopicsListExtended(statement.getStatementText(), topicInfoExtendedList)));
    } else {
        final List<KafkaTopicInfo> topicInfoList = topicDescriptions.values().stream()
            .map(ListTopicsExecutor::topicDescriptionToTopicInfo)
            .collect(Collectors.toList());
        return StatementExecutorResponse.handled(Optional.of(new KafkaTopicsList(statement.getStatementText(), topicInfoList)));
    }
}
Also used : Arrays(java.util.Arrays) SessionProperties(io.confluent.ksql.rest.SessionProperties) ListTopics(io.confluent.ksql.parser.tree.ListTopics) ServiceContext(io.confluent.ksql.services.ServiceContext) ConsumerSummary(io.confluent.ksql.services.KafkaConsumerGroupClient.ConsumerSummary) HashMap(java.util.HashMap) KafkaTopicsList(io.confluent.ksql.rest.entity.KafkaTopicsList) KafkaTopicsListExtended(io.confluent.ksql.rest.entity.KafkaTopicsListExtended) ReservedInternalTopics(io.confluent.ksql.util.ReservedInternalTopics) KafkaTopicClient(io.confluent.ksql.services.KafkaTopicClient) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) Map(java.util.Map) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) TopicPartition(org.apache.kafka.common.TopicPartition) KafkaConsumerGroupClientImpl(io.confluent.ksql.services.KafkaConsumerGroupClientImpl) Collection(java.util.Collection) Set(java.util.Set) ConfiguredStatement(io.confluent.ksql.statement.ConfiguredStatement) Collectors(java.util.stream.Collectors) List(java.util.List) TreeMap(java.util.TreeMap) KsqlExecutionContext(io.confluent.ksql.KsqlExecutionContext) KafkaConsumerGroupClient(io.confluent.ksql.services.KafkaConsumerGroupClient) KafkaTopicInfoExtended(io.confluent.ksql.rest.entity.KafkaTopicInfoExtended) KafkaTopicInfo(io.confluent.ksql.rest.entity.KafkaTopicInfo) Optional(java.util.Optional)
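The ServiceContext's KafkaTopicClient is, underneath, the Kafka admin API. As a rough illustration of what listTopics amounts to at that level, here is a hedged sketch that lists and describes topics with the plain Kafka Admin client directly; the bootstrap address is a placeholder, and this is not the ksqlDB KafkaTopicClient implementation. allTopicNames() assumes Kafka clients 3.1+; older clients expose all() instead.

// Listing and describing topics with the plain Kafka Admin client.
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.TopicDescription;

public final class ListTopicsSketch {

    public static void main(final String[] args) throws Exception {
        final Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder

        try (Admin admin = Admin.create(props)) {
            // Names of all topics visible to this client (internal topics excluded by default).
            final Set<String> names = admin.listTopics().names().get();

            // Full descriptions: partition count, replicas, leaders.
            final Map<String, TopicDescription> descriptions =
                admin.describeTopics(names).allTopicNames().get();

            descriptions.forEach((name, desc) ->
                System.out.printf("%s: %d partitions%n", name, desc.partitions().size()));
        }
    }
}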

Aggregations

ServiceContext (io.confluent.ksql.services.ServiceContext): 44 uses
Optional (java.util.Optional): 25 uses
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 23 uses
Collectors (java.util.stream.Collectors): 23 uses
List (java.util.List): 22 uses
ConfiguredStatement (io.confluent.ksql.statement.ConfiguredStatement): 21 uses
KsqlException (io.confluent.ksql.util.KsqlException): 21 uses
Map (java.util.Map): 21 uses
ImmutableMap (com.google.common.collect.ImmutableMap): 18 uses
TestServiceContext (io.confluent.ksql.services.TestServiceContext): 15 uses
Collections (java.util.Collections): 15 uses
ImmutableList (com.google.common.collect.ImmutableList): 13 uses
KsqlExecutionContext (io.confluent.ksql.KsqlExecutionContext): 13 uses
LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema): 13 uses
KsqlEngine (io.confluent.ksql.engine.KsqlEngine): 12 uses
KsqlStatementException (io.confluent.ksql.util.KsqlStatementException): 12 uses
Test (org.junit.Test): 12 uses
Logger (org.slf4j.Logger): 10 uses
LoggerFactory (org.slf4j.LoggerFactory): 10 uses
MatcherAssert.assertThat (org.hamcrest.MatcherAssert.assertThat): 9 uses