
Example 6 with KsqlServerException

use of io.confluent.ksql.util.KsqlServerException in project ksql by confluentinc.

the class TestExecutor method validateCreatedMessage.

private static void validateCreatedMessage(
    final String topicName,
    final Record expectedRecord,
    final ProducerRecord<?, ?> actualProducerRecord,
    final boolean ranWithInsertStatements,
    final int messageIndex
) {
    final Object actualKey = coerceRecordFields(actualProducerRecord.key());
    final Object actualValue = coerceRecordFields(actualProducerRecord.value());
    final long actualTimestamp = actualProducerRecord.timestamp();
    final Headers actualHeaders = actualProducerRecord.headers();
    final JsonNode expectedKey = expectedRecord.getJsonKey().orElse(NullNode.getInstance());
    final JsonNode expectedValue = expectedRecord.getJsonValue().orElseThrow(
        () -> new KsqlServerException("could not get expected value from test record: " + expectedRecord));
    final long expectedTimestamp = expectedRecord.timestamp().orElse(actualTimestamp);
    final List<TestHeader> expectedHeaders = expectedRecord.headers().orElse(ImmutableList.of());
    final AssertionError error = new AssertionError(
        "Topic '" + topicName + "', message " + messageIndex
            + ": Expected <" + expectedKey + ", " + expectedValue + "> "
            + "with timestamp=" + expectedTimestamp
            + " and headers=[" + printHeaders(expectedHeaders) + "]"
            + " but was " + getProducerRecordInString(actualProducerRecord));
    if (expectedRecord.getWindow() != null) {
        final Windowed<?> windowed = (Windowed<?>) actualKey;
        if (!new WindowData(windowed).equals(expectedRecord.getWindow())
            || !ExpectedRecordComparator.matches(((Windowed<?>) actualKey).key(), expectedKey)) {
            throw error;
        }
    } else {
        if (!ExpectedRecordComparator.matches(actualKey, expectedKey)) {
            throw error;
        }
    }
    if (!ExpectedRecordComparator.matches(actualValue, expectedValue)) {
        throw error;
    }
    if (!ExpectedRecordComparator.matches(actualHeaders.toArray(), expectedHeaders)) {
        throw error;
    }
    if ((actualTimestamp != expectedTimestamp) && (!ranWithInsertStatements || expectedTimestamp != 0L)) {
        throw error;
    }
}
Also used : Windowed(org.apache.kafka.streams.kstream.Windowed) TestHeader(io.confluent.ksql.test.model.TestHeader) WindowData(io.confluent.ksql.test.model.WindowData) Headers(org.apache.kafka.common.header.Headers) JsonNode(com.fasterxml.jackson.databind.JsonNode) KsqlServerException(io.confluent.ksql.util.KsqlServerException)
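
Worth calling out in this example: the AssertionError is built once, up front, with the full expected/actual context, and then thrown from whichever check fails, so key, value, header, and timestamp mismatches all report the same message. Below is a minimal, self-contained sketch of that pattern; the field types and the plain Objects.equals checks are simplified placeholders, not the ksql test comparators.

import java.util.Objects;

public final class RecordCheckSketch {

    // Simplified stand-in for validateCreatedMessage: build the error once,
    // then throw it from any check that fails so every failure reports the same context.
    static void validate(final Object expectedKey, final Object actualKey,
                         final Object expectedValue, final Object actualValue,
                         final long expectedTimestamp, final long actualTimestamp) {
        final AssertionError error = new AssertionError(
            "Expected <" + expectedKey + ", " + expectedValue + "> with timestamp=" + expectedTimestamp
                + " but was <" + actualKey + ", " + actualValue + "> with timestamp=" + actualTimestamp);
        if (!Objects.equals(expectedKey, actualKey)) {
            throw error;
        }
        if (!Objects.equals(expectedValue, actualValue)) {
            throw error;
        }
        if (expectedTimestamp != actualTimestamp) {
            throw error;
        }
    }

    public static void main(final String[] args) {
        validate("k", "k", "v", "v", 1L, 1L);
        System.out.println("all checks passed");
    }
}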

Example 7 with KsqlServerException

use of io.confluent.ksql.util.KsqlServerException in project ksql by confluentinc.

the class LocalCommandsTest method shouldNotThrowWhenFailToCleanup.

@Test
public void shouldNotThrowWhenFailToCleanup() throws IOException {
    // Given
    final File dir = commandsDir.newFolder();
    LocalCommands localCommands = LocalCommands.open(ksqlEngine, dir);
    doThrow(new KsqlServerException("Error")).when(localCommandsFile).readRecords();
    // When
    localCommands.write(metadata1);
    // Need to create a new local commands in order not to skip the "current" file we just wrote.
    LocalCommands localCommands2 = LocalCommands.open(ksqlEngine, dir);
    localCommands2.write(metadata3);
    localCommands2.processLocalCommandFiles(serviceContext);
    // Then no exception should be thrown
    verify(ksqlEngine).cleanupOrphanedInternalTopics(any(), eq(ImmutableSet.of(QUERY_APP_ID1)));
}
Also used : File(java.io.File) KsqlServerException(io.confluent.ksql.util.KsqlServerException) Test(org.junit.Test)
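
The test stubs a collaborator with Mockito's doThrow(...).when(...) so that it fails with a KsqlServerException, then checks that the caller keeps going and still performs the cleanup. The sketch below shows the same stubbing-and-verification shape with hypothetical Reader and Processor types in place of the ksql fixtures; a plain RuntimeException stands in for KsqlServerException so the sketch has no ksql dependency.

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.junit.Test;

public class SwallowedFailureSketchTest {

    // Hypothetical collaborator, analogous to the stubbed localCommandsFile above.
    public interface Reader {
        String read();
    }

    // Hypothetical caller that swallows read failures instead of rethrowing them.
    static final class Processor {
        private final Reader reader;

        Processor(final Reader reader) {
            this.reader = reader;
        }

        String process() {
            try {
                return reader.read();
            } catch (final RuntimeException e) {
                return "";
            }
        }
    }

    @Test
    public void shouldNotThrowWhenReaderFails() {
        // Given: the collaborator is stubbed to fail, mirroring doThrow(...).readRecords() above
        final Reader reader = mock(Reader.class);
        doThrow(new RuntimeException("Error")).when(reader).read();

        // When
        new Processor(reader).process();

        // Then: no exception propagates, and the failing call was still made
        verify(reader).read();
    }
}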

Example 8 with KsqlServerException

use of io.confluent.ksql.util.KsqlServerException in project ksql by confluentinc.

the class KafkaClusterUtil method getConfig.

public static Config getConfig(final Admin adminClient) {
    try {
        final Collection<Node> brokers = adminClient.describeCluster().nodes().get();
        final Node broker = Iterables.getFirst(brokers, null);
        if (broker == null) {
            LOG.warn("No available broker found to fetch config info.");
            throw new KsqlServerException("AdminClient discovered an empty Kafka Cluster. " + "Check that Kafka is deployed and KSQL is properly configured.");
        }
        final ConfigResource configResource = new ConfigResource(ConfigResource.Type.BROKER, broker.idString());
        final Map<ConfigResource, Config> brokerConfig = ExecutorUtil.executeWithRetries(
            () -> adminClient.describeConfigs(Collections.singleton(configResource)).all().get(),
            ExecutorUtil.RetryBehaviour.ON_RETRYABLE);
        return brokerConfig.get(configResource);
    } catch (final KsqlServerException e) {
        throw e;
    } catch (final ClusterAuthorizationException e) {
        throw new KsqlServerException("Could not get Kafka cluster configuration. "
            + "Please ensure the ksql principal has " + AclOperation.DESCRIBE_CONFIGS + " rights "
            + "on the Kafka cluster." + System.lineSeparator()
            + "See " + DocumentationLinks.SECURITY_REQUIRED_ACLS_DOC_URL + " for more info.", e);
    } catch (final Exception e) {
        throw new KsqlServerException("Could not get Kafka cluster configuration!", e);
    }
}
Also used : Config(org.apache.kafka.clients.admin.Config) Node(org.apache.kafka.common.Node) KsqlServerException(io.confluent.ksql.util.KsqlServerException) ConfigResource(org.apache.kafka.common.config.ConfigResource) ClusterAuthorizationException(org.apache.kafka.common.errors.ClusterAuthorizationException) UnsupportedVersionException(org.apache.kafka.common.errors.UnsupportedVersionException)
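
For context, a hedged usage sketch of calling getConfig from application code and reacting to the KsqlServerException it throws when the lookup fails. The bootstrap address is a placeholder, and the sketch assumes the KafkaClusterUtil class above is importable (its package is not shown in the snippet, so that import is omitted).

import java.util.Properties;

import io.confluent.ksql.util.KsqlServerException;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.Config;

public final class BrokerConfigSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        // Placeholder bootstrap address; point this at a real broker.
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (Admin admin = Admin.create(props)) {
            // KafkaClusterUtil is the class shown above; it is assumed to be on the classpath.
            final Config brokerConfig = KafkaClusterUtil.getConfig(admin);
            System.out.println("Fetched " + brokerConfig.entries().size() + " broker config entries");
        } catch (final KsqlServerException e) {
            // Raised when the cluster reports no brokers or the config lookup fails (see getConfig above).
            System.err.println("Could not fetch broker config: " + e.getMessage());
        }
    }
}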

Example 9 with KsqlServerException

use of io.confluent.ksql.util.KsqlServerException in project ksql by confluentinc.

the class DefaultConnectClient method delete.

@Override
public ConnectResponse<String> delete(final String connector) {
    try {
        LOG.debug("Issuing request to Kafka Connect at URI {} to delete {}", connectUri, connector);
        final ConnectResponse<String> connectResponse = withRetries(() -> Request
            .delete(resolveUri(String.format("%s/%s", CONNECTORS, connector)))
            .setHeaders(requestHeaders)
            .responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
            .connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
            .execute(httpClient)
            .handleResponse(createHandler(
                ImmutableList.of(HttpStatus.SC_NO_CONTENT, HttpStatus.SC_OK),
                new TypeReference<Object>() {},
                foo -> connector)));
        connectResponse.error().ifPresent(error -> LOG.warn("Could not delete connector: {}.", error));
        return connectResponse;
    } catch (final Exception e) {
        throw new KsqlServerException(e);
    }
}
Also used : RetryException(com.github.rholder.retry.RetryException) URISyntaxException(java.net.URISyntaxException) ExecutionException(java.util.concurrent.ExecutionException) KsqlServerException(io.confluent.ksql.util.KsqlServerException) KsqlException(io.confluent.ksql.util.KsqlException)
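
The delete method (and describe below) follows a simple wrap-and-rethrow convention: whatever the underlying HTTP call throws is surfaced to callers as a KsqlServerException with the original failure preserved as the cause. A minimal sketch of that convention, with a Callable standing in for the Fluent HttpClient request chain:

import java.util.concurrent.Callable;

import io.confluent.ksql.util.KsqlServerException;

public final class WrapAndRethrowSketch {

    // Run the request; surface any failure as an unchecked KsqlServerException carrying the original cause.
    static <T> T callOrWrap(final Callable<T> request) {
        try {
            return request.call();
        } catch (final Exception e) {
            throw new KsqlServerException(e);
        }
    }

    public static void main(final String[] args) {
        // Placeholder "request" that simply succeeds.
        System.out.println(callOrWrap(() -> "204 No Content"));
    }
}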

Example 10 with KsqlServerException

use of io.confluent.ksql.util.KsqlServerException in project ksql by confluentinc.

the class DefaultConnectClient method describe.

@Override
public ConnectResponse<ConnectorInfo> describe(final String connector) {
    try {
        LOG.debug("Issuing request to Kafka Connect at URI {} to get config for {}", connectUri, connector);
        final ConnectResponse<ConnectorInfo> connectResponse = withRetries(() -> Request
            .get(resolveUri(String.format("%s/%s", CONNECTORS, connector)))
            .setHeaders(requestHeaders)
            .responseTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
            .connectTimeout(Timeout.ofMilliseconds(requestTimeoutMs))
            .execute(httpClient)
            .handleResponse(createHandler(
                HttpStatus.SC_OK,
                new TypeReference<ConnectorInfo>() {},
                Function.identity())));
        connectResponse.error().ifPresent(error -> LOG.warn("Could not list connectors: {}.", error));
        return connectResponse;
    } catch (final Exception e) {
        throw new KsqlServerException(e);
    }
}
Also used : ConnectorInfo(org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo) RetryException(com.github.rholder.retry.RetryException) URISyntaxException(java.net.URISyntaxException) ExecutionException(java.util.concurrent.ExecutionException) KsqlServerException(io.confluent.ksql.util.KsqlServerException) KsqlException(io.confluent.ksql.util.KsqlException)

Aggregations

KsqlServerException (io.confluent.ksql.util.KsqlServerException): 21 usages
KsqlException (io.confluent.ksql.util.KsqlException): 13 usages
ExecutionException (java.util.concurrent.ExecutionException): 10 usages
RetryException (com.github.rholder.retry.RetryException): 8 usages
URISyntaxException (java.net.URISyntaxException): 8 usages
List (java.util.List): 6 usages
ImmutableList (com.google.common.collect.ImmutableList): 5 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 3 usages
SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings): 3 usages
MetaStore (io.confluent.ksql.metastore.MetaStore): 3 usages
Statement (io.confluent.ksql.parser.tree.Statement): 3 usages
JsonNode (com.fasterxml.jackson.databind.JsonNode): 2 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2 usages
Builder (com.google.common.collect.ImmutableList.Builder): 2 usages
KsqlExecutionContext (io.confluent.ksql.KsqlExecutionContext): 2 usages
ServiceInfo (io.confluent.ksql.ServiceInfo): 2 usages
Analysis (io.confluent.ksql.analyzer.Analysis): 2 usages
ImmutableAnalysis (io.confluent.ksql.analyzer.ImmutableAnalysis): 2 usages
QueryAnalyzer (io.confluent.ksql.analyzer.QueryAnalyzer): 2 usages
RewrittenAnalysis (io.confluent.ksql.analyzer.RewrittenAnalysis): 2 usages