Search in sources :

Example 6 with MaterializationException

use of io.confluent.ksql.execution.streams.materialization.MaterializationException in project ksql by confluentinc.

The class KsLocator, method getMetadataForKeys.

/**
 * Gets the Metadata when looking up a list of keys.  This is used when the set of keys are known.
 * @param keys The non-empty set of keys to lookup metadata for
 * @param filterPartitions The partitions to limit lookups to, if non empty. Partitions which
 *                         exist but are not listed here are omitted. If empty, no filtering is
 *                         done.
 * @return The metadata associated with the keys
 */
private List<PartitionMetadata> getMetadataForKeys(final List<KsqlKey> keys, final Set<Integer> filterPartitions) {
    // Maintain request order for reproducibility by using a LinkedHashMap, even though it's
    // not a guarantee of the API.
    final Map<Integer, KeyQueryMetadata> metadataByPartition = new LinkedHashMap<>();
    final Map<Integer, Set<KsqlKey>> keysByPartition = new HashMap<>();
    for (final KsqlKey key : keys) {
        final KeyQueryMetadata metadata = getKeyQueryMetadata(key);
        // Fail fast if Streams not ready. Let client handle it
        if (metadata.equals(KeyQueryMetadata.NOT_AVAILABLE)) {
            LOG.debug("KeyQueryMetadata not available for state store '{}' and key {}", storeName, key);
            throw new MaterializationException(String.format("Materialized data for key %s is not available yet. " + "Please try again later.", key));
        }
        final int partition = metadata.partition();
        LOG.debug("Handling pull query for key {} in partition {} of state store {}.", key, partition, storeName);
        // Skip keys whose partition is excluded by the caller-supplied filter.
        if (!filterPartitions.isEmpty() && !filterPartitions.contains(partition)) {
            LOG.debug("Ignoring key {} in partition {} because partition is not included in lookup.", key, partition);
            continue;
        }
        // Group keys by partition; LinkedHashSet preserves per-partition request order.
        keysByPartition.computeIfAbsent(partition, k -> new LinkedHashSet<>()).add(key);
        metadataByPartition.putIfAbsent(partition, metadata);
    }
    return metadataByPartition.values().stream().map(metadata -> {
        final HostInfo activeHost = metadata.activeHost();
        final Set<HostInfo> standByHosts = metadata.standbyHosts();
        return new PartitionMetadata(activeHost, standByHosts, metadata.partition(), Optional.of(keysByPartition.get(metadata.partition())));
    }).collect(Collectors.toList());
}
Also used : UNKNOWN_HOST(org.apache.kafka.streams.processor.internals.StreamsMetadataState.UNKNOWN_HOST) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) URL(java.net.URL) HostInfo(org.apache.kafka.streams.state.HostInfo) RoutingFilterFactory(io.confluent.ksql.execution.streams.RoutingFilter.RoutingFilterFactory) URISyntaxException(java.net.URISyntaxException) KafkaStreamsNamedTopologyWrapper(org.apache.kafka.streams.processor.internals.namedtopology.KafkaStreamsNamedTopologyWrapper) LoggerFactory(org.slf4j.LoggerFactory) RoutingOptions(io.confluent.ksql.execution.streams.RoutingOptions) HashMap(java.util.HashMap) RoutingFilter(io.confluent.ksql.execution.streams.RoutingFilter) KsqlHostInfo(io.confluent.ksql.util.KsqlHostInfo) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) TopologyDescription(org.apache.kafka.streams.TopologyDescription) ImmutableList(com.google.common.collect.ImmutableList) Immutable(com.google.errorprone.annotations.Immutable) Host(io.confluent.ksql.execution.streams.RoutingFilter.Host) Objects.requireNonNull(java.util.Objects.requireNonNull) Map(java.util.Map) URI(java.net.URI) LinkedHashSet(java.util.LinkedHashSet) Processor(org.apache.kafka.streams.TopologyDescription.Processor) Logger(org.slf4j.Logger) Collection(java.util.Collection) StreamsMetadata(org.apache.kafka.streams.StreamsMetadata) Set(java.util.Set) Streams(com.google.common.collect.Streams) Locator(io.confluent.ksql.execution.streams.materialization.Locator) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) Objects(java.util.Objects) List(java.util.List) Stream(java.util.stream.Stream) KeyQueryMetadata(org.apache.kafka.streams.KeyQueryMetadata) Subtopology(org.apache.kafka.streams.TopologyDescription.Subtopology) Serializer(org.apache.kafka.common.serialization.Serializer) Optional(java.util.Optional) Preconditions(com.google.common.base.Preconditions) 
VisibleForTesting(com.google.common.annotations.VisibleForTesting) GenericKey(io.confluent.ksql.GenericKey) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Collections(java.util.Collections) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) Topology(org.apache.kafka.streams.Topology) Source(org.apache.kafka.streams.TopologyDescription.Source) HashSet(java.util.HashSet) LinkedHashSet(java.util.LinkedHashSet) Set(java.util.Set) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) KeyQueryMetadata(org.apache.kafka.streams.KeyQueryMetadata) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) LinkedHashMap(java.util.LinkedHashMap) HostInfo(org.apache.kafka.streams.state.HostInfo) KsqlHostInfo(io.confluent.ksql.util.KsqlHostInfo)

Example 7 with MaterializationException

use of io.confluent.ksql.execution.streams.materialization.MaterializationException in project ksql by confluentinc.

The class KsMaterializedWindowTable, method get.

public KsMaterializedQueryResult<WindowedRow> get(final int partition, final Range<Instant> windowStartBounds, final Range<Instant> windowEndBounds, final Optional<Position> position) {
    try {
        // Resolve the timestamped window store backing this partition.
        final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> windowStore = stateStore.store(QueryableStoreTypes.timestampedWindowStore(), partition);
        // Translate the requested window bounds into a fetch range for the store.
        final Instant fetchFrom = calculateLowerBound(windowStartBounds, windowEndBounds);
        final Instant fetchTo = calculateUpperBound(windowStartBounds, windowEndBounds);
        final KeyValueIterator<Windowed<GenericKey>, ValueAndTimestamp<GenericRow>> rawIterator = cacheBypassFetcherAll.fetchAll(windowStore, fetchFrom, fetchTo);
        // Lazily map each store entry to a WindowedRow, dropping windows whose start or
        // end falls outside the requested bounds; the iterator is closed on completion.
        return KsMaterializedQueryResult.rowIterator(Streams.stream(IteratorUtil.onComplete(rawIterator, rawIterator::close)).map(entry -> {
            final Instant start = entry.key.window().startTime();
            final Instant end = entry.key.window().endTime();
            if (!windowStartBounds.contains(start) || !windowEndBounds.contains(end)) {
                return null;
            }
            final TimeWindow window = new TimeWindow(start.toEpochMilli(), end.toEpochMilli());
            return WindowedRow.of(stateStore.schema(), new Windowed<>(entry.key.key(), window), entry.value.value(), entry.value.timestamp());
        }).filter(Objects::nonNull).iterator());
    } catch (final Exception e) {
        throw new MaterializationException("Failed to scan materialized table", e);
    }
}
Also used : ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) Windowed(org.apache.kafka.streams.kstream.Windowed) ReadOnlyWindowStore(org.apache.kafka.streams.state.ReadOnlyWindowStore) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) WindowStoreCacheBypassFetcherAll(io.confluent.ksql.execution.streams.materialization.ks.WindowStoreCacheBypass.WindowStoreCacheBypassFetcherAll) IteratorUtil(io.confluent.ksql.util.IteratorUtil) ImmutableList(com.google.common.collect.ImmutableList) WindowedRow(io.confluent.ksql.execution.streams.materialization.WindowedRow) Windowed(org.apache.kafka.streams.kstream.Windowed) Duration(java.time.Duration) WindowStoreCacheBypassFetcherRange(io.confluent.ksql.execution.streams.materialization.ks.WindowStoreCacheBypass.WindowStoreCacheBypassFetcherRange) Position(org.apache.kafka.streams.query.Position) Range(com.google.common.collect.Range) KeyValue(org.apache.kafka.streams.KeyValue) MaterializedWindowedTable(io.confluent.ksql.execution.streams.materialization.MaterializedWindowedTable) Streams(com.google.common.collect.Streams) Instant(java.time.Instant) StreamsMaterializedWindowedTable(io.confluent.ksql.execution.streams.materialization.StreamsMaterializedWindowedTable) QueryableStoreTypes(org.apache.kafka.streams.state.QueryableStoreTypes) Objects(java.util.Objects) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) WindowStoreIterator(org.apache.kafka.streams.state.WindowStoreIterator) GenericRow(io.confluent.ksql.GenericRow) Optional(java.util.Optional) GenericKey(io.confluent.ksql.GenericKey) Builder(com.google.common.collect.ImmutableList.Builder) TimeWindow(org.apache.kafka.streams.kstream.internals.TimeWindow) WindowStoreCacheBypassFetcher(io.confluent.ksql.execution.streams.materialization.ks.WindowStoreCacheBypass.WindowStoreCacheBypassFetcher) Instant(java.time.Instant) 
Objects(java.util.Objects) GenericKey(io.confluent.ksql.GenericKey) WindowedRow(io.confluent.ksql.execution.streams.materialization.WindowedRow) TimeWindow(org.apache.kafka.streams.kstream.internals.TimeWindow) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException)

Example 8 with MaterializationException

use of io.confluent.ksql.execution.streams.materialization.MaterializationException in project ksql by confluentinc.

The class KsMaterializedWindowTable, method get.

@Override
public KsMaterializedQueryResult<WindowedRow> get(final GenericKey key, final int partition, final Range<Instant> windowStartBounds, final Range<Instant> windowEndBounds, final Optional<Position> position) {
    try {
        // Resolve the timestamped window store backing this partition.
        final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> windowStore = stateStore.store(QueryableStoreTypes.timestampedWindowStore(), partition);
        // Translate the requested window bounds into a fetch range for the store.
        final Instant fetchFrom = calculateLowerBound(windowStartBounds, windowEndBounds);
        final Instant fetchTo = calculateUpperBound(windowStartBounds, windowEndBounds);
        try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> entries = cacheBypassFetcher.fetch(windowStore, key, fetchFrom, fetchTo)) {
            final Builder<WindowedRow> rows = ImmutableList.builder();
            while (entries.hasNext()) {
                final KeyValue<Long, ValueAndTimestamp<GenericRow>> entry = entries.next();
                final Instant start = Instant.ofEpochMilli(entry.key);
                final Instant end = start.plus(windowSize);
                // Only keep windows whose start AND end fall inside the requested bounds.
                if (windowStartBounds.contains(start) && windowEndBounds.contains(end)) {
                    final TimeWindow window = new TimeWindow(start.toEpochMilli(), end.toEpochMilli());
                    rows.add(WindowedRow.of(stateStore.schema(), new Windowed<>(key, window), entry.value.value(), entry.value.timestamp()));
                }
            }
            return KsMaterializedQueryResult.rowIterator(rows.build().iterator());
        }
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
Also used : Instant(java.time.Instant) TimeWindow(org.apache.kafka.streams.kstream.internals.TimeWindow) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) GenericKey(io.confluent.ksql.GenericKey) WindowedRow(io.confluent.ksql.execution.streams.materialization.WindowedRow)

Example 9 with MaterializationException

use of io.confluent.ksql.execution.streams.materialization.MaterializationException in project ksql by confluentinc.

The class KsMaterializedTableIQv2, method get.

@Override
public KsMaterializedQueryResult<Row> get(final int partition, final Optional<Position> position) {
    try {
        // Full-table scan: a range query with no bounds, restricted to one partition.
        final RangeQuery<GenericKey, ValueAndTimestamp<GenericRow>> rangeQuery = RangeQuery.withNoBounds();
        StateQueryRequest<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> request = inStore(stateStore.getStateStoreName()).withQuery(rangeQuery).withPartitions(ImmutableSet.of(partition));
        if (position.isPresent()) {
            // Only serve once the store has caught up to the requested position.
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final StateQueryResult<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> result = stateStore.getKafkaStreams().query(request);
        final QueryResult<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> partitionResult = result.getPartitionResults().get(partition);
        if (partitionResult.isFailure()) {
            throw failedQueryException(partitionResult);
        }
        if (partitionResult.getResult() == null) {
            return KsMaterializedQueryResult.rowIteratorWithPosition(Collections.emptyIterator(), partitionResult.getPosition());
        }
        final KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>> iterator = partitionResult.getResult();
        // Lazily map each entry to a Row; the iterator is closed when exhausted.
        return KsMaterializedQueryResult.rowIteratorWithPosition(
            Streams.stream(IteratorUtil.onComplete(iterator, iterator::close))
                .map(kv -> Row.of(stateStore.schema(), kv.key, kv.value.value(), kv.value.timestamp()))
                .iterator(),
            partitionResult.getPosition());
    } catch (final NotUpToBoundException | MaterializationException e) {
        // Already meaningful to the caller; rethrow unchanged.
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to scan materialized table", e);
    }
}
Also used : ImmutableSet(com.google.common.collect.ImmutableSet) StateQueryRequest.inStore(org.apache.kafka.streams.query.StateQueryRequest.inStore) MaterializedTable(io.confluent.ksql.execution.streams.materialization.MaterializedTable) Position(org.apache.kafka.streams.query.Position) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) Row(io.confluent.ksql.execution.streams.materialization.Row) FailureReason(org.apache.kafka.streams.query.FailureReason) PositionBound(org.apache.kafka.streams.query.PositionBound) RangeQuery(org.apache.kafka.streams.query.RangeQuery) Streams(com.google.common.collect.Streams) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) StreamsMaterializedTable(io.confluent.ksql.execution.streams.materialization.StreamsMaterializedTable) Objects(java.util.Objects) StateQueryRequest(org.apache.kafka.streams.query.StateQueryRequest) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) IteratorUtil(io.confluent.ksql.util.IteratorUtil) StateQueryResult(org.apache.kafka.streams.query.StateQueryResult) ImmutableList(com.google.common.collect.ImmutableList) GenericRow(io.confluent.ksql.GenericRow) Optional(java.util.Optional) GenericKey(io.confluent.ksql.GenericKey) KeyQuery(org.apache.kafka.streams.query.KeyQuery) Collections(java.util.Collections) QueryResult(org.apache.kafka.streams.query.QueryResult) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) GenericRow(io.confluent.ksql.GenericRow) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) GenericKey(io.confluent.ksql.GenericKey)

Example 10 with MaterializationException

use of io.confluent.ksql.execution.streams.materialization.MaterializationException in project ksql by confluentinc.

The class KsMaterializedTableIQv2, method get.

// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
@Override
public KsMaterializedQueryResult<Row> get(final int partition, final GenericKey from, final GenericKey to, final Optional<Position> position) {
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    try {
        // Pick the tightest range query for whichever bounds are supplied (null = unbounded).
        final RangeQuery<GenericKey, ValueAndTimestamp<GenericRow>> rangeQuery;
        if (from == null) {
            rangeQuery = (to == null) ? RangeQuery.withNoBounds() : RangeQuery.withUpperBound(to);
        } else {
            rangeQuery = (to == null) ? RangeQuery.withLowerBound(from) : RangeQuery.withRange(from, to);
        }
        StateQueryRequest<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> request = inStore(stateStore.getStateStoreName()).withQuery(rangeQuery).withPartitions(ImmutableSet.of(partition));
        if (position.isPresent()) {
            // Only serve once the store has caught up to the requested position.
            request = request.withPositionBound(PositionBound.at(position.get()));
        }
        final StateQueryResult<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> result = stateStore.getKafkaStreams().query(request);
        final QueryResult<KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>>> partitionResult = result.getPartitionResults().get(partition);
        if (partitionResult.isFailure()) {
            throw failedQueryException(partitionResult);
        }
        if (partitionResult.getResult() == null) {
            return KsMaterializedQueryResult.rowIteratorWithPosition(Collections.emptyIterator(), partitionResult.getPosition());
        }
        final KeyValueIterator<GenericKey, ValueAndTimestamp<GenericRow>> iterator = partitionResult.getResult();
        // Lazily map each entry to a Row; the iterator is closed when exhausted.
        return KsMaterializedQueryResult.rowIteratorWithPosition(
            Streams.stream(IteratorUtil.onComplete(iterator, iterator::close))
                .map(kv -> Row.of(stateStore.schema(), kv.key, kv.value.value(), kv.value.timestamp()))
                .iterator(),
            partitionResult.getPosition());
    } catch (final NotUpToBoundException | MaterializationException e) {
        // Already meaningful to the caller; rethrow unchanged.
        throw e;
    } catch (final Exception e) {
        throw new MaterializationException("Failed to range scan materialized table", e);
    }
}
Also used : ImmutableSet(com.google.common.collect.ImmutableSet) StateQueryRequest.inStore(org.apache.kafka.streams.query.StateQueryRequest.inStore) MaterializedTable(io.confluent.ksql.execution.streams.materialization.MaterializedTable) Position(org.apache.kafka.streams.query.Position) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) Row(io.confluent.ksql.execution.streams.materialization.Row) FailureReason(org.apache.kafka.streams.query.FailureReason) PositionBound(org.apache.kafka.streams.query.PositionBound) RangeQuery(org.apache.kafka.streams.query.RangeQuery) Streams(com.google.common.collect.Streams) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) StreamsMaterializedTable(io.confluent.ksql.execution.streams.materialization.StreamsMaterializedTable) Objects(java.util.Objects) StateQueryRequest(org.apache.kafka.streams.query.StateQueryRequest) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) IteratorUtil(io.confluent.ksql.util.IteratorUtil) StateQueryResult(org.apache.kafka.streams.query.StateQueryResult) ImmutableList(com.google.common.collect.ImmutableList) GenericRow(io.confluent.ksql.GenericRow) Optional(java.util.Optional) GenericKey(io.confluent.ksql.GenericKey) KeyQuery(org.apache.kafka.streams.query.KeyQuery) Collections(java.util.Collections) QueryResult(org.apache.kafka.streams.query.QueryResult) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) MaterializationException(io.confluent.ksql.execution.streams.materialization.MaterializationException) ValueAndTimestamp(org.apache.kafka.streams.state.ValueAndTimestamp) GenericRow(io.confluent.ksql.GenericRow) KeyValueIterator(org.apache.kafka.streams.state.KeyValueIterator) GenericKey(io.confluent.ksql.GenericKey)

Aggregations

MaterializationException (io.confluent.ksql.execution.streams.materialization.MaterializationException)13 GenericKey (io.confluent.ksql.GenericKey)10 ImmutableList (com.google.common.collect.ImmutableList)7 GenericRow (io.confluent.ksql.GenericRow)7 Objects (java.util.Objects)7 Optional (java.util.Optional)7 ValueAndTimestamp (org.apache.kafka.streams.state.ValueAndTimestamp)7 Streams (com.google.common.collect.Streams)5 WindowedRow (io.confluent.ksql.execution.streams.materialization.WindowedRow)5 IteratorUtil (io.confluent.ksql.util.IteratorUtil)4 Collections (java.util.Collections)4 Position (org.apache.kafka.streams.query.Position)4 KeyValueIterator (org.apache.kafka.streams.state.KeyValueIterator)4 VisibleForTesting (com.google.common.annotations.VisibleForTesting)3 Preconditions (com.google.common.base.Preconditions)3 Instant (java.time.Instant)3 TimeWindow (org.apache.kafka.streams.kstream.internals.TimeWindow)3 FailureReason (org.apache.kafka.streams.query.FailureReason)3 PositionBound (org.apache.kafka.streams.query.PositionBound)3 QueryResult (org.apache.kafka.streams.query.QueryResult)3