Search in sources:

Example 16 with CircuitBreakingException

Use of org.elasticsearch.common.breaker.CircuitBreakingException in project crate by crate.

The class OrderedLuceneBatchIteratorFactoryTest defines the method testSingleCollectorOrderedLuceneBatchIteratorTripsCircuitBreaker.

@Test
public void testSingleCollectorOrderedLuceneBatchIteratorTripsCircuitBreaker() throws Exception {
    // Simulate a tripped breaker: every row-accounting call throws.
    RowAccounting rowAccounting = mock(RowAccounting.class);
    CircuitBreakingException breakerError = new CircuitBreakingException("tripped circuit breaker");
    doThrow(breakerError).when(rowAccounting).accountForAndMaybeBreak(any(Row.class));

    // Build an ordered iterator over a single collector; ordering is on column 0.
    var ordering = OrderingByPosition.rowOrdering(new int[] { 0 }, reverseFlags, nullsFirst);
    BatchIterator<Row> iterator = OrderedLuceneBatchIteratorFactory.newInstance(
        Arrays.asList(createOrderedCollector(searcher1, 1)),
        ordering,
        rowAccounting,
        Runnable::run,
        () -> 2,
        true);

    // Consuming the iterator must surface the breaker exception to the consumer.
    consumeIteratorAndVerifyResultIsException(iterator, breakerError);
}
Also used : RowAccounting(io.crate.breaker.RowAccounting) CircuitBreakingException(org.elasticsearch.common.breaker.CircuitBreakingException) Row(io.crate.data.Row) Test(org.junit.Test)

Example 17 with CircuitBreakingException

Use of org.elasticsearch.common.breaker.CircuitBreakingException in project crate by crate.

The class ReservoirSampler defines the method getSamples.

/**
 * Gathers a reservoir sample of rows across all primary shards of the given table
 * (used to build table statistics for {@code ANALYZE}).
 *
 * <p>Sampling runs in two phases: first only fetch ids (reader id + doc id) are
 * collected into a {@link Reservoir}, then the sampled fetch ids are resolved to
 * row values. See the inline comment below for why the phases are separated.
 *
 * <p>Searchers acquired here are added to {@code searchersToRelease}; they are not
 * closed in this method — presumably the caller releases them. TODO confirm.
 *
 * @param columns            columns to materialize for each sampled row
 * @param maxSamples         maximum number of rows to sample (reservoir capacity)
 * @param docTable           table whose open indices are sampled
 * @param random             randomness source for reservoir replacement
 * @param metadata           cluster metadata used to resolve index names
 * @param coordinatorTxnCtx  transaction context for building column inputs
 * @param streamers          value streamers forwarded into the resulting {@link Samples}
 * @param searchersToRelease collector for acquired searchers (released by caller)
 * @param ramAccounting      memory accounting; sampling stops early if it breaks
 * @return sampled rows plus total doc count and total store size in bytes
 */
private Samples getSamples(List<Reference> columns, int maxSamples, DocTableInfo docTable, Random random, Metadata metadata, CoordinatorTxnCtx coordinatorTxnCtx, List<Streamer> streamers, List<Engine.Searcher> searchersToRelease, RamAccounting ramAccounting) {
    // Account up-front for the memory the fetch-id reservoir can grow to
    // (one LONG per potential sample).
    ramAccounting.addBytes(DataTypes.LONG.fixedSize() * maxSamples);
    Reservoir<Long> fetchIdSamples = new Reservoir<>(maxSamples, random);
    // One DocIdToRow per acquired searcher; indexed by the reader id encoded
    // into each sampled fetch id.
    ArrayList<DocIdToRow> docIdToRowsFunctionPerReader = new ArrayList<>();
    long totalNumDocs = 0;
    long totalSizeInBytes = 0;
    for (String index : docTable.concreteOpenIndices()) {
        var indexMetadata = metadata.index(index);
        if (indexMetadata == null) {
            // Index vanished from metadata since the list was computed; skip it.
            continue;
        }
        var indexService = indicesService.indexService(indexMetadata.getIndex());
        if (indexService == null) {
            // No local shard copies for this index on this node; skip it.
            continue;
        }
        var mapperService = indexService.mapperService();
        FieldTypeLookup fieldTypeLookup = mapperService::fullName;
        // Build the Lucene expressions/inputs that materialize the requested
        // columns from a doc id.
        var ctx = new DocInputFactory(nodeCtx, new LuceneReferenceResolver(indexService.index().getName(), fieldTypeLookup, docTable.partitionedByColumns())).getCtx(coordinatorTxnCtx);
        ctx.add(columns);
        List<Input<?>> inputs = ctx.topLevelInputs();
        List<? extends LuceneCollectorExpression<?>> expressions = ctx.expressions();
        CollectorContext collectorContext = new CollectorContext();
        for (LuceneCollectorExpression<?> expression : expressions) {
            expression.startCollect(collectorContext);
        }
        for (IndexShard indexShard : indexService) {
            // Only sample primaries to avoid counting the same docs on replicas.
            if (!indexShard.routingEntry().primary()) {
                continue;
            }
            try {
                Engine.Searcher searcher = indexShard.acquireSearcher("update-table-statistics");
                searchersToRelease.add(searcher);
                totalNumDocs += searcher.getIndexReader().numDocs();
                totalSizeInBytes += indexShard.storeStats().getSizeInBytes();
                DocIdToRow docIdToRow = new DocIdToRow(searcher, inputs, expressions);
                docIdToRowsFunctionPerReader.add(docIdToRow);
                try {
                    // We do the sampling in 2 phases. First we get the docIds;
                    // then we retrieve the column values for the sampled docIds.
                    // we do this in 2 phases because the reservoir sampling might override previously seen
                    // items and we want to avoid unnecessary disk-lookup
                    var collector = new ReservoirCollector(fetchIdSamples, searchersToRelease.size() - 1);
                    searcher.search(new MatchAllDocsQuery(), collector);
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            } catch (IllegalIndexShardStateException | AlreadyClosedException ignored) {
                // Best-effort sampling: a shard that is closed or not yet started
                // is simply skipped rather than failing the whole operation.
            }
        }
    }
    // Phase 2: resolve the sampled fetch ids to actual row values, accounting
    // for each row's memory as we go.
    var rowAccounting = new RowCellsAccountingWithEstimators(Symbols.typeView(columns), ramAccounting, 0);
    ArrayList<Row> records = new ArrayList<>();
    for (long fetchId : fetchIdSamples.samples()) {
        int readerId = FetchId.decodeReaderId(fetchId);
        DocIdToRow docIdToRow = docIdToRowsFunctionPerReader.get(readerId);
        Object[] row = docIdToRow.apply(FetchId.decodeDocId(fetchId));
        try {
            rowAccounting.accountForAndMaybeBreak(row);
        } catch (CircuitBreakingException e) {
            // Out of budget: keep what we have and produce partial statistics
            // instead of failing the ANALYZE operation.
            LOGGER.info("Stopped gathering samples for `ANALYZE` operation because circuit breaker triggered. " + "Generating statistics with {} instead of {} records", records.size(), maxSamples);
            break;
        }
        records.add(new RowN(row));
    }
    return new Samples(records, streamers, totalNumDocs, totalSizeInBytes);
}
Also used : DocInputFactory(io.crate.execution.engine.collect.DocInputFactory) ArrayList(java.util.ArrayList) UncheckedIOException(java.io.UncheckedIOException) AlreadyClosedException(org.apache.lucene.store.AlreadyClosedException) RowCellsAccountingWithEstimators(io.crate.breaker.RowCellsAccountingWithEstimators) Input(io.crate.data.Input) LuceneReferenceResolver(io.crate.expression.reference.doc.lucene.LuceneReferenceResolver) CollectorContext(io.crate.expression.reference.doc.lucene.CollectorContext) Engine(org.elasticsearch.index.engine.Engine) IndexShard(org.elasticsearch.index.shard.IndexShard) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) MatchAllDocsQuery(org.apache.lucene.search.MatchAllDocsQuery) IllegalIndexShardStateException(org.elasticsearch.index.shard.IllegalIndexShardStateException) RowN(io.crate.data.RowN) FieldTypeLookup(io.crate.lucene.FieldTypeLookup) CircuitBreakingException(org.elasticsearch.common.breaker.CircuitBreakingException) Row(io.crate.data.Row)

Aggregations

CircuitBreakingException (org.elasticsearch.common.breaker.CircuitBreakingException)17 ArrayList (java.util.ArrayList)7 BreakerSettings (org.elasticsearch.indices.breaker.BreakerSettings)5 Row (io.crate.data.Row)4 Client (org.elasticsearch.client.Client)4 CircuitBreaker (org.elasticsearch.common.breaker.CircuitBreaker)4 NoopCircuitBreaker (org.elasticsearch.common.breaker.NoopCircuitBreaker)4 CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString)4 IOException (java.io.IOException)3 IndexRequestBuilder (org.elasticsearch.action.index.IndexRequestBuilder)3 Settings (org.elasticsearch.common.settings.Settings)3 ByteSizeValue (org.elasticsearch.common.unit.ByteSizeValue)3 RowAccounting (io.crate.breaker.RowAccounting)2 InvocationTargetException (java.lang.reflect.InvocationTargetException)2 Method (java.lang.reflect.Method)2 List (java.util.List)2 NodeStats (org.elasticsearch.action.admin.cluster.node.stats.NodeStats)2 NodesStatsResponse (org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse)2 ShardSearchFailure (org.elasticsearch.action.search.ShardSearchFailure)2 ChildMemoryCircuitBreaker (org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker)2