
Example 6 with IntObjectHashMap

use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.

The class ReaderBucketsTest, method test_reader_bucket_accounts_memory_for_added_rows.

@Test
public void test_reader_bucket_accounts_memory_for_added_rows() throws Exception {
    var e = SQLExecutor.builder(clusterService).addTable("create table t1 (x text)").build();
    var t1 = e.resolveTableInfo("t1");
    var x = (Reference) e.asSymbol("x");
    var fetchSource = new FetchSource();
    fetchSource.addFetchIdColumn(new InputColumn(0, DataTypes.LONG));
    fetchSource.addRefToFetch(x);
    var fetchRows = FetchRows.create(
        CoordinatorTxnCtx.systemTransactionContext(),
        TestingHelpers.createNodeContext(),
        Map.of(t1.ident(), fetchSource),
        List.of(
            new FetchReference(new InputColumn(0, DataTypes.LONG), x),
            new InputColumn(1, DataTypes.INTEGER)));
    var bytesAccounted = new AtomicLong();
    var ramAccounting = new BlockBasedRamAccounting(bytes -> bytesAccounted.addAndGet(bytes), 1024);
    int readerId = 1;
    var readerBuckets = new ReaderBuckets(
        fetchRows,
        reader -> fetchSource,
        new EstimateCellsSize(List.of(DataTypes.LONG, DataTypes.INTEGER)),
        ramAccounting);
    long fetchId = FetchId.encode(readerId, 1);
    readerBuckets.add(new RowN(fetchId, 42));
    assertThat(bytesAccounted.get(), is(1024L));
    assertThat(readerBuckets.ramBytesUsed(), is(40L));
    IntObjectHashMap<Bucket> bucketsByReader = new IntObjectHashMap<>();
    bucketsByReader.put(readerId, new CollectionBucket(List.<Object[]>of(new Object[] { "I eat memory for breakfast" })));
    IntHashSet readerIds = new IntHashSet(2);
    readerIds.add(readerId);
    readerBuckets.generateToFetch(readerIds);
    try (var outputRows = readerBuckets.getOutputRows(List.of(bucketsByReader))) {
        assertThat(bytesAccounted.get(), is(1024L));
        assertThat(readerBuckets.ramBytesUsed(), is(136L));
    }
    assertThat("After outputRows are closed the readerBuckets are released", readerBuckets.ramBytesUsed(), is(0L));
}
Also used : FetchSource(io.crate.planner.node.fetch.FetchSource) EstimateCellsSize(io.crate.breaker.EstimateCellsSize) Reference(io.crate.metadata.Reference) FetchReference(io.crate.expression.symbol.FetchReference) IntObjectHashMap(com.carrotsearch.hppc.IntObjectHashMap) IntHashSet(com.carrotsearch.hppc.IntHashSet) AtomicLong(java.util.concurrent.atomic.AtomicLong) BlockBasedRamAccounting(io.crate.breaker.BlockBasedRamAccounting) RowN(io.crate.data.RowN) Bucket(io.crate.data.Bucket) CollectionBucket(io.crate.data.CollectionBucket) InputColumn(io.crate.expression.symbol.InputColumn) Test(org.junit.Test) CrateDummyClusterServiceUnitTest(io.crate.test.integration.CrateDummyClusterServiceUnitTest)
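
The detail worth noting here is that ReaderBuckets keys its per-reader state by a primitive int readerId, which is exactly what IntObjectHashMap is for: int keys without boxing. Below is a minimal, self-contained sketch of that pattern using only the HPPC API; the class name and the values are illustrative and not part of crate.

import com.carrotsearch.hppc.IntObjectHashMap;

import java.util.ArrayList;
import java.util.List;

public class ReaderBucketSketch {

    public static void main(String[] args) {
        // Map reader ids (primitive int keys, no boxing) to the rows collected for that reader.
        IntObjectHashMap<List<Object[]>> rowsByReader = new IntObjectHashMap<>();
        int readerId = 1;

        // get(int) returns null for absent keys, so the usual get-or-create dance needs no boxing.
        List<Object[]> rows = rowsByReader.get(readerId);
        if (rows == null) {
            rows = new ArrayList<>();
            rowsByReader.put(readerId, rows);
        }
        rows.add(new Object[] { "I eat memory for breakfast" });

        System.out.println("rows for reader " + readerId + ": " + rowsByReader.get(readerId).size());
    }
}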

Example 7 with IntObjectHashMap

use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.

The class NodeFetchResponseTest, method testStreaming.

@Test
public void testStreaming() throws Exception {
    IntObjectHashMap<IntContainer> toFetch = new IntObjectHashMap<>();
    IntHashSet docIds = new IntHashSet(3);
    toFetch.put(1, docIds);
    IntObjectMap<Streamer[]> streamers = new IntObjectHashMap<>(1);
    streamers.put(1, new Streamer[] { DataTypes.BOOLEAN.streamer() });
    StreamBucket.Builder builder = new StreamBucket.Builder(streamers.get(1));
    builder.add(new RowN(new Object[] { true }));
    IntObjectHashMap<StreamBucket> fetched = new IntObjectHashMap<>(1);
    fetched.put(1, builder.build());
    NodeFetchResponse orig = NodeFetchResponse.forSending(fetched);
    BytesStreamOutput out = new BytesStreamOutput();
    orig.writeTo(out);
    StreamInput in = StreamInput.wrap(out.bytes());
    // receiving side is required to set the streamers
    NodeFetchResponse streamed = NodeFetchResponse.forReceiveing(streamers);
    streamed.readFrom(in);
    assertThat((Row) Iterables.getOnlyElement(streamed.fetched().get(1)), isRow(true));
}
Also used : IntObjectHashMap(com.carrotsearch.hppc.IntObjectHashMap) IntHashSet(com.carrotsearch.hppc.IntHashSet) BytesStreamOutput(org.elasticsearch.common.io.stream.BytesStreamOutput) RowN(io.crate.data.RowN) IntContainer(com.carrotsearch.hppc.IntContainer) StreamInput(org.elasticsearch.common.io.stream.StreamInput) Test(org.junit.Test)
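
The round trip in this test boils down to a common IntObjectHashMap streaming pattern: write the size, iterate the map with HPPC cursors writing each key and value, and rebuild a fresh map on the receiving side. The sketch below shows that pattern with plain java.io streams rather than Elasticsearch's StreamOutput/StreamInput; String values stand in for StreamBucket and the helper names are made up for illustration.

import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class IntMapStreamingSketch {

    static byte[] write(IntObjectHashMap<String> map) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(map.size());
            // HPPC maps are iterated via cursors: c.key is the primitive key, c.value the value.
            for (IntObjectCursor<String> c : map) {
                out.writeInt(c.key);
                out.writeUTF(c.value);
            }
        }
        return bytes.toByteArray();
    }

    static IntObjectHashMap<String> read(byte[] data) throws IOException {
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
            int size = in.readInt();
            IntObjectHashMap<String> map = new IntObjectHashMap<>(size);
            for (int i = 0; i < size; i++) {
                map.put(in.readInt(), in.readUTF());
            }
            return map;
        }
    }

    public static void main(String[] args) throws IOException {
        IntObjectHashMap<String> orig = new IntObjectHashMap<>(1);
        orig.put(1, "bucket for reader 1");
        IntObjectHashMap<String> streamed = read(write(orig));
        System.out.println(streamed.get(1));
    }
}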

Example 8 with IntObjectHashMap

use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.

The class NodeFetchOperation, method doFetch.

private void doFetch(FetchContext fetchContext,
                     SettableFuture<IntObjectMap<StreamBucket>> resultFuture,
                     IntObjectMap<? extends IntContainer> toFetch) throws Exception {
    final IntObjectHashMap<StreamBucket> fetched = new IntObjectHashMap<>(toFetch.size());
    HashMap<TableIdent, TableFetchInfo> tableFetchInfos = getTableFetchInfos(fetchContext);
    final AtomicReference<Throwable> lastThrowable = new AtomicReference<>(null);
    final AtomicInteger threadLatch = new AtomicInteger(toFetch.size());
    for (IntObjectCursor<? extends IntContainer> toFetchCursor : toFetch) {
        final int readerId = toFetchCursor.key;
        final IntContainer docIds = toFetchCursor.value;
        TableIdent ident = fetchContext.tableIdent(readerId);
        final TableFetchInfo tfi = tableFetchInfos.get(ident);
        assert tfi != null : "tfi must not be null";
        CollectRunnable runnable = new CollectRunnable(
            tfi.createCollector(readerId),
            docIds,
            fetched,
            readerId,
            lastThrowable,
            threadLatch,
            resultFuture,
            fetchContext.isKilled());
        try {
            executor.execute(runnable);
        } catch (EsRejectedExecutionException | RejectedExecutionException e) {
            runnable.run();
        }
    }
}
Also used : IntObjectHashMap(com.carrotsearch.hppc.IntObjectHashMap) TableIdent(io.crate.metadata.TableIdent) AtomicReference(java.util.concurrent.atomic.AtomicReference) StreamBucket(io.crate.executor.transport.StreamBucket) RejectedExecutionException(java.util.concurrent.RejectedExecutionException) EsRejectedExecutionException(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) IntContainer(com.carrotsearch.hppc.IntContainer)
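
doFetch iterates the toFetch map with an IntObjectCursor, submits one collect task per reader and falls back to running the task on the calling thread if the executor rejects it. Here is a self-contained sketch of that dispatch-or-run-inline loop; the String "bucket", the fixed thread pool and the CountDownLatch are stand-ins for crate's StreamBucket, executor and threadLatch, and the synchronized block reflects the assumption that IntObjectHashMap itself is not thread-safe.

import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;

public class PerReaderDispatchSketch {

    public static void main(String[] args) throws InterruptedException {
        // Doc ids to fetch, grouped by reader id.
        IntObjectHashMap<IntArrayList> toFetch = new IntObjectHashMap<>();
        IntArrayList docsForReader1 = new IntArrayList();
        docsForReader1.add(10);
        docsForReader1.add(11);
        toFetch.put(1, docsForReader1);
        IntArrayList docsForReader2 = new IntArrayList();
        docsForReader2.add(20);
        toFetch.put(2, docsForReader2);

        IntObjectHashMap<String> fetched = new IntObjectHashMap<>(toFetch.size());
        CountDownLatch done = new CountDownLatch(toFetch.size());
        ExecutorService executor = Executors.newFixedThreadPool(2);

        for (IntObjectCursor<IntArrayList> cursor : toFetch) {
            // Cursors are reused by the iterator, so copy key and value before capturing them.
            final int readerId = cursor.key;
            final IntArrayList docIds = cursor.value;
            Runnable task = () -> {
                String bucket = "fetched " + docIds.size() + " docs for reader " + readerId;
                synchronized (fetched) {   // IntObjectHashMap is not thread-safe
                    fetched.put(readerId, bucket);
                }
                done.countDown();
            };
            try {
                executor.execute(task);
            } catch (RejectedExecutionException e) {
                task.run();                // same fallback as doFetch: run on the caller thread
            }
        }
        done.await();
        executor.shutdown();
        System.out.println(fetched);
    }
}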

Example 9 with IntObjectHashMap

use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.

The class FetchRows, method create.

public static FetchRows create(TransactionContext txnCtx,
                               NodeContext nodeCtx,
                               Map<RelationName, FetchSource> fetchSourceByTable,
                               List<Symbol> outputSymbols) {
    IntArrayList fetchIdPositions = new IntArrayList();
    ArrayList<Object[]> nullRows = new ArrayList<>();
    IntObjectHashMap<UnsafeArrayRow> fetchedRows = new IntObjectHashMap<>();
    for (var fetchSource : fetchSourceByTable.values()) {
        Object[] nullRow = new Object[fetchSource.references().size()];
        for (InputColumn ic : fetchSource.fetchIdCols()) {
            fetchIdPositions.add(ic.index());
            nullRows.add(nullRow);
            fetchedRows.put(ic.index(), new UnsafeArrayRow());
        }
    }
    final UnsafeArrayRow inputRow = new UnsafeArrayRow();
    var visitor = new BaseImplementationSymbolVisitor<Void>(txnCtx, nodeCtx) {

        @Override
        public Input<?> visitInputColumn(final InputColumn inputColumn, final Void context) {
            final int idx = inputColumn.index();
            return () -> inputRow.get(idx);
        }

        @Override
        public Input<?> visitFetchReference(final FetchReference fetchReference, final Void context) {
            var ref = fetchReference.ref();
            UnsafeArrayRow row = fetchedRows.get(fetchReference.fetchId().index());
            int posInFetchedRow = fetchSourceByTable.get(ref.ident().tableIdent()).references().indexOf(ref);
            return () -> row.get(posInFetchedRow);
        }
    };
    List<Input<?>> outputExpressions = Lists2.map(outputSymbols, x -> x.accept(visitor, null));
    return new FetchRows(fetchIdPositions, outputExpressions, inputRow, fetchedRows, nullRows);
}
Also used : IntObjectHashMap(com.carrotsearch.hppc.IntObjectHashMap) ArrayList(java.util.ArrayList) IntArrayList(com.carrotsearch.hppc.IntArrayList) BaseImplementationSymbolVisitor(io.crate.expression.BaseImplementationSymbolVisitor) Input(io.crate.data.Input) UnsafeArrayRow(io.crate.data.UnsafeArrayRow) InputColumn(io.crate.expression.symbol.InputColumn) FetchReference(io.crate.expression.symbol.FetchReference)
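
Here the IntObjectHashMap acts as a lookup from a fetch-id column index to a mutable row holder; the output expressions close over the holder and a position inside it, so filling the holder later makes the fetched values visible without rebuilding the expressions. A stripped-down sketch of that holder-plus-closure idea, with Object[] standing in for UnsafeArrayRow and all names purely illustrative:

import com.carrotsearch.hppc.IntObjectHashMap;

import java.util.function.Supplier;

public class FetchLookupSketch {

    public static void main(String[] args) {
        // One mutable row holder per fetch-id column, keyed by the column's position (a primitive int).
        IntObjectHashMap<Object[]> fetchedRowByFetchIdCol = new IntObjectHashMap<>();
        int fetchIdColumnIndex = 0;
        fetchedRowByFetchIdCol.put(fetchIdColumnIndex, new Object[1]);

        // An output expression captures the holder and the position inside it, like visitFetchReference.
        Object[] row = fetchedRowByFetchIdCol.get(fetchIdColumnIndex);
        int posInFetchedRow = 0;
        Supplier<Object> output = () -> row[posInFetchedRow];

        // Later, when the fetched cells arrive, the holder is filled in place and the expression sees them.
        row[0] = "fetched value";
        System.out.println(output.get());
    }
}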

Example 10 with IntObjectHashMap

use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.

The class NodeFetchOperation, method doFetch.

private CompletableFuture<? extends IntObjectMap<StreamBucket>> doFetch(FetchTask fetchTask, IntObjectMap<IntArrayList> toFetch) throws Exception {
    HashMap<RelationName, TableFetchInfo> tableFetchInfos = getTableFetchInfos(fetchTask);
    // RamAccounting is per doFetch call instead of per FetchTask/fetchPhase
    // To be able to free up the memory count when the operation is complete
    final var ramAccounting = ConcurrentRamAccounting.forCircuitBreaker("fetch-" + fetchTask.id(), circuitBreaker);
    ArrayList<Supplier<StreamBucket>> collectors = new ArrayList<>(toFetch.size());
    for (IntObjectCursor<IntArrayList> toFetchCursor : toFetch) {
        final int readerId = toFetchCursor.key;
        final IntArrayList docIds = toFetchCursor.value;
        RelationName ident = fetchTask.tableIdent(readerId);
        final TableFetchInfo tfi = tableFetchInfos.get(ident);
        assert tfi != null : "tfi must not be null";
        var collector = tfi.createCollector(readerId, new BlockBasedRamAccounting(ramAccounting::addBytes, BlockBasedRamAccounting.MAX_BLOCK_SIZE_IN_BYTES));
        collectors.add(() -> collector.collect(docIds));
    }
    return ThreadPools.runWithAvailableThreads(executor, ThreadPools.numIdleThreads(executor, numProcessors), collectors).thenApply(buckets -> {
        var toFetchIt = toFetch.iterator();
        assert toFetch.size() == buckets.size() : "Must have a bucket per reader and they must be in the same order";
        IntObjectHashMap<StreamBucket> bucketByReader = new IntObjectHashMap<>(toFetch.size());
        for (var bucket : buckets) {
            assert toFetchIt.hasNext() : "toFetchIt must have an element if there is one in buckets";
            int readerId = toFetchIt.next().key;
            bucketByReader.put(readerId, bucket);
        }
        return bucketByReader;
    }).whenComplete((result, err) -> ramAccounting.close());
}
Also used : IntObjectCursor(com.carrotsearch.hppc.cursors.IntObjectCursor) StreamBucket(io.crate.execution.engine.distribution.StreamBucket) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) RelationName(io.crate.metadata.RelationName) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) RootTask(io.crate.execution.jobs.RootTask) Supplier(java.util.function.Supplier) JobsLogs(io.crate.execution.engine.collect.stats.JobsLogs) ArrayList(java.util.ArrayList) BlockBasedRamAccounting(io.crate.breaker.BlockBasedRamAccounting) IntArrayList(com.carrotsearch.hppc.IntArrayList) Symbols(io.crate.expression.symbol.Symbols) Map(java.util.Map) BiConsumer(java.util.function.BiConsumer) CircuitBreaker(org.elasticsearch.common.breaker.CircuitBreaker) Nullable(javax.annotation.Nullable) LuceneReferenceResolver(io.crate.expression.reference.doc.lucene.LuceneReferenceResolver) Streamer(io.crate.Streamer) Collection(java.util.Collection) IndexService(org.elasticsearch.index.IndexService) Reference(io.crate.metadata.Reference) UUID(java.util.UUID) RamAccounting(io.crate.breaker.RamAccounting) LuceneCollectorExpression(io.crate.expression.reference.doc.lucene.LuceneCollectorExpression) TasksService(io.crate.execution.jobs.TasksService) IntObjectMap(com.carrotsearch.hppc.IntObjectMap) ConcurrentRamAccounting(io.crate.breaker.ConcurrentRamAccounting) IntObjectHashMap(com.carrotsearch.hppc.IntObjectHashMap) SQLExceptions(io.crate.exceptions.SQLExceptions) ThreadPools(io.crate.execution.support.ThreadPools)
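
The thenApply block zips the bucket list back to reader ids by iterating toFetch a second time; as the assertion in the code spells out, this relies on an unmodified HPPC map iterating in the same order every time. A self-contained sketch of that zip step, with Strings standing in for the doc-id containers and the resulting StreamBuckets:

import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class ZipResultsSketch {

    public static void main(String[] args) {
        IntObjectHashMap<String> toFetch = new IntObjectHashMap<>();
        toFetch.put(7, "doc ids for reader 7");
        toFetch.put(3, "doc ids for reader 3");

        // Phase 1: walk the map once and produce one result per entry, in iteration order.
        List<String> buckets = new ArrayList<>(toFetch.size());
        for (IntObjectCursor<String> cursor : toFetch) {
            buckets.add("bucket built from <" + cursor.value + ">");
        }

        // Phase 2: walk the same, unmodified map again; the order matches, so the results
        // can be re-keyed by reader id exactly as the thenApply block in doFetch does.
        IntObjectHashMap<String> bucketByReader = new IntObjectHashMap<>(toFetch.size());
        Iterator<IntObjectCursor<String>> toFetchIt = toFetch.iterator();
        for (String bucket : buckets) {
            bucketByReader.put(toFetchIt.next().key, bucket);
        }
        System.out.println(bucketByReader);
    }
}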

Aggregations

IntObjectHashMap (com.carrotsearch.hppc.IntObjectHashMap): 15
IntArrayList (com.carrotsearch.hppc.IntArrayList): 6
IntHashSet (com.carrotsearch.hppc.IntHashSet): 6
Test (org.junit.Test): 6
HashMap (java.util.HashMap): 5
IntContainer (com.carrotsearch.hppc.IntContainer): 4
IntObjectMap (com.carrotsearch.hppc.IntObjectMap): 4
BlockBasedRamAccounting (io.crate.breaker.BlockBasedRamAccounting): 4
RowN (io.crate.data.RowN): 4
Reference (io.crate.metadata.Reference): 4
ArrayList (java.util.ArrayList): 4
Map (java.util.Map): 4
Streamer (io.crate.Streamer): 3
RamAccounting (io.crate.breaker.RamAccounting): 3
StreamBucket (io.crate.execution.engine.distribution.StreamBucket): 3
FetchReference (io.crate.expression.symbol.FetchReference): 3
InputColumn (io.crate.expression.symbol.InputColumn): 3
RelationName (io.crate.metadata.RelationName): 3
FetchSource (io.crate.planner.node.fetch.FetchSource): 3
UUID (java.util.UUID): 3