Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class ReaderBucketsTest, method test_reader_bucket_accounts_memory_for_added_rows.
@Test
public void test_reader_bucket_accounts_memory_for_added_rows() throws Exception {
    var e = SQLExecutor.builder(clusterService)
        .addTable("create table t1 (x text)")
        .build();
    var t1 = e.resolveTableInfo("t1");
    var x = (Reference) e.asSymbol("x");
    var fetchSource = new FetchSource();
    fetchSource.addFetchIdColumn(new InputColumn(0, DataTypes.LONG));
    fetchSource.addRefToFetch(x);
    var fetchRows = FetchRows.create(
        CoordinatorTxnCtx.systemTransactionContext(),
        TestingHelpers.createNodeContext(),
        Map.of(t1.ident(), fetchSource),
        List.of(
            new FetchReference(new InputColumn(0, DataTypes.LONG), x),
            new InputColumn(1, DataTypes.INTEGER)
        )
    );
    var bytesAccounted = new AtomicLong();
    var ramAccounting = new BlockBasedRamAccounting(bytes -> bytesAccounted.addAndGet(bytes), 1024);
    int readerId = 1;
    var readerBuckets = new ReaderBuckets(
        fetchRows,
        reader -> fetchSource,
        new EstimateCellsSize(List.of(DataTypes.LONG, DataTypes.INTEGER)),
        ramAccounting
    );
    long fetchId = FetchId.encode(readerId, 1);
    readerBuckets.add(new RowN(fetchId, 42));
    assertThat(bytesAccounted.get(), is(1024L));
    assertThat(readerBuckets.ramBytesUsed(), is(40L));
    IntObjectHashMap<Bucket> bucketsByReader = new IntObjectHashMap<>();
    bucketsByReader.put(
        readerId,
        new CollectionBucket(List.<Object[]>of(new Object[] { "I eat memory for breakfast" }))
    );
    IntHashSet readerIds = new IntHashSet(2);
    readerIds.add(readerId);
    readerBuckets.generateToFetch(readerIds);
    try (var outputRows = readerBuckets.getOutputRows(List.of(bucketsByReader))) {
        assertThat(bytesAccounted.get(), is(1024L));
        assertThat(readerBuckets.ramBytesUsed(), is(136L));
    }
    assertThat("After outputRows are closed the readerBuckets are released",
               readerBuckets.ramBytesUsed(), is(0L));
}
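Common to all of the snippets on this page is per-reader state keyed by a primitive int readerId. A minimal, self-contained sketch of the IntObjectHashMap operations involved (put, get, cursor iteration); the names and values are illustrative and not taken from the crate codebase:

import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.util.List;

public class ReaderBucketSketch {

    public static void main(String[] args) {
        // readerId -> rows fetched for that reader; the primitive int key avoids Integer boxing
        IntObjectHashMap<List<Object[]>> bucketsByReader = new IntObjectHashMap<>();
        bucketsByReader.put(1, List.<Object[]>of(new Object[] { "row for reader 1" }));

        // lookup by primitive key; get returns null for absent keys
        List<Object[]> bucket = bucketsByReader.get(1);
        System.out.println("reader 1 has " + (bucket == null ? 0 : bucket.size()) + " row(s)");

        // iteration yields cursors exposing the primitive key and the mapped value
        for (IntObjectCursor<List<Object[]>> cursor : bucketsByReader) {
            System.out.println("reader " + cursor.key + " -> " + cursor.value.size() + " row(s)");
        }
    }
}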
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchResponseTest, method testStreaming.
@Test
public void testStreaming() throws Exception {
    IntObjectHashMap<IntContainer> toFetch = new IntObjectHashMap<>();
    IntHashSet docIds = new IntHashSet(3);
    toFetch.put(1, docIds);
    IntObjectMap<Streamer[]> streamers = new IntObjectHashMap<>(1);
    streamers.put(1, new Streamer[] { DataTypes.BOOLEAN.streamer() });
    StreamBucket.Builder builder = new StreamBucket.Builder(streamers.get(1));
    builder.add(new RowN(new Object[] { true }));
    IntObjectHashMap<StreamBucket> fetched = new IntObjectHashMap<>(1);
    fetched.put(1, builder.build());
    NodeFetchResponse orig = NodeFetchResponse.forSending(fetched);
    BytesStreamOutput out = new BytesStreamOutput();
    orig.writeTo(out);
    StreamInput in = StreamInput.wrap(out.bytes());
    // receiving side is required to set the streamers
    NodeFetchResponse streamed = NodeFetchResponse.forReceiveing(streamers);
    streamed.readFrom(in);
    assertThat((Row) Iterables.getOnlyElement(streamed.fetched().get(1)), isRow(true));
}
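The round trip above ultimately has to encode a map with primitive int keys onto the wire and rebuild it on the receiving side. A rough sketch of one plausible shape for that, a count followed by (key, value) entries, using plain java.io streams; the real NodeFetchResponse uses the project's StreamOutput/StreamInput and Streamer machinery, and String values here merely stand in for StreamBucket:

import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class IntMapStreamingSketch {

    // write the map as: size, then one (key, value) pair per entry
    static void write(DataOutputStream out, IntObjectHashMap<String> map) throws IOException {
        out.writeInt(map.size());
        for (IntObjectCursor<String> cursor : map) {
            out.writeInt(cursor.key);
            out.writeUTF(cursor.value);
        }
    }

    // read it back into a pre-sized IntObjectHashMap
    static IntObjectHashMap<String> read(DataInputStream in) throws IOException {
        int size = in.readInt();
        IntObjectHashMap<String> map = new IntObjectHashMap<>(size);
        for (int i = 0; i < size; i++) {
            map.put(in.readInt(), in.readUTF());
        }
        return map;
    }

    public static void main(String[] args) throws IOException {
        IntObjectHashMap<String> original = new IntObjectHashMap<>();
        original.put(1, "bucket for reader 1");

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), original);
        IntObjectHashMap<String> copy = read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get(1)); // prints "bucket for reader 1"
    }
}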
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchOperation, method doFetch.
private void doFetch(FetchContext fetchContext,
                     SettableFuture<IntObjectMap<StreamBucket>> resultFuture,
                     IntObjectMap<? extends IntContainer> toFetch) throws Exception {
    final IntObjectHashMap<StreamBucket> fetched = new IntObjectHashMap<>(toFetch.size());
    HashMap<TableIdent, TableFetchInfo> tableFetchInfos = getTableFetchInfos(fetchContext);
    final AtomicReference<Throwable> lastThrowable = new AtomicReference<>(null);
    final AtomicInteger threadLatch = new AtomicInteger(toFetch.size());
    for (IntObjectCursor<? extends IntContainer> toFetchCursor : toFetch) {
        final int readerId = toFetchCursor.key;
        final IntContainer docIds = toFetchCursor.value;
        TableIdent ident = fetchContext.tableIdent(readerId);
        final TableFetchInfo tfi = tableFetchInfos.get(ident);
        assert tfi != null : "tfi must not be null";
        CollectRunnable runnable = new CollectRunnable(
            tfi.createCollector(readerId),
            docIds,
            fetched,
            readerId,
            lastThrowable,
            threadLatch,
            resultFuture,
            fetchContext.isKilled()
        );
        try {
            executor.execute(runnable);
        } catch (EsRejectedExecutionException | RejectedExecutionException e) {
            runnable.run();
        }
    }
}
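Two things carry this method: the threadLatch counting down one CollectRunnable per reader, and the fallback of running a task on the calling thread when the executor rejects it. A small standalone sketch of that submit-or-run-inline pattern under a thread-count latch; the executor configuration and names are hypothetical, chosen only so that rejections can actually occur:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class RejectedExecutionFallbackSketch {

    public static void main(String[] args) throws Exception {
        // deliberately tiny pool with no queue capacity so submissions can be rejected
        ExecutorService executor =
            new ThreadPoolExecutor(1, 1, 0L, TimeUnit.SECONDS, new SynchronousQueue<>());
        AtomicInteger remaining = new AtomicInteger(3); // analogous to threadLatch
        CountDownLatch done = new CountDownLatch(1);

        for (int readerId = 1; readerId <= 3; readerId++) {
            Runnable task = () -> {
                // ... collect the bucket for this reader ...
                if (remaining.decrementAndGet() == 0) {
                    done.countDown(); // the last task to finish completes the overall result
                }
            };
            try {
                executor.execute(task);
            } catch (RejectedExecutionException e) {
                task.run(); // fall back to the calling thread, as doFetch does
            }
        }

        done.await();
        executor.shutdown();
    }
}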
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class FetchRows, method create.
public static FetchRows create(TransactionContext txnCtx,
                               NodeContext nodeCtx,
                               Map<RelationName, FetchSource> fetchSourceByTable,
                               List<Symbol> outputSymbols) {
    IntArrayList fetchIdPositions = new IntArrayList();
    ArrayList<Object[]> nullRows = new ArrayList<>();
    IntObjectHashMap<UnsafeArrayRow> fetchedRows = new IntObjectHashMap<>();
    for (var fetchSource : fetchSourceByTable.values()) {
        Object[] nullRow = new Object[fetchSource.references().size()];
        for (InputColumn ic : fetchSource.fetchIdCols()) {
            fetchIdPositions.add(ic.index());
            nullRows.add(nullRow);
            fetchedRows.put(ic.index(), new UnsafeArrayRow());
        }
    }
    final UnsafeArrayRow inputRow = new UnsafeArrayRow();
    var visitor = new BaseImplementationSymbolVisitor<Void>(txnCtx, nodeCtx) {

        @Override
        public Input<?> visitInputColumn(final InputColumn inputColumn, final Void context) {
            final int idx = inputColumn.index();
            return () -> inputRow.get(idx);
        }

        @Override
        public Input<?> visitFetchReference(final FetchReference fetchReference, final Void context) {
            var ref = fetchReference.ref();
            UnsafeArrayRow row = fetchedRows.get(fetchReference.fetchId().index());
            int posInFetchedRow = fetchSourceByTable.get(ref.ident().tableIdent()).references().indexOf(ref);
            return () -> row.get(posInFetchedRow);
        }
    };
    List<Input<?>> outputExpressions = Lists2.map(outputSymbols, x -> x.accept(visitor, null));
    return new FetchRows(fetchIdPositions, outputExpressions, inputRow, fetchedRows, nullRows);
}
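The key move in visitFetchReference is that the IntObjectHashMap lookup happens once, while the output expressions are being built; what is returned is a lambda that captures the mutable row holder and reads the current cell on every call. A stripped-down illustration of that capture-then-read pattern, where Object[] stands in for UnsafeArrayRow and all names are illustrative:

import com.carrotsearch.hppc.IntObjectHashMap;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

public class LazyLookupSketch {

    public static void main(String[] args) {
        // fetch-id column index -> mutable row holder, (re)filled as fetch results arrive
        IntObjectHashMap<Object[]> fetchedRows = new IntObjectHashMap<>();
        fetchedRows.put(0, new Object[1]);

        // output expressions resolve the holder once and read from it lazily
        List<Supplier<Object>> outputs = new ArrayList<>();
        Object[] row = fetchedRows.get(0);
        outputs.add(() -> row[0]);

        row[0] = "first fetched value";
        System.out.println(outputs.get(0).get()); // prints "first fetched value"

        row[0] = "second fetched value";
        System.out.println(outputs.get(0).get()); // prints "second fetched value"
    }
}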
Use of com.carrotsearch.hppc.IntObjectHashMap in project crate by crate.
The class NodeFetchOperation, method doFetch.
private CompletableFuture<? extends IntObjectMap<StreamBucket>> doFetch(FetchTask fetchTask,
                                                                        IntObjectMap<IntArrayList> toFetch) throws Exception {
    HashMap<RelationName, TableFetchInfo> tableFetchInfos = getTableFetchInfos(fetchTask);
    // RamAccounting is per doFetch call instead of per FetchTask/fetchPhase
    // to be able to free up the memory count when the operation is complete
    final var ramAccounting = ConcurrentRamAccounting.forCircuitBreaker("fetch-" + fetchTask.id(), circuitBreaker);
    ArrayList<Supplier<StreamBucket>> collectors = new ArrayList<>(toFetch.size());
    for (IntObjectCursor<IntArrayList> toFetchCursor : toFetch) {
        final int readerId = toFetchCursor.key;
        final IntArrayList docIds = toFetchCursor.value;
        RelationName ident = fetchTask.tableIdent(readerId);
        final TableFetchInfo tfi = tableFetchInfos.get(ident);
        assert tfi != null : "tfi must not be null";
        var collector = tfi.createCollector(
            readerId,
            new BlockBasedRamAccounting(ramAccounting::addBytes, BlockBasedRamAccounting.MAX_BLOCK_SIZE_IN_BYTES)
        );
        collectors.add(() -> collector.collect(docIds));
    }
    return ThreadPools.runWithAvailableThreads(
            executor,
            ThreadPools.numIdleThreads(executor, numProcessors),
            collectors
        ).thenApply(buckets -> {
            var toFetchIt = toFetch.iterator();
            assert toFetch.size() == buckets.size() : "Must have a bucket per reader and they must be in the same order";
            IntObjectHashMap<StreamBucket> bucketByReader = new IntObjectHashMap<>(toFetch.size());
            for (var bucket : buckets) {
                assert toFetchIt.hasNext() : "toFetchIt must have an element if there is one in buckets";
                int readerId = toFetchIt.next().key;
                bucketByReader.put(readerId, bucket);
            }
            return bucketByReader;
        }).whenComplete((result, err) -> ramAccounting.close());
}
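The thenApply callback matches each result bucket back to its readerId purely by position: the collectors were created in the map's iteration order, and the same unmodified map instance is iterated again to recover the keys (re-iterating an unchanged hppc map scans the same internal array, so the order is stable). A small standalone sketch of that zip-by-iteration-order idea, with illustrative names and String values in place of StreamBucket:

import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntObjectCursor;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class ZipByIterationOrderSketch {

    public static void main(String[] args) {
        IntObjectHashMap<String> toFetch = new IntObjectHashMap<>();
        toFetch.put(1, "docs for reader 1");
        toFetch.put(2, "docs for reader 2");

        // simulate per-reader results produced in the same order the map was iterated
        List<String> buckets = new ArrayList<>();
        for (IntObjectCursor<String> cursor : toFetch) {
            buckets.add("bucket built from " + cursor.value);
        }

        // zip keys and results back together, relying on the identical iteration order
        Iterator<IntObjectCursor<String>> keys = toFetch.iterator();
        IntObjectHashMap<String> bucketByReader = new IntObjectHashMap<>(toFetch.size());
        for (String bucket : buckets) {
            bucketByReader.put(keys.next().key, bucket);
        }

        for (IntObjectCursor<String> cursor : bucketByReader) {
            System.out.println("reader " + cursor.key + " -> " + cursor.value);
        }
    }
}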