Use of io.crate.breaker.EstimateCellsSize in project crate by crate.
From class FetchProjector, method create:
public static Projector create(FetchProjection projection,
                               RamAccounting ramAccounting,
                               LongSupplier getBucketsBytesThreshold,
                               TransactionContext txnCtx,
                               NodeContext nodeCtx,
                               FetchOperation fetchOperation) {
    final FetchRows fetchRows = FetchRows.create(
        txnCtx, nodeCtx, projection.fetchSources(), projection.outputSymbols());
    // Estimates the size of each incoming row from the projection's input column types
    EstimateCellsSize estimateRowSize = new EstimateCellsSize(projection.inputTypes());
    return (BatchIterator<Row> source) -> {
        final long maxBucketsSizeInBytes = getBucketsBytesThreshold.getAsLong();
        // Partition the source into ReaderBuckets, cutting a partition early once the
        // accounted size of the buffered rows exceeds the threshold
        BatchIterator<ReaderBuckets> buckets = BatchIterators.partition(
            source,
            projection.getFetchSize(),
            () -> new ReaderBuckets(fetchRows, projection::getFetchSourceByReader, estimateRowSize, ramAccounting),
            ReaderBuckets::add,
            readerBuckets -> readerBuckets.ramBytesUsed() > maxBucketsSizeInBytes);
        return new AsyncFlatMapBatchIterator<>(buckets, new FetchMapper(fetchOperation, projection.nodeReaders()));
    };
}
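In this method, EstimateCellsSize is built from the projection's input column types and handed to each ReaderBuckets instance, so the partitioning predicate can cut a batch as soon as readerBuckets.ramBytesUsed() exceeds the threshold supplied by getBucketsBytesThreshold. The sketch below is a hypothetical, simplified stand-in for that pattern: the class SimpleRowSizeEstimator, its per-type byte sizes, and partitionByEstimatedSize are assumptions for illustration only, not Crate's actual EstimateCellsSize or BatchIterators.partition API.

import java.util.ArrayList;
import java.util.List;

// Hypothetical illustration of the pattern above: estimate each row's size from its
// column types and start a new batch once an accumulated byte threshold is exceeded.
// Not Crate's EstimateCellsSize; the per-type sizes are rough assumptions.
final class SimpleRowSizeEstimator {

    enum ColumnType { LONG, INTEGER, TEXT }

    private final List<ColumnType> columnTypes;

    SimpleRowSizeEstimator(List<ColumnType> columnTypes) {
        this.columnTypes = columnTypes;
    }

    long estimate(Object[] cells) {
        long size = 0;
        for (int i = 0; i < cells.length; i++) {
            switch (columnTypes.get(i)) {
                case LONG -> size += Long.BYTES;
                case INTEGER -> size += Integer.BYTES;
                // Rough guess: 2 bytes per char plus a small object overhead
                case TEXT -> size += cells[i] == null ? 0 : ((String) cells[i]).length() * 2L + 16;
            }
        }
        return size;
    }

    // Groups rows into batches whose estimated size stays below maxBatchBytes
    // (each batch keeps at least one row, even if that row alone exceeds the limit).
    static List<List<Object[]>> partitionByEstimatedSize(List<Object[]> rows,
                                                         SimpleRowSizeEstimator estimator,
                                                         long maxBatchBytes) {
        List<List<Object[]>> batches = new ArrayList<>();
        List<Object[]> current = new ArrayList<>();
        long currentBytes = 0;
        for (Object[] row : rows) {
            long rowBytes = estimator.estimate(row);
            if (!current.isEmpty() && currentBytes + rowBytes > maxBatchBytes) {
                batches.add(current);
                current = new ArrayList<>();
                currentBytes = 0;
            }
            current.add(row);
            currentBytes += rowBytes;
        }
        if (!current.isEmpty()) {
            batches.add(current);
        }
        return batches;
    }
}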
Use of io.crate.breaker.EstimateCellsSize in project crate by crate.
From class ReaderBucketsTest, method test_reader_bucket_accounts_memory_for_added_rows:
@Test
public void test_reader_bucket_accounts_memory_for_added_rows() throws Exception {
    var e = SQLExecutor.builder(clusterService)
        .addTable("create table t1 (x text)")
        .build();
    var t1 = e.resolveTableInfo("t1");
    var x = (Reference) e.asSymbol("x");
    var fetchSource = new FetchSource();
    fetchSource.addFetchIdColumn(new InputColumn(0, DataTypes.LONG));
    fetchSource.addRefToFetch(x);
    var fetchRows = FetchRows.create(
        CoordinatorTxnCtx.systemTransactionContext(),
        TestingHelpers.createNodeContext(),
        Map.of(t1.ident(), fetchSource),
        List.of(
            new FetchReference(new InputColumn(0, DataTypes.LONG), x),
            new InputColumn(1, DataTypes.INTEGER)));
    var bytesAccounted = new AtomicLong();
    // Accounts memory in 1024-byte blocks and records every reservation
    var ramAccounting = new BlockBasedRamAccounting(bytes -> bytesAccounted.addAndGet(bytes), 1024);
    int readerId = 1;
    var readerBuckets = new ReaderBuckets(
        fetchRows,
        reader -> fetchSource,
        new EstimateCellsSize(List.of(DataTypes.LONG, DataTypes.INTEGER)),
        ramAccounting);
    long fetchId = FetchId.encode(readerId, 1);
    readerBuckets.add(new RowN(fetchId, 42));
    // A single small row already reserves a full 1024-byte block, while the
    // estimated size tracked by the buckets themselves is much smaller
    assertThat(bytesAccounted.get(), is(1024L));
    assertThat(readerBuckets.ramBytesUsed(), is(40L));
    IntObjectHashMap<Bucket> bucketsByReader = new IntObjectHashMap<>();
    bucketsByReader.put(
        readerId,
        new CollectionBucket(List.<Object[]>of(new Object[] { "I eat memory for breakfast" })));
    IntHashSet readerIds = new IntHashSet(2);
    readerIds.add(readerId);
    readerBuckets.generateToFetch(readerIds);
    try (var outputRows = readerBuckets.getOutputRows(List.of(bucketsByReader))) {
        // The fetched cells raise the buckets' estimated usage, still within the first block
        assertThat(bytesAccounted.get(), is(1024L));
        assertThat(readerBuckets.ramBytesUsed(), is(136L));
    }
    assertThat("After outputRows are closed the readerBuckets are released",
               readerBuckets.ramBytesUsed(), is(0L));
}
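The 1024L vs. 40L assertions come from the block-based accounting used in the test: memory is reserved through the provided callback in whole blocks (1024 bytes here), while ReaderBuckets tracks the finer-grained estimate produced by EstimateCellsSize. Below is a hypothetical sketch of that block-reservation idea; the class BlockAccountingSketch and its methods are illustrative assumptions, not Crate's BlockBasedRamAccounting API.

import java.util.function.LongConsumer;

// Hypothetical sketch of block-based accounting as exercised in the test above:
// usage is reported to the callback in fixed-size blocks, so a 40-byte row already
// shows up as a 1024-byte reservation. Names here are assumptions for illustration.
final class BlockAccountingSketch {

    private final LongConsumer reserveBytes;  // e.g. a circuit-breaker callback
    private final long blockSize;
    private long usedBytes;      // precise, estimator-level usage
    private long reservedBytes;  // usage rounded up to whole blocks

    BlockAccountingSketch(LongConsumer reserveBytes, long blockSize) {
        this.reserveBytes = reserveBytes;
        this.blockSize = blockSize;
    }

    void addBytes(long bytes) {
        usedBytes += bytes;
        // Reserve additional whole blocks only when the precise usage
        // outgrows what has already been reserved.
        while (usedBytes > reservedBytes) {
            reserveBytes.accept(blockSize);
            reservedBytes += blockSize;
        }
    }

    long usedBytes() {
        return usedBytes;
    }
}

With a block size of 1024, addBytes(40) reports a single 1024-byte reservation to the callback while usedBytes() stays at 40, mirroring the bytesAccounted and ramBytesUsed assertions in the test.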