Use of io.crate.data.CollectionBucket in project crate by crate.
From class ReaderBucketsTest, method test_reader_bucket_accounts_memory_for_added_rows.
@Test
public void test_reader_bucket_accounts_memory_for_added_rows() throws Exception {
    var e = SQLExecutor.builder(clusterService)
        .addTable("create table t1 (x text)")
        .build();
    var t1 = e.resolveTableInfo("t1");
    var x = (Reference) e.asSymbol("x");
    var fetchSource = new FetchSource();
    fetchSource.addFetchIdColumn(new InputColumn(0, DataTypes.LONG));
    fetchSource.addRefToFetch(x);
    var fetchRows = FetchRows.create(
        CoordinatorTxnCtx.systemTransactionContext(),
        TestingHelpers.createNodeContext(),
        Map.of(t1.ident(), fetchSource),
        List.of(
            new FetchReference(new InputColumn(0, DataTypes.LONG), x),
            new InputColumn(1, DataTypes.INTEGER)));
    var bytesAccounted = new AtomicLong();
    // Memory is reserved in 1024-byte blocks.
    var ramAccounting = new BlockBasedRamAccounting(bytes -> bytesAccounted.addAndGet(bytes), 1024);
    int readerId = 1;
    var readerBuckets = new ReaderBuckets(
        fetchRows,
        reader -> fetchSource,
        new EstimateCellsSize(List.of(DataTypes.LONG, DataTypes.INTEGER)),
        ramAccounting);
    long fetchId = FetchId.encode(readerId, 1);
    readerBuckets.add(new RowN(fetchId, 42));
    // Adding a single row reserves one full block and tracks the estimated row size.
    assertThat(bytesAccounted.get(), is(1024L));
    assertThat(readerBuckets.ramBytesUsed(), is(40L));
    IntObjectHashMap<Bucket> bucketsByReader = new IntObjectHashMap<>();
    bucketsByReader.put(readerId, new CollectionBucket(List.<Object[]>of(new Object[] { "I eat memory for breakfast" })));
    IntHashSet readerIds = new IntHashSet(2);
    readerIds.add(readerId);
    readerBuckets.generateToFetch(readerIds);
    try (var outputRows = readerBuckets.getOutputRows(List.of(bucketsByReader))) {
        // The fetched bucket is now accounted for as well.
        assertThat(bytesAccounted.get(), is(1024L));
        assertThat(readerBuckets.ramBytesUsed(), is(136L));
    }
    assertThat("After outputRows are closed the readerBuckets are released",
               readerBuckets.ramBytesUsed(), is(0L));
}
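
For orientation, here is a minimal, hypothetical sketch (not taken from the crate sources) of what the tests in this section rely on: the CollectionBucket constructor takes a list of Object[] cells, a Bucket can be iterated row by row, and Row.materialize() copies a row's cells into an Object[]. The class name is made up for illustration.

import io.crate.data.Bucket;
import io.crate.data.CollectionBucket;
import io.crate.data.Row;
import java.util.Arrays;
import java.util.List;

public class CollectionBucketSketch {

    public static void main(String[] args) {
        // Wrap a list of row cells; each Object[] becomes one Row of the bucket.
        List<Object[]> cells = Arrays.asList(new Object[] { "a", 1 }, new Object[] { "b", 2 });
        Bucket bucket = new CollectionBucket(cells);
        // A Bucket is iterable row by row; materialize() copies a row's cells into an Object[].
        for (Row row : bucket) {
            System.out.println(Arrays.toString(row.materialize()));
        }
    }
}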
Use of io.crate.data.CollectionBucket in project crate by crate.
From class DistResultRXTaskTest, method testPagingWithSortedPagingIterator.
@Test
public void testPagingWithSortedPagingIterator() throws Throwable {
    TestingRowConsumer batchConsumer = new TestingRowConsumer();
    DistResultRXTask ctx = getPageDownstreamContext(
        batchConsumer,
        new SortedPagingIterator<>(Comparator.comparingInt(r -> (int) r.get(0)), false),
        2);
    Bucket b1 = new ArrayBucket(new Object[][] { new Object[] { 1 }, new Object[] { 1 } });
    Bucket b11 = new ArrayBucket(new Object[][] { new Object[] { 2 }, new Object[] { 2 } });
    PageBucketReceiver bucketReceiver = ctx.getBucketReceiver((byte) 0);
    assertThat(bucketReceiver, notNullValue());
    // Once the downstream asks for more rows, send the second (final) bucket for the same upstream.
    bucketReceiver.setBucket(0, b1, false, needMore -> {
        if (needMore) {
            bucketReceiver.setBucket(0, b11, true, mock(PageResultListener.class));
        }
    });
    Bucket b2 = new ArrayBucket(new Object[][] { new Object[] { 4 } });
    bucketReceiver.setBucket(1, b2, true, mock(PageResultListener.class));
    List<Object[]> result = batchConsumer.getResult();
    assertThat(TestingHelpers.printedTable(new CollectionBucket(result)),
               is("1\n" +
                  "1\n" +
                  "2\n" +
                  "2\n" +
                  "4\n"));
}
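
To make the expected output above concrete, the following rough, hand-rolled sketch merges the same three buckets with the same comparator the SortedPagingIterator was given. It is not how DistResultRXTask works internally; only ArrayBucket, Bucket, Row and materialize() come from the test above, the class name is hypothetical.

import io.crate.data.ArrayBucket;
import io.crate.data.Bucket;
import io.crate.data.Row;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class SortedMergeSketch {

    public static void main(String[] args) {
        Bucket b1 = new ArrayBucket(new Object[][] { new Object[] { 1 }, new Object[] { 1 } });
        Bucket b11 = new ArrayBucket(new Object[][] { new Object[] { 2 }, new Object[] { 2 } });
        Bucket b2 = new ArrayBucket(new Object[][] { new Object[] { 4 } });
        List<Object[]> merged = new ArrayList<>();
        for (Bucket bucket : List.of(b1, b11, b2)) {
            for (Row row : bucket) {
                merged.add(row.materialize());
            }
        }
        // Same ordering criterion as the test: ascending by the int in column 0.
        merged.sort(Comparator.comparingInt(cells -> (int) cells[0]));
        // Prints 1, 1, 2, 2, 4 -- matching the printedTable assertion above.
        merged.forEach(cells -> System.out.println(cells[0]));
    }
}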
Use of io.crate.data.CollectionBucket in project crate by crate.
From class DistResultRXTaskTest, method test_batch_iterator_is_completed_exceptionally_if_merge_buckets_on_next_page_fails.
@Test
public void test_batch_iterator_is_completed_exceptionally_if_merge_buckets_on_next_page_fails() throws Exception {
    TestingRowConsumer batchConsumer = new TestingRowConsumer();
    DistResultRXTask ctx = getPageDownstreamContext(batchConsumer, new FailOnMergePagingIterator<>(2), 2);
    PageBucketReceiver bucketReceiver = ctx.getBucketReceiver((byte) 0);
    assertThat(bucketReceiver, notNullValue());
    PageResultListener pageResultListener = mock(PageResultListener.class);
    Bucket bucket = new CollectionBucket(Collections.singletonList(new Object[] { "foo" }));
    bucketReceiver.setBucket(0, bucket, false, pageResultListener);
    bucketReceiver.setBucket(1, bucket, false, pageResultListener);
    bucketReceiver.setBucket(0, bucket, true, pageResultListener);
    bucketReceiver.setBucket(1, bucket, true, pageResultListener);
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("raised on merge");
    batchConsumer.getResult();
}
Use of io.crate.data.CollectionBucket in project crate by crate.
From class DistResultRXTaskTest, method test_batch_iterator_is_completed_exceptionally_if_first_merge_buckets_fails.
@Test
public void test_batch_iterator_is_completed_exceptionally_if_first_merge_buckets_fails() throws Exception {
    TestingRowConsumer batchConsumer = new TestingRowConsumer();
    DistResultRXTask ctx = getPageDownstreamContext(batchConsumer, new FailOnMergePagingIterator<>(1), 1);
    PageBucketReceiver bucketReceiver = ctx.getBucketReceiver((byte) 0);
    assertThat(bucketReceiver, notNullValue());
    PageResultListener pageResultListener = mock(PageResultListener.class);
    Bucket bucket = new CollectionBucket(Collections.singletonList(new Object[] { "foo" }));
    bucketReceiver.setBucket(0, bucket, true, pageResultListener);
    expectedException.expect(RuntimeException.class);
    expectedException.expectMessage("raised on merge");
    batchConsumer.getResult();
}
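
Both failure tests above rely on JUnit 4's ExpectedException rule. Assuming a JUnit version that provides assertThrows (4.13 or later), the same expectation could be sketched more directly; this is an illustrative alternative, not what the crate tests use.

// Requires org.junit.Assert.assertThrows (JUnit 4.13+) and org.hamcrest.Matchers.containsString.
RuntimeException failure = assertThrows(RuntimeException.class, batchConsumer::getResult);
assertThat(failure.getMessage(), containsString("raised on merge"));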
Use of io.crate.data.CollectionBucket in project crate by crate.
From class TableFunctionApplierTest, method testFunctionsAreApplied.
@Test
public void testFunctionsAreApplied() {
Input<Iterable<Row>> fstFunc = () -> new CollectionBucket(Arrays.asList(new Object[] { 1 }, new Object[] { 2 }, new Object[] { 3 }));
Input<Iterable<Row>> sndFunc = () -> new CollectionBucket(Arrays.asList(new Object[] { 4 }, new Object[] { 5 }));
TableFunctionApplier tableFunctionApplier = new TableFunctionApplier(Arrays.asList(fstFunc, sndFunc), Collections.singletonList(Literal.of(10)), Collections.emptyList());
Iterator<Row> iterator = tableFunctionApplier.apply(new RowN(0));
assertThat(iterator.next().materialize(), is(new Object[] { 1, 4, 10 }));
assertThat(iterator.next().materialize(), is(new Object[] { 2, 5, 10 }));
assertThat(iterator.next().materialize(), is(new Object[] { 3, null, 10 }));
assertThat(iterator.hasNext(), is(false));
}
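
The assertions above amount to zipping the two functions' outputs column by column, padding the shorter one with null and appending the literal column. The following rough stand-alone sketch reproduces that behavior by hand; it is not the real TableFunctionApplier, and only CollectionBucket, Bucket and Row are taken from crate.

import io.crate.data.Bucket;
import io.crate.data.CollectionBucket;
import io.crate.data.Row;
import java.util.Arrays;
import java.util.Iterator;

public class ZipFunctionsSketch {

    public static void main(String[] args) {
        Bucket fst = new CollectionBucket(Arrays.asList(new Object[] { 1 }, new Object[] { 2 }, new Object[] { 3 }));
        Bucket snd = new CollectionBucket(Arrays.asList(new Object[] { 4 }, new Object[] { 5 }));
        Iterator<Row> it1 = fst.iterator();
        Iterator<Row> it2 = snd.iterator();
        Object literal = 10;
        while (it1.hasNext() || it2.hasNext()) {
            // Missing values from the shorter function are filled with null.
            Object a = it1.hasNext() ? it1.next().get(0) : null;
            Object b = it2.hasNext() ? it2.next().get(0) : null;
            // Prints [1, 4, 10], [2, 5, 10], [3, null, 10] -- the rows asserted above.
            System.out.println(Arrays.toString(new Object[] { a, b, literal }));
        }
    }
}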