Use of io.trino.spi.block.LazyBlock in project trino by trinodb.
In class TestMergePages, method testMinRowCountThresholdWithLazyPages:
@Test
public void testMinRowCountThresholdWithLazyPages() {
    Page page = createSequencePage(TYPES, 10);
    LazyBlock channel1 = lazyWrapper(page.getBlock(0));
    LazyBlock channel2 = lazyWrapper(page.getBlock(1));
    LazyBlock channel3 = lazyWrapper(page.getBlock(2));
    page = new Page(channel1, channel2, channel3);
    WorkProcessor<Page> mergePages = mergePages(TYPES, 1024 * 1024, page.getPositionCount() * 2, Integer.MAX_VALUE, pagesSource(page), newSimpleAggregatedMemoryContext());
    assertTrue(mergePages.process());
    assertFalse(mergePages.isFinished());
    Page result = mergePages.getResult();
    assertFalse(channel1.isLoaded());
    assertFalse(channel2.isLoaded());
    assertFalse(channel3.isLoaded());
    assertPageEquals(TYPES, result, page);
}
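
The lazyWrapper helper is defined elsewhere in TestMergePages and is not shown above. A minimal sketch of what such a wrapper typically looks like, assuming only the SPI methods Block.getPositionCount() and Block.getLoadedBlock(): the loader hands back the already materialized block, so loading is trivial and the test can use isLoaded() to observe whether mergePages touched the channel.

private static LazyBlock lazyWrapper(Block block) {
    // The loader just returns the materialized block; isLoaded() stays false
    // until some consumer actually asks for the data.
    return new LazyBlock(block.getPositionCount(), block::getLoadedBlock);
}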

Use of io.trino.spi.block.LazyBlock in project trino by trinodb.
In class TestPageProcessor, method testProjectLazyLoad:
@Test
public void testProjectLazyLoad() {
    PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectAllFilter()), ImmutableList.of(new LazyPagePageProjection()), OptionalInt.of(MAX_BATCH_SIZE));
    // If channel 1 is loaded, the test will fail
    Page inputPage = new Page(createLongSequenceBlock(0, 100), new LazyBlock(100, () -> {
        throw new AssertionError("Lazy block should not be loaded");
    }));
    LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName());
    Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage);
    List<Optional<Page>> outputPages = ImmutableList.copyOf(output);
    assertEquals(outputPages.size(), 1);
    assertPageEquals(ImmutableList.of(BIGINT), outputPages.get(0).orElse(null), new Page(createLongSequenceBlock(0, 100)));
}
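
The loader that throws an AssertionError works as a tripwire because LazyBlock does not invoke its loader at construction time; only the first materializing access does. A self-contained sketch of that behavior, assuming the LongArrayBlock(int, Optional, long[]) constructor and Block.getLoadedBlock() from the same SPI version:

import io.trino.spi.block.LazyBlock;
import io.trino.spi.block.LongArrayBlock;
import java.util.Optional;

public final class LazyBlockDemo {
    public static void main(String[] args) {
        LazyBlock lazy = new LazyBlock(3, () -> {
            System.out.println("loader invoked");
            return new LongArrayBlock(3, Optional.empty(), new long[] {1, 2, 3});
        });
        System.out.println(lazy.isLoaded()); // false: construction does not run the loader
        lazy.getLoadedBlock();               // first materializing access runs it once
        System.out.println(lazy.isLoaded()); // true
    }
}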

Use of io.trino.spi.block.LazyBlock in project trino by trinodb.
In class DeltaLakeWriter, method appendRows:
@Override
public void appendRows(Page originalPage) {
    Page page = originalPage;
    if (timestampColumnIndices.size() > 0) {
        Block[] translatedBlocks = new Block[originalPage.getChannelCount()];
        for (int index = 0; index < translatedBlocks.length; index++) {
            Block originalBlock = originalPage.getBlock(index);
            if (timestampColumnIndices.contains(index)) {
                translatedBlocks[index] = new LazyBlock(originalBlock.getPositionCount(), new TimestampTranslationBlockLoader(originalBlock));
            } else {
                translatedBlocks[index] = originalBlock;
            }
        }
        page = new Page(originalPage.getPositionCount(), translatedBlocks);
    }
    stats.addInputPageSizesInBytes(page.getRetainedSizeInBytes());
    fileWriter.appendRows(page);
    rowCount += page.getPositionCount();
    inputSizeInBytes += page.getSizeInBytes();
}
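
TimestampTranslationBlockLoader is an inner class of DeltaLakeWriter and is not reproduced on this page. As a hypothetical sketch of the pattern rather than the actual Delta Lake implementation (assuming LazyBlockLoader's single method is Block load(), as in this SPI version), the loader defers the per-value conversion until the file writer actually reads that channel:

import io.trino.spi.block.Block;
import io.trino.spi.block.LazyBlockLoader;

final class TranslationBlockLoaderSketch implements LazyBlockLoader {
    private final Block source;

    TranslationBlockLoaderSketch(Block source) {
        this.source = source;
    }

    @Override
    public Block load() {
        // A real loader would rebuild the block with translated timestamp values;
        // returning the materialized source keeps this sketch self-contained.
        return source.getLoadedBlock();
    }
}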

Use of io.trino.spi.block.LazyBlock in project trino by trinodb.
In class UnionColumnReader, method getBlocks:
private Block[] getBlocks(int positionCount) throws IOException {
    if (dataStream == null) {
        throw new OrcCorruptionException(column.getOrcDataSourceId(), "Value is not null but data stream is missing");
    }
    Block[] blocks = new Block[fieldReaders.size() + 1];
    byte[] tags = dataStream.next(positionCount);
    blocks[0] = new ByteArrayBlock(positionCount, Optional.empty(), tags);
    boolean[][] valueIsNonNull = new boolean[fieldReaders.size()][positionCount];
    int[] nonNullValueCount = new int[fieldReaders.size()];
    for (int i = 0; i < positionCount; i++) {
        valueIsNonNull[tags[i]][i] = true;
        nonNullValueCount[tags[i]]++;
    }
    for (int i = 0; i < fieldReaders.size(); i++) {
        Type fieldType = type.getTypeParameters().get(i + 1);
        if (nonNullValueCount[i] > 0) {
            ColumnReader reader = fieldReaders.get(i);
            reader.prepareNextRead(nonNullValueCount[i]);
            Block rawBlock = blockFactory.createBlock(nonNullValueCount[i], reader::readBlock, true);
            blocks[i + 1] = new LazyBlock(positionCount, new UnpackLazyBlockLoader(rawBlock, fieldType, valueIsNonNull[i]));
        } else {
            blocks[i + 1] = new RunLengthEncodedBlock(fieldType.createBlockBuilder(null, 1).appendNull().build(), positionCount);
        }
    }
    return blocks;
}
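
UnpackLazyBlockLoader is not shown here; its job is to re-inflate the densely packed field block back to the full position count, appending nulls for rows whose tag selected a different union branch. A hypothetical sketch of that idea, built from SPI calls already visible above (Type.createBlockBuilder, BlockBuilder.appendNull) plus Type.appendTo:

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.LazyBlockLoader;
import io.trino.spi.type.Type;

final class UnpackLoaderSketch implements LazyBlockLoader {
    private final Block dense;
    private final Type type;
    private final boolean[] valueIsNonNull;

    UnpackLoaderSketch(Block dense, Type type, boolean[] valueIsNonNull) {
        this.dense = dense;
        this.type = type;
        this.valueIsNonNull = valueIsNonNull;
    }

    @Override
    public Block load() {
        // Walk every output position: copy the next dense value where this field
        // was selected, otherwise append a null.
        BlockBuilder builder = type.createBlockBuilder(null, valueIsNonNull.length);
        int denseIndex = 0;
        for (boolean nonNull : valueIsNonNull) {
            if (nonNull) {
                type.appendTo(dense, denseIndex++, builder);
            } else {
                builder.appendNull();
            }
        }
        return builder.build();
    }
}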

Use of io.trino.spi.block.LazyBlock in project trino by trinodb.
In class TestScanFilterAndProjectOperator, method testPageSourceLazyLoad:
@Test
public void testPageSourceLazyLoad() {
    Block inputBlock = BlockAssertions.createLongSequenceBlock(0, 100);
    // If column 1 is loaded, the test will fail
    Page input = new Page(100, inputBlock, new LazyBlock(100, () -> {
        throw new AssertionError("Lazy block should not be loaded");
    }));
    DriverContext driverContext = newDriverContext();
    List<RowExpression> projections = ImmutableList.of(field(0, VARCHAR));
    Supplier<CursorProcessor> cursorProcessor = functionAssertions.getExpressionCompiler().compileCursorProcessor(Optional.empty(), projections, "key");
    PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectAllFilter()), ImmutableList.of(new LazyPagePageProjection()));
    ScanFilterAndProjectOperator.ScanFilterAndProjectOperatorFactory factory = new ScanFilterAndProjectOperator.ScanFilterAndProjectOperatorFactory(0, new PlanNodeId("test"), new PlanNodeId("0"), (session, split, table, columns, dynamicFilter) -> new SinglePagePageSource(input), cursorProcessor, () -> pageProcessor, TEST_TABLE_HANDLE, ImmutableList.of(), DynamicFilter.EMPTY, ImmutableList.of(BIGINT), DataSize.ofBytes(0), 0);
    SourceOperator operator = factory.createOperator(driverContext);
    operator.addSplit(new Split(new CatalogName("test"), TestingSplit.createLocalSplit(), Lifespan.taskWide()));
    operator.noMoreSplits();
    MaterializedResult expected = toMaterializedResult(driverContext.getSession(), ImmutableList.of(BIGINT), ImmutableList.of(new Page(inputBlock)));
    MaterializedResult actual = toMaterializedResult(driverContext.getSession(), ImmutableList.of(BIGINT), toPages(operator));
    assertEquals(actual.getRowCount(), expected.getRowCount());
    assertEquals(actual, expected);
}
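
The pipeline above only ever projects channel 0, so the lazy channel's loader is never invoked. The property the test relies on, as a minimal illustrative sketch (keepFirstChannel is hypothetical, not part of the test): assembling a new Page from only the blocks you need does not force a LazyBlock in another channel to load.

import io.trino.spi.Page;

final class ChannelPruningSketch {
    static Page keepFirstChannel(Page page) {
        // Page.getBlock returns the block reference as-is; a LazyBlock in another
        // channel stays unloaded because nothing reads its data.
        return new Page(page.getPositionCount(), page.getBlock(0));
    }
}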