Example usage of io.trino.spi.block.LazyBlock in the Trino project (trinodb).
Source: class TestReaderProjectionsAdapter, method testLazyDereferenceProjectionLoading.
// Verifies that adapting and loading a page for a dereference projection
// ("col.f_row_0.f_bigint_0") loads only the blocks along that dereference path,
// while sibling subfields remain unloaded lazy blocks.
@Test
public void testLazyDereferenceProjectionLoading() {
// Request only the projected subfield at dereference path [0, 0] inside base column "col"
List<HiveColumnHandle> columns = ImmutableList.of(createProjectedColumnHandle(TEST_FULL_COLUMNS.get("col"), ImmutableList.of(0, 0)));
// Four rows: nested row data, a null inner row, a null outer row, and more nested data
List<Object> inputBlockData = new ArrayList<>();
inputBlockData.add(rowData(rowData(11L, 12L, 13L), 1L));
inputBlockData.add(rowData(null, 2L));
inputBlockData.add(null);
inputBlockData.add(rowData(rowData(31L, 32L, 33L), 3L));
// Produce an output page by applying adaptation
Optional<ReaderColumns> readerProjections = projectBaseColumns(columns);
ReaderProjectionsAdapter adapter = new ReaderProjectionsAdapter(columns.stream().map(ColumnHandle.class::cast).collect(toImmutableList()), readerProjections.get(), column -> ((HiveColumnHandle) column).getType(), HivePageSourceProvider::getProjection);
Page inputPage = createPage(ImmutableList.of(inputBlockData), adapter.getInputTypes());
// Force-loading the adapted page should pull in only the projected subfield
adapter.adaptPage(inputPage).getLoadedPage();
// Verify that only the block corresponding to subfield "col.f_row_0.f_bigint_0" is completely loaded; others are not.
// Assertion for "col": still wrapped in a LazyBlock, and not fully loaded
Block lazyBlockLevel1 = inputPage.getBlock(0);
assertTrue(lazyBlockLevel1 instanceof LazyBlock);
assertFalse(lazyBlockLevel1.isLoaded());
RowBlock rowBlockLevel1 = ((RowBlock) (((LazyBlock) lazyBlockLevel1).getBlock()));
assertFalse(rowBlockLevel1.isLoaded());
// Assertions for "col.f_row_0" and "col.f_bigint_0": neither field of the outer row is fully loaded
// (getField itself must not trigger a load here)
ColumnarRow columnarRowLevel1 = toColumnarRow(rowBlockLevel1);
assertFalse(columnarRowLevel1.getField(0).isLoaded());
assertFalse(columnarRowLevel1.getField(1).isLoaded());
// Descend into "col.f_row_0": still a LazyBlock whose inner row is not fully loaded
Block lazyBlockLevel2 = columnarRowLevel1.getField(0);
assertTrue(lazyBlockLevel2 instanceof LazyBlock);
RowBlock rowBlockLevel2 = ((RowBlock) (((LazyBlock) lazyBlockLevel2).getBlock()));
assertFalse(rowBlockLevel2.isLoaded());
ColumnarRow columnarRowLevel2 = toColumnarRow(rowBlockLevel2);
// Assertions for "col.f_row_0.f_bigint_0" (on the projected path, hence loaded)
// and "col.f_row_0.f_bigint_1" (not projected, hence untouched)
assertTrue(columnarRowLevel2.getField(0).isLoaded());
assertFalse(columnarRowLevel2.getField(1).isLoaded());
}
Example usage of io.trino.spi.block.LazyBlock in the Trino project (trinodb).
Source: class TestInputPageProjection, method testLazyInputPage.
// InputPageProjection is a pass-through: projecting an eager block yields an
// eager result, while projecting a lazy-wrapped block yields a result that is
// still lazy and has not been loaded.
@Test
public void testLazyInputPage() {
InputPageProjection channelZeroProjection = new InputPageProjection(0, BIGINT);
Block eagerBlock = createLongSequenceBlock(0, 100);
SelectedPositions allPositions = SelectedPositions.positionsRange(0, 100);
// An eager input block must not come back wrapped in a LazyBlock
Block eagerResult = channelZeroProjection.project(SESSION, new DriverYieldSignal(), new Page(eagerBlock), allPositions).getResult();
assertFalse(eagerResult instanceof LazyBlock);
// A lazy input block must stay lazy, and projection must not trigger a load
Block lazyResult = channelZeroProjection.project(SESSION, new DriverYieldSignal(), new Page(lazyWrapper(eagerBlock)), allPositions).getResult();
assertTrue(lazyResult instanceof LazyBlock);
assertFalse(lazyResult.isLoaded());
}
Example usage of io.trino.spi.block.LazyBlock in the Trino project (trinodb).
Source: class TestPageProcessor, method testSelectNoneFilterLazyLoad.
// A filter that selects no positions must short-circuit: the projected lazy
// channel is never loaded, no output pages are produced, and no memory stays
// retained in the processor's memory context.
@Test
public void testSelectNoneFilterLazyLoad() {
PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectNoneFilter()), ImmutableList.of(new InputPageProjection(1, BIGINT)));
// Channel 1 is booby-trapped: any attempt to load it fails the test immediately
LazyBlock trappedChannel = new LazyBlock(100, () -> {
throw new AssertionError("Lazy block should not be loaded");
});
Page inputPage = new Page(createLongSequenceBlock(0, 100), trappedChannel);
LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName());
Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage);
assertEquals(memoryContext.getBytes(), 0);
// Draining the iterator yields nothing, since every position was filtered out
List<Optional<Page>> outputPages = ImmutableList.copyOf(output);
assertEquals(outputPages.size(), 0);
}
Example usage of io.trino.spi.block.LazyBlock in the Trino project (trinodb).
Source: class TestHashJoinOperator, method testUnwrapsLazyBlocks.
// Verifies that the hash-join operator unwraps LazyBlock probe inputs: the join
// filter forces a load of probe channel 1, and the block the operator emits for
// that channel must no longer be a LazyBlock wrapper.
@Test
public void testUnwrapsLazyBlocks() {
TaskContext taskContext = createTaskContext();
DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();
InternalJoinFilterFunction filterFunction = new TestInternalJoinFilterFunction(((leftPosition, leftPage, rightPosition, rightPage) -> {
// force loading of probe block
rightPage.getBlock(1).getLoadedBlock();
return true;
}));
// Build side: a single-row page joined on channel 0
RowPagesBuilder buildPages = rowPagesBuilder(false, Ints.asList(0), ImmutableList.of(BIGINT)).addSequencePage(1, 0);
BuildSideSetup buildSideSetup = setupBuildSide(nodePartitioningManager, true, taskContext, buildPages, Optional.of(filterFunction), false, SINGLE_STREAM_SPILLER_FACTORY);
JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactory = buildSideSetup.getLookupSourceFactoryManager();
// Probe side: wrap channel 1 in a LazyBlock so the operator has something to unwrap
RowPagesBuilder probePages = rowPagesBuilder(false, Ints.asList(0), ImmutableList.of(BIGINT, BIGINT));
List<Page> probeInput = probePages.addSequencePage(1, 0, 0).build();
probeInput = probeInput.stream().map(page -> new Page(page.getBlock(0), new LazyBlock(1, () -> page.getBlock(1)))).collect(toImmutableList());
OperatorFactory joinOperatorFactory = operatorFactories.innerJoin(0, new PlanNodeId("test"), lookupSourceFactory, false, false, true, probePages.getTypes(), Ints.asList(0), getHashChannelAsInt(probePages), Optional.empty(), OptionalInt.of(1), PARTITIONING_SPILLER_FACTORY, TYPE_OPERATOR_FACTORY);
// The lookup source must be fully built before the join operator can produce output
instantiateBuildDrivers(buildSideSetup, taskContext);
buildLookupSource(executor, buildSideSetup);
// Push the single probe page through the operator and inspect the emitted block
Operator operator = joinOperatorFactory.createOperator(driverContext);
assertTrue(operator.needsInput());
operator.addInput(probeInput.get(0));
operator.finish();
Page output = operator.getOutput();
assertFalse(output.getBlock(1) instanceof LazyBlock);
}
Example usage of io.trino.spi.block.LazyBlock in the Trino project (trinodb).
Source: class HivePageSource, method getNextPage.
// Fetches the next page from the delegate reader, then adapts it to the output
// schema: applies dereference-projection adaptation, bucket filtering, prefilled
// and synthesized columns, and lazy type coercion. Returns null when the source
// is exhausted or an entire page is filtered out by bucket adaptation.
@Override
public Page getNextPage() {
try {
Page dataPage = delegate.getNextPage();
if (dataPage == null) {
// Delegate reader is exhausted
return null;
}
// Re-map reader (base) columns onto the projected columns expected downstream
if (projectionsAdapter.isPresent()) {
dataPage = projectionsAdapter.get().adaptPage(dataPage);
}
// Bucket adaptation may drop every row of the page, in which case we skip it
if (bucketAdapter.isPresent()) {
dataPage = bucketAdapter.get().filterPageToEligibleRowsOrDiscard(dataPage);
if (dataPage == null) {
return null;
}
}
int batchSize = dataPage.getPositionCount();
List<Block> blocks = new ArrayList<>();
for (int fieldId = 0; fieldId < columnMappings.size(); fieldId++) {
ColumnMapping columnMapping = columnMappings.get(fieldId);
switch(columnMapping.getKind()) {
// PREFILLED/EMPTY columns are constant for the whole page: emit an RLE block
case PREFILLED:
case EMPTY:
blocks.add(RunLengthEncodedBlock.create(types[fieldId], prefilledValues[fieldId], batchSize));
break;
case REGULAR:
case SYNTHESIZED:
Block block = dataPage.getBlock(columnMapping.getIndex());
Optional<Function<Block, Block>> coercer = coercers.get(fieldId);
if (coercer.isPresent()) {
// Defer the (potentially expensive) type coercion until the block is actually read
block = new LazyBlock(batchSize, new CoercionLazyBlockLoader(block, coercer.get()));
}
blocks.add(block);
break;
case INTERIM:
// interim columns don't show up in output
break;
default:
throw new UnsupportedOperationException();
}
}
Page page = new Page(batchSize, blocks.toArray(new Block[0]));
// bucket adaptation already validates that data is in the right bucket
if (bucketAdapter.isEmpty()) {
bucketValidator.ifPresent(validator -> validator.validate(page));
}
return page;
} catch (TrinoException e) {
// Already a Trino error: close resources (suppressing secondary failures) and rethrow as-is
closeAllSuppress(e, this);
throw e;
} catch (RuntimeException e) {
// Wrap unexpected runtime failures in a Trino cursor error, preserving the cause
closeAllSuppress(e, this);
throw new TrinoException(HIVE_CURSOR_ERROR, e);
}
}
Aggregations