Usage of io.trino.spi.block.RunLengthEncodedBlock in the Trino project (trinodb):
the appendRows method of the OrcFileWriter class.
@Override
public void appendRows(Page dataPage) {
    // Reorder the input page's blocks into the file's column layout. A file column
    // with no matching input column (index < 0) is filled with an RLE-encoded null block.
    Block[] blocks = new Block[fileInputColumnIndexes.length];
    boolean[] nullBlocksArray = new boolean[fileInputColumnIndexes.length];
    boolean hasNullBlocks = false;
    int positionCount = dataPage.getPositionCount();
    for (int i = 0; i < fileInputColumnIndexes.length; i++) {
        int inputColumnIndex = fileInputColumnIndexes[i];
        if (inputColumnIndex < 0) {
            hasNullBlocks = true;
            blocks[i] = new RunLengthEncodedBlock(nullBlocks.get(i), positionCount);
        } else {
            blocks[i] = dataPage.getBlock(inputColumnIndex);
        }
        nullBlocksArray[i] = inputColumnIndex < 0;
    }
    if (transaction.isInsert() && useAcidSchema) {
        // For ACID inserts, wrap the data columns in a single row block and
        // prepend the ACID metadata columns expected by the ACID schema.
        Optional<boolean[]> nullBlocks = hasNullBlocks ? Optional.of(nullBlocksArray) : Optional.empty();
        Block rowBlock = RowBlock.fromFieldBlocks(positionCount, nullBlocks, blocks);
        blocks = buildAcidColumns(rowBlock, transaction);
    }
    // Reuse the already-computed positionCount instead of re-querying the page
    Page page = new Page(positionCount, blocks);
    try {
        orcWriter.write(page);
    } catch (IOException | UncheckedIOException e) {
        throw new TrinoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
Usage of io.trino.spi.block.RunLengthEncodedBlock in the Trino project (trinodb):
the appendRows method of the ParquetFileWriter class.
@Override
public void appendRows(Page dataPage) {
    // Map the input page's columns onto the file column order; any file column
    // without a corresponding input column gets a run-length-encoded null block.
    int positionCount = dataPage.getPositionCount();
    Block[] fileBlocks = new Block[fileInputColumnIndexes.length];
    for (int column = 0; column < fileInputColumnIndexes.length; column++) {
        int sourceIndex = fileInputColumnIndexes[column];
        fileBlocks[column] = (sourceIndex < 0)
                ? new RunLengthEncodedBlock(nullBlocks.get(column), positionCount)
                : dataPage.getBlock(sourceIndex);
    }
    try {
        parquetWriter.write(new Page(positionCount, fileBlocks));
    } catch (IOException | UncheckedIOException e) {
        throw new TrinoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
Usage of io.trino.spi.block.RunLengthEncodedBlock in the Trino project (trinodb):
the getNextPage method of the IcebergPageSource class.
@Override
public Page getNextPage() {
    try {
        Page dataPage = delegate.getNextPage();
        if (projectionsAdapter.isPresent()) {
            dataPage = projectionsAdapter.get().adaptPage(dataPage);
        }
        if (dataPage == null) {
            return null;
        }
        // Assemble the output page: prefilled channels become RLE blocks spanning
        // the whole batch; the rest are taken from the delegate's page.
        int positions = dataPage.getPositionCount();
        Block[] outputBlocks = new Block[prefilledBlocks.length];
        for (int channel = 0; channel < prefilledBlocks.length; channel++) {
            Block prefilled = prefilledBlocks[channel];
            outputBlocks[channel] = (prefilled == null)
                    ? dataPage.getBlock(delegateIndexes[channel])
                    : new RunLengthEncodedBlock(prefilled, positions);
        }
        return new Page(positions, outputBlocks);
    } catch (RuntimeException e) {
        // Release resources first, then rethrow TrinoExceptions as-is and wrap
        // everything else as a bad-data error, preserving the cause.
        closeWithSuppression(e);
        throwIfInstanceOf(e, TrinoException.class);
        throw new TrinoException(ICEBERG_BAD_DATA, e);
    }
}
Usage of io.trino.spi.block.RunLengthEncodedBlock in the Trino project (trinodb):
the getRawBlock method of the ProjectingPagesWindowIndex class.
@Override
public Block getRawBlock(int channel, int position) {
    // Channels below the projection boundary come straight from the pages index.
    if (channel < firstProjectedChannel) {
        return pagesIndex.getRawBlock(channel, position(position));
    }
    Block projected = compute(position, channel - firstProjectedChannel);
    // When every column is projected there are no raw blocks to align with.
    if (firstProjectedChannel == 0) {
        return projected;
    }
    // The projection produces a single-row block that will not line up with the
    // blocks served by the pages index, so expand it via RLE to the raw block length.
    int rawPositionCount = pagesIndex.getRawBlock(0, position(position)).getPositionCount();
    return new RunLengthEncodedBlock(projected, rawPositionCount);
}
Usage of io.trino.spi.block.RunLengthEncodedBlock in the Trino project (trinodb):
the testDistinctMaskWithNulls method of the TestAggregationOperator class.
@Test
public void testDistinctMaskWithNulls() {
    // DISTINCT sum over channel 0, masked by channel 1.
    AggregatorFactory distinctFactory = LONG_SUM.createDistinctAggregatorFactory(SINGLE, ImmutableList.of(0), OptionalInt.of(1));
    DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION).addPipelineContext(0, true, true, false).addDriverContext();
    OperatorFactory operatorFactory = new AggregationOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(distinctFactory));
    // Mask block whose values are all "true" (non-zero) but whose positions are all null.
    ByteArrayBlock trueMaskAllNull = new ByteArrayBlock(4, Optional.of(new boolean[] { true, true, true, true }), /* all positions are null */
    new byte[] { 1, 1, 1, 1 });
    /* non-zero value is true, all masks are true */
    Block trueNullRleMask = new RunLengthEncodedBlock(trueMaskAllNull.getSingleValueBlock(0), 4);
    // Null-masked rows must be ignored, so only the middle page contributes: 10 + 11 = 21.
    Page allNullMaskPage = new Page(4, createLongsBlock(1, 2, 3, 4), trueMaskAllNull);
    Page allTrueMaskPage = new Page(4, createLongsBlock(10, 11, 10, 11), createBooleansBlock(true, true, true, true));
    Page rleNullMaskPage = new Page(4, createLongsBlock(5, 6, 7, 8), trueNullRleMask);
    List<Page> nullTrueMaskInput = ImmutableList.of(allNullMaskPage, allTrueMaskPage, rleNullMaskPage);
    MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT).row(21L).build();
    assertOperatorEquals(operatorFactory, driverContext, nullTrueMaskInput, expected);
}
Related topic: Aggregations.