use of io.trino.spi.block.ColumnarRow in project trino by trinodb.
the class ListaggAggregationStateSerializer method deserialize.
@Override
public void deserialize(Block block, int index, ListaggAggregationState state) {
    checkArgument(block instanceof AbstractRowBlock);
    ColumnarRow columnarRow = toColumnarRow(block);
    // Serialized state is a row of (separator, overflowError, overflowFiller, showOverflowEntryCount, array of accumulated values)
    Slice separator = VARCHAR.getSlice(columnarRow.getField(0), index);
    boolean overflowError = BOOLEAN.getBoolean(columnarRow.getField(1), index);
    Slice overflowFiller = VARCHAR.getSlice(columnarRow.getField(2), index);
    boolean showOverflowEntryCount = BOOLEAN.getBoolean(columnarRow.getField(3), index);
    Block stateBlock = (Block) arrayType.getObject(columnarRow.getField(4), index);
    state.reset();
    state.setSeparator(separator);
    state.setOverflowError(overflowError);
    state.setOverflowFiller(overflowFiller);
    state.setShowOverflowEntryCount(showOverflowEntryCount);
    for (int i = 0; i < stateBlock.getPositionCount(); i++) {
        state.add(stateBlock, i);
    }
}
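For reference, a minimal sketch (not taken from the Trino sources) of the read pattern the method above relies on: ColumnarRow.toColumnarRow decomposes a row-typed block into one flat block per field, and each value is then read with that field's Type. The (VARCHAR, BIGINT) layout and the dumpRows helper are made-up examples, and the sketch assumes the block has no null rows so that row positions and field-block positions line up.

import io.airlift.slice.Slice;
import io.trino.spi.block.Block;
import io.trino.spi.block.ColumnarRow;

import static io.trino.spi.block.ColumnarRow.toColumnarRow;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

// Hypothetical helper: read a (VARCHAR, BIGINT) row block position by position.
// Assumes no null rows, so the row position doubles as the field-block position.
static void dumpRows(Block rowBlock) {
    ColumnarRow columnarRow = toColumnarRow(rowBlock);
    for (int position = 0; position < columnarRow.getPositionCount(); position++) {
        Slice name = VARCHAR.getSlice(columnarRow.getField(0), position);
        long count = BIGINT.getLong(columnarRow.getField(1), position);
        System.out.printf("%s -> %d%n", name.toStringUtf8(), count);
    }
}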
use of io.trino.spi.block.ColumnarRow in project trino by trinodb.
the class HiveUpdatablePageSource method updateRows.
@Override
public void updateRows(Page page, List<Integer> columnValueAndRowIdChannels) {
    int positionCount = page.getPositionCount();
    // should be filtered out by engine
    verify(positionCount > 0, "Unexpected empty page");
    HiveUpdateProcessor updateProcessor = transaction.getUpdateProcessor()
            .orElseThrow(() -> new IllegalArgumentException("updateProcessor not present"));
    ColumnarRow acidBlock = updateProcessor.getAcidBlock(page, columnValueAndRowIdChannels);
    int fieldCount = acidBlock.getFieldCount();
    checkArgument(fieldCount == 3 || fieldCount == 4, "The rowId block for UPDATE should have 3 or 4 children, but has %s", fieldCount);
    deleteRowsInternal(acidBlock);
    Block mergedColumnsBlock = updateProcessor.createMergedColumnsBlock(page, columnValueAndRowIdChannels);
    Block currentTransactionBlock = RunLengthEncodedBlock.create(BIGINT, writeId, positionCount);
    // Inserted rows follow the Hive ACID layout:
    // operation, originalTransaction, bucket, rowId, currentTransaction, row
    Block[] blockArray = {
            new RunLengthEncodedBlock(INSERT_OPERATION_BLOCK, positionCount),
            currentTransactionBlock,
            acidBlock.getField(BUCKET_CHANNEL),
            createRowIdBlock(positionCount),
            currentTransactionBlock,
            mergedColumnsBlock,
    };
    Page insertPage = new Page(blockArray);
    lazyInitializeInsertFileWriter();
    insertFileWriter.orElseThrow(() -> new IllegalArgumentException("insertFileWriter not present"))
            .appendRows(insertPage);
}
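A side note on the RunLengthEncodedBlock.create call above: it builds a constant-valued block without materializing positionCount copies, which is why it is used for the per-page ACID metadata columns. A minimal illustration with made-up values:

// Made-up values; BIGINT is io.trino.spi.type.BigintType.BIGINT as in the snippet above.
Block constantWriteId = RunLengthEncodedBlock.create(BIGINT, 42L, 1024);
// constantWriteId.getPositionCount() == 1024, and every position reads as 42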
use of io.trino.spi.block.ColumnarRow in project trino by trinodb.
the class HiveUpdatablePageSource method deleteRows.
@Override
public void deleteRows(Block rowIds) {
    ColumnarRow acidBlock = toColumnarRow(rowIds);
    int fieldCount = acidBlock.getFieldCount();
    checkArgument(fieldCount == 3, "The rowId block for DELETE should have 3 children, but has %s", fieldCount);
    deleteRowsInternal(acidBlock);
}
use of io.trino.spi.block.ColumnarRow in project trino by trinodb.
the class HiveUpdateProcessor method createMergedColumnsBlock.
/**
 * @param page The first block in the page is a RowBlock containing the three ACID
 * columns (originalTransaction, bucket, and rowId) plus a RowBlock containing
 * the values of the non-updated columns. The remaining blocks are the values of the
 * updated columns, whose offsets are given by columnValueAndRowIdChannels
 * @return The RowBlock for updated and non-updated columns
 */
public Block createMergedColumnsBlock(Page page, List<Integer> columnValueAndRowIdChannels) {
    requireNonNull(page, "page is null");
    ColumnarRow acidBlock = getAcidBlock(page, columnValueAndRowIdChannels);
    int fieldCount = acidBlock.getFieldCount();
    List<Block> nonUpdatedColumnRowBlocks;
    if (nonUpdatedColumns.isEmpty()) {
        checkArgument(fieldCount == 3, "The ACID block must contain 3 children, but instead had %s children", fieldCount);
        nonUpdatedColumnRowBlocks = ImmutableList.of();
    } else {
        checkArgument(fieldCount == 4, "The first RowBlock must contain 4 children, but instead had %s children", fieldCount);
        Block lastAcidBlock = acidBlock.getField(3);
        checkArgument(lastAcidBlock instanceof RowBlock, "The last block in the acidBlock must be a RowBlock, but instead was %s", lastAcidBlock);
        ColumnarRow nonUpdatedColumnRow = toColumnarRow(lastAcidBlock);
        ImmutableList.Builder<Block> builder = ImmutableList.builder();
        for (int field = 0; field < nonUpdatedColumnRow.getFieldCount(); field++) {
            builder.add(nonUpdatedColumnRow.getField(field));
        }
        nonUpdatedColumnRowBlocks = builder.build();
    }
    // Merge the non-updated and updated column blocks
    Block[] dataColumnBlocks = new Block[allDataColumns.size()];
    int targetColumnChannel = 0;
    int nonUpdatedColumnChannel = 0;
    int updatedColumnNumber = 0;
    for (HiveColumnHandle column : allDataColumns) {
        Block block;
        if (nonUpdatedColumnNames.contains(column.getName())) {
            block = nonUpdatedColumnRowBlocks.get(nonUpdatedColumnChannel);
            nonUpdatedColumnChannel++;
        } else {
            int index = columnValueAndRowIdChannels.get(updatedColumnNumber);
            block = page.getBlock(index);
            updatedColumnNumber++;
        }
        dataColumnBlocks[targetColumnChannel] = block;
        targetColumnChannel++;
    }
    return RowBlock.fromFieldBlocks(page.getPositionCount(), Optional.empty(), dataColumnBlocks);
}
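As a rough illustration (not from the Trino sources) of the RowBlock.fromFieldBlocks call that ends the method: it is the inverse of toColumnarRow, reassembling per-field blocks into a single row-typed block with the given position count. The two-field layout and the values below are made up.

import io.trino.spi.block.Block;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.ColumnarRow;
import io.trino.spi.block.RowBlock;
import java.util.Optional;

import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.spi.block.ColumnarRow.toColumnarRow;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;

static Block buildExampleRowBlock() {
    // Two field blocks of two positions each
    BlockBuilder names = VARCHAR.createBlockBuilder(null, 2);
    VARCHAR.writeSlice(names, utf8Slice("a"));
    VARCHAR.writeSlice(names, utf8Slice("b"));
    BlockBuilder counts = BIGINT.createBlockBuilder(null, 2);
    BIGINT.writeLong(counts, 1);
    BIGINT.writeLong(counts, 2);

    // Two rows, no nulls, two fields per row
    Block rowBlock = RowBlock.fromFieldBlocks(2, Optional.empty(), new Block[] {names.build(), counts.build()});

    // Decomposing it again recovers the field blocks:
    // toColumnarRow(rowBlock).getFieldCount() == 2, getPositionCount() == 2
    ColumnarRow columnarRow = toColumnarRow(rowBlock);
    return rowBlock;
}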
use of io.trino.spi.block.ColumnarRow in project trino by trinodb.
the class StructColumnWriter method writeBlock.
@Override
public void writeBlock(Block block) {
    checkState(!closed);
    checkArgument(block.getPositionCount() > 0, "Block is empty");
    ColumnarRow columnarRow = toColumnarRow(block);
    writeColumnarRow(columnarRow);
}
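For orientation, a hedged sketch of the fan-out pattern a struct writer typically applies after the decomposition above; FieldWriter and writeFields are hypothetical stand-ins, not the actual writeColumnarRow implementation.

import io.trino.spi.block.Block;
import io.trino.spi.block.ColumnarRow;
import java.util.List;

// Hypothetical per-field writer, standing in for the real per-column writers
interface FieldWriter {
    void write(Block fieldBlock);
}

// Delegate each decomposed field block to the writer for that field
static void writeFields(ColumnarRow columnarRow, List<FieldWriter> fieldWriters) {
    for (int field = 0; field < columnarRow.getFieldCount(); field++) {
        fieldWriters.get(field).write(columnarRow.getField(field));
    }
}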