Use of io.trino.spi.PageBuilder in project trino by trinodb.
From the class BenchmarkGroupByHash, method createBigintPages:
private static List<Page> createBigintPages(int positionCount, int groupCount, int channelCount, boolean hashEnabled, boolean useMixedBlockTypes) {
    List<Type> types = Collections.nCopies(channelCount, BIGINT);
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    if (hashEnabled) {
        types = ImmutableList.copyOf(Iterables.concat(types, ImmutableList.of(BIGINT)));
    }
    PageBuilder pageBuilder = new PageBuilder(types);
    int pageCount = 0;
    for (int position = 0; position < positionCount; position++) {
        int rand = ThreadLocalRandom.current().nextInt(groupCount);
        pageBuilder.declarePosition();
        for (int numChannel = 0; numChannel < channelCount; numChannel++) {
            BIGINT.writeLong(pageBuilder.getBlockBuilder(numChannel), rand);
        }
        if (hashEnabled) {
            BIGINT.writeLong(pageBuilder.getBlockBuilder(channelCount), AbstractLongType.hash(rand));
        }
        if (pageBuilder.isFull()) {
            Page page = pageBuilder.build();
            pageBuilder.reset();
            if (useMixedBlockTypes) {
                if (pageCount % 3 == 0) {
                    pages.add(page);
                } else if (pageCount % 3 == 1) {
                    // rle page
                    Block[] blocks = new Block[page.getChannelCount()];
                    for (int channel = 0; channel < blocks.length; ++channel) {
                        blocks[channel] = new RunLengthEncodedBlock(page.getBlock(channel).getSingleValueBlock(0), page.getPositionCount());
                    }
                    pages.add(new Page(blocks));
                } else {
                    // dictionary page
                    int[] positions = IntStream.range(0, page.getPositionCount()).toArray();
                    Block[] blocks = new Block[page.getChannelCount()];
                    for (int channel = 0; channel < page.getChannelCount(); ++channel) {
                        blocks[channel] = new DictionaryBlock(page.getBlock(channel), positions);
                    }
                    pages.add(new Page(blocks));
                }
            } else {
                pages.add(page);
            }
            pageCount++;
        }
    }
    pages.add(pageBuilder.build());
    return pages.build();
}
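Distilled from the benchmark above, the core PageBuilder idiom is: declare a position, write one value per channel, and flush with build()/reset() whenever the builder reports it is full. The following is a minimal single-channel sketch, not taken from the Trino sources; buildPages and nextValue() are hypothetical names standing in for any caller and value source:

private static List<Page> buildPages(int positionCount) {
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(BIGINT));
    for (int position = 0; position < positionCount; position++) {
        pageBuilder.declarePosition();
        BIGINT.writeLong(pageBuilder.getBlockBuilder(0), nextValue()); // nextValue() is a hypothetical value source
        if (pageBuilder.isFull()) {
            pages.add(pageBuilder.build());
            pageBuilder.reset(); // reuse the builder for the next page
        }
    }
    if (!pageBuilder.isEmpty()) {
        pages.add(pageBuilder.build()); // flush the final partial page
    }
    return pages.build();
}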
Use of io.trino.spi.PageBuilder in project trino by trinodb.
From the class AbstractRowChangeOperator, method getOutput:
@Override
public Page getOutput() {
    if ((state != State.FINISHING) || !finishFuture.isDone()) {
        return null;
    }
    state = State.FINISHED;
    Collection<Slice> fragments = getFutureValue(finishFuture);
    // output page will only be constructed once,
    // so a new PageBuilder is constructed (instead of using PageBuilder.reset)
    PageBuilder page = new PageBuilder(fragments.size() + 1, TYPES);
    BlockBuilder rowsBuilder = page.getBlockBuilder(0);
    BlockBuilder fragmentBuilder = page.getBlockBuilder(1);
    // write row count
    page.declarePosition();
    BIGINT.writeLong(rowsBuilder, rowCount);
    fragmentBuilder.appendNull();
    // write fragments
    for (Slice fragment : fragments) {
        page.declarePosition();
        rowsBuilder.appendNull();
        VARBINARY.writeSlice(fragmentBuilder, fragment);
    }
    return page.build();
}
Use of io.trino.spi.PageBuilder in project trino by trinodb.
From the class AggregationOperator, method getOutput:
@Override
public Page getOutput() {
    if (state != State.HAS_OUTPUT) {
        return null;
    }
    // project results into output blocks
    List<Type> types = aggregates.stream().map(Aggregator::getType).collect(toImmutableList());
    // output page will only be constructed once,
    // so a new PageBuilder is constructed (instead of using PageBuilder.reset)
    PageBuilder pageBuilder = new PageBuilder(1, types);
    pageBuilder.declarePosition();
    for (int i = 0; i < aggregates.size(); i++) {
        Aggregator aggregator = aggregates.get(i);
        BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
        aggregator.evaluate(blockBuilder);
    }
    state = State.FINISHED;
    return pageBuilder.build();
}
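Both operator snippets above share the same single-shot pattern: when the output page is built exactly once, a fresh PageBuilder is created with the expected number of positions and build() is called without ever resetting it. A minimal sketch of that pattern, not from the Trino sources, assuming a placeholder schema of one BIGINT column:

List<Type> types = ImmutableList.of(BIGINT); // placeholder schema
PageBuilder output = new PageBuilder(1, types); // size hint: exactly one position is expected
output.declarePosition();
BIGINT.writeLong(output.getBlockBuilder(0), 42L); // write the single result value
Page result = output.build(); // built once, never reset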
Use of io.trino.spi.PageBuilder in project trino by trinodb.
From the class TestHivePageSink, method writeTestFile:
private static long writeTestFile(HiveConfig config, HiveMetastore metastore, String outputPath) {
    HiveTransactionHandle transaction = new HiveTransactionHandle(false);
    HiveWriterStats stats = new HiveWriterStats();
    ConnectorPageSink pageSink = createPageSink(transaction, config, metastore, new Path("file:///" + outputPath), stats);
    List<LineItemColumn> columns = getTestColumns();
    List<Type> columnTypes = columns.stream().map(LineItemColumn::getType).map(TestHivePageSink::getHiveType).map(hiveType -> hiveType.getType(TESTING_TYPE_MANAGER)).collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    int rows = 0;
    for (LineItem lineItem : new LineItemGenerator(0.01, 1, 1)) {
        rows++;
        if (rows >= NUM_ROWS) {
            break;
        }
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            LineItemColumn column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(lineItem));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(lineItem));
                    break;
                case DATE:
                    DATE.writeLong(blockBuilder, column.getDate(lineItem));
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(lineItem));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(lineItem)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
    }
    Page page = pageBuilder.build();
    pageSink.appendPage(page);
    getFutureValue(pageSink.finish());
    File outputDir = new File(outputPath);
    List<File> files = ImmutableList.copyOf(outputDir.listFiles((dir, name) -> !name.endsWith(".crc")));
    File outputFile = getOnlyElement(files);
    long length = outputFile.length();
    ConnectorPageSource pageSource = createPageSource(transaction, config, outputFile);
    List<Page> pages = new ArrayList<>();
    while (!pageSource.isFinished()) {
        Page nextPage = pageSource.getNextPage();
        if (nextPage != null) {
            pages.add(nextPage.getLoadedPage());
        }
    }
    MaterializedResult expectedResults = toMaterializedResult(getHiveSession(config), columnTypes, ImmutableList.of(page));
    MaterializedResult results = toMaterializedResult(getHiveSession(config), columnTypes, pages);
    assertEquals(results, expectedResults);
    assertEquals(round(stats.getInputPageSizeInBytes().getAllTime().getMax()), page.getRetainedSizeInBytes());
    return length;
}
Use of io.trino.spi.PageBuilder in project trino by trinodb.
From the class ArrayJoin, method arrayJoin:
@UsedByGeneratedCode
public static Slice arrayJoin(MethodHandle castFunction, Object state, ConnectorSession session, Block arrayBlock, Slice delimiter, Slice nullReplacement) {
    PageBuilder pageBuilder = (PageBuilder) state;
    if (pageBuilder.isFull()) {
        pageBuilder.reset();
    }
    int numElements = arrayBlock.getPositionCount();
    BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(0);
    boolean needsDelimiter = false;
    for (int i = 0; i < numElements; i++) {
        Slice value = null;
        if (!arrayBlock.isNull(i)) {
            try {
                value = (Slice) castFunction.invokeExact(session, arrayBlock, i);
            } catch (Throwable throwable) {
                // Restore pageBuilder into a consistent state
                blockBuilder.closeEntry();
                pageBuilder.declarePosition();
                throw new TrinoException(GENERIC_INTERNAL_ERROR, "Error casting array element to VARCHAR", throwable);
            }
        }
        if (value == null) {
            value = nullReplacement;
            if (value == null) {
                continue;
            }
        }
        if (needsDelimiter) {
            blockBuilder.writeBytes(delimiter, 0, delimiter.length());
        }
        blockBuilder.writeBytes(value, 0, value.length());
        needsDelimiter = true;
    }
    blockBuilder.closeEntry();
    pageBuilder.declarePosition();
    return VARCHAR.getSlice(blockBuilder, blockBuilder.getPositionCount() - 1);
}
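Unlike the operator examples, ArrayJoin keeps the PageBuilder as reusable scratch memory across invocations: it resets the builder once it fills up, appends one VARCHAR entry per call, and reads the result back out of the block builder. The following standalone sketch of that idiom is not from the Trino sources and assumes the same older BlockBuilder API used above (writeBytes/closeEntry); the builder is created inline here only for illustration, whereas the engine would pass it in as the state argument:

PageBuilder scratch = new PageBuilder(ImmutableList.of(VARCHAR)); // normally supplied by the engine as state
if (scratch.isFull()) {
    scratch.reset(); // reclaim memory before appending the next result
}
BlockBuilder out = scratch.getBlockBuilder(0);
Slice joined = Slices.utf8Slice("a,b,c"); // placeholder joined value
out.writeBytes(joined, 0, joined.length());
out.closeEntry();
scratch.declarePosition();
Slice result = VARCHAR.getSlice(out, out.getPositionCount() - 1); // read the entry just written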