Example usage of io.trino.spi.type.Type in the Trino project (trinodb): class SortBuffer, method appendPositionTo.
/**
 * Copies the value at {@code position} from every channel of {@code page} into the
 * corresponding block builder of {@code pageBuilder}, declaring one new position.
 *
 * @param page source page supplying the blocks to read from
 * @param position row index within {@code page} to copy
 * @param pageBuilder destination builder; its channel types drive the append
 */
public static void appendPositionTo(Page page, int position, PageBuilder pageBuilder) {
    pageBuilder.declarePosition();
    int channelCount = page.getChannelCount();
    for (int channel = 0; channel < channelCount; channel++) {
        Type channelType = pageBuilder.getType(channel);
        channelType.appendTo(page.getBlock(channel), position, pageBuilder.getBlockBuilder(channel));
    }
}
Example usage of io.trino.spi.type.Type in the Trino project (trinodb): class TestHiveFileFormats, method testRCBinaryProjectedColumns.
@Test(dataProvider = "rowCount")
public void testRCBinaryProjectedColumns(int rowCount) throws Exception {
    // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls
    List<TestColumn> supportedColumns = TEST_COLUMNS.stream()
            .filter(column -> !column.getName().equals("t_map_null_key_complex_key_value"))
            .filter(column -> !column.getName().equals("t_empty_varchar"))
            .collect(toList());

    List<TestColumn> regularColumns = getRegularColumns(supportedColumns);
    List<TestColumn> partitionColumns = getPartitionColumns(supportedColumns);

    // Create projected columns for all supported regular columns
    ImmutableList.Builder<TestColumn> writeColumnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<TestColumn> readColumnsBuilder = ImmutableList.builder();
    generateProjectedColumns(regularColumns, writeColumnsBuilder, readColumnsBuilder);

    // Partition columns are written and read as-is (no projection)
    List<TestColumn> writeColumns = writeColumnsBuilder.addAll(partitionColumns).build();
    List<TestColumn> readColumns = readColumnsBuilder.addAll(partitionColumns).build();

    assertThatFileFormat(RCBINARY)
            .withWriteColumns(writeColumns)
            .withReadColumns(readColumns)
            .withRowsCount(rowCount)
            .withFileWriterFactory(new RcFileFileWriterFactory(HDFS_ENVIRONMENT, TESTING_TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, STATS))
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Example usage of io.trino.spi.type.Type in the Trino project (trinodb): class BaseHiveConnectorTest, method testShowColumnsPartitionKey.
@Test
public void testShowColumnsPartitionKey() {
    assertUpdate("" +
            "CREATE TABLE test_show_columns_partition_key\n" +
            "(grape bigint, orange bigint, pear varchar(65535), mango integer, lychee smallint, kiwi tinyint, apple varchar, pineapple varchar(65535))\n" +
            "WITH (partitioned_by = ARRAY['apple', 'pineapple'])");

    MaterializedResult actual = computeActual("SHOW COLUMNS FROM test_show_columns_partition_key");

    // SHOW COLUMNS yields four unbounded-varchar columns: name, type, extra, comment
    Type varcharType = canonicalizeType(VARCHAR);
    MaterializedResult expected = resultBuilder(getSession(), varcharType, varcharType, varcharType, varcharType)
            .row("grape", canonicalizeType(BIGINT).toString(), "", "")
            .row("orange", canonicalizeType(BIGINT).toString(), "", "")
            .row("pear", canonicalizeType(createVarcharType(65535)).toString(), "", "")
            .row("mango", canonicalizeType(INTEGER).toString(), "", "")
            .row("lychee", canonicalizeType(SMALLINT).toString(), "", "")
            .row("kiwi", canonicalizeType(TINYINT).toString(), "", "")
            .row("apple", canonicalizeType(VARCHAR).toString(), "partition key", "")
            .row("pineapple", canonicalizeType(createVarcharType(65535)).toString(), "partition key", "")
            .build();

    assertEquals(actual, expected);
}
Example usage of io.trino.spi.type.Type in the Trino project (trinodb): class BenchmarkFileFormatsUtils, method createTpchDataSet.
/**
 * Generates an in-memory TPC-H data set for benchmarking a file format.
 * <p>
 * Rows are produced from {@code tpchTable}'s generator and packed into pages until at
 * least {@code MIN_DATA_SIZE} bytes of full pages have been accumulated (generation may
 * stop mid-table). DATE columns are written as unbounded VARCHAR when {@code format}
 * has no native date support.
 *
 * @param format target file format; controls whether DATE is kept or stringified
 * @param tpchTable table whose generator supplies the rows
 * @param columns TPC-H columns to materialize, in output order
 * @return names, types, and generated pages bundled as {@code TestData}
 * @throws IllegalArgumentException if a column has an unsupported TPC-H base type
 */
public static <E extends TpchEntity> TestData createTpchDataSet(FileFormat format, TpchTable<E> tpchTable, List<TpchColumn<E>> columns) {
    List<String> columnNames = columns.stream().map(TpchColumn::getColumnName).collect(toList());
    // Hoisted loop invariant: the original re-invoked createUnboundedVarcharType() for
    // every VARCHAR/DATE value written. Declared as VarcharType so writeString resolves.
    VarcharType unboundedVarchar = createUnboundedVarcharType();
    List<Type> columnTypes = columns.stream()
            .map(BenchmarkFileFormatsUtils::getColumnType)
            // Formats without native DATE support store dates as strings instead
            .map(type -> format.supportsDate() || !DATE.equals(type) ? type : unboundedVarchar)
            .collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    long dataSize = 0;
    for (E row : tpchTable.createGenerator(10, 1, 1)) {
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            TpchColumn<E> column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(row));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(row));
                    break;
                case DATE:
                    if (format.supportsDate()) {
                        DATE.writeLong(blockBuilder, column.getDate(row));
                    }
                    else {
                        // Fallback: render the date as a string for formats lacking DATE
                        unboundedVarchar.writeString(blockBuilder, column.getString(row));
                    }
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(row));
                    break;
                case VARCHAR:
                    unboundedVarchar.writeSlice(blockBuilder, Slices.utf8Slice(column.getString(row)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
        // Flush full pages; stop generating once enough data has been produced
        if (pageBuilder.isFull()) {
            Page page = pageBuilder.build();
            pages.add(page);
            pageBuilder.reset();
            dataSize += page.getSizeInBytes();
            if (dataSize >= MIN_DATA_SIZE) {
                break;
            }
        }
    }
    // Flush any trailing partial page so no generated rows are lost
    if (!pageBuilder.isEmpty()) {
        pages.add(pageBuilder.build());
    }
    return new TestData(columnNames, columnTypes, pages.build());
}
Example usage of io.trino.spi.type.Type in the Trino project (trinodb): class TestReaderProjectionsAdapter, method createProjectedColumnBlock.
/**
 * Builds the expected "projected" block for a column read through a dereference chain.
 * <p>
 * For each position in {@code data}, walks all dereference indices except the last through
 * nested row blocks, then appends the value selected by the final index using
 * {@code finalType}. If the top-level value or any intermediate field along the chain is
 * null, a null is appended for that position instead.
 *
 * @param data source block of top-level row values
 * @param finalType type of the fully dereferenced value; used to build the result block
 * @param blockType type of the top-level values in {@code data}; must be a RowType at each
 *                  intermediate dereference step (checked via checkArgument)
 * @param dereferences field indices to follow, outermost first; empty means no projection
 * @return {@code data} unchanged when {@code dereferences} is empty, otherwise a new block
 */
private static Block createProjectedColumnBlock(Block data, Type finalType, Type blockType, List<Integer> dereferences) {
    // No projection requested — the source block is already the expected result
    if (dereferences.size() == 0) {
        return data;
    }
    BlockBuilder builder = finalType.createBlockBuilder(null, data.getPositionCount());
    for (int i = 0; i < data.getPositionCount(); i++) {
        Type sourceType = blockType;
        Block currentData = null;
        boolean isNull = data.isNull(i);
        if (!isNull) {
            // Get SingleRowBlock corresponding to element at position i
            currentData = data.getObject(i, Block.class);
        }
        // Apply all dereferences except for the last one, because the type can be different
        for (int j = 0; j < dereferences.size() - 1; j++) {
            if (isNull) {
                // If null element is discovered at any dereferencing step, break
                break;
            }
            checkArgument(sourceType instanceof RowType);
            if (currentData.isNull(dereferences.get(j))) {
                // Field is null at this step; mark and stop descending (via isNull below)
                currentData = null;
            } else {
                // Descend one level: narrow the row type and fetch the nested row block
                sourceType = ((RowType) sourceType).getFields().get(dereferences.get(j)).getType();
                currentData = currentData.getObject(dereferences.get(j), Block.class);
            }
            isNull = isNull || (currentData == null);
        }
        if (isNull) {
            // Append null if any of the elements in the dereference chain were null
            builder.appendNull();
        } else {
            // Final step: copy the last dereferenced field using the target type
            int lastDereference = dereferences.get(dereferences.size() - 1);
            finalType.appendTo(currentData, lastDereference, builder);
        }
    }
    return builder.build();
}
Aggregations