Use of io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER in project trino by trinodb.
The class TestHiveBucketing, method assertBucketsEqual.
private static void assertBucketsEqual(List<String> hiveTypeStrings, List<List<Object>> hiveValues, int bucketCount, Optional<Set<Integer>> expectedBucketsV1, Optional<Set<Integer>> expectedBucketsV2)
{
    List<HiveType> hiveTypes = hiveTypeStrings.stream().map(HiveType::valueOf).collect(toImmutableList());
    List<TypeInfo> hiveTypeInfos = hiveTypes.stream().map(HiveType::getTypeInfo).collect(toImmutableList());
    List<Type> trinoTypes = hiveTypes.stream().map(type -> type.getType(TESTING_TYPE_MANAGER)).collect(toImmutableList());
    ImmutableList.Builder<List<NullableValue>> values = ImmutableList.builder();
    for (int i = 0; i < hiveValues.size(); i++) {
        List<Object> valueList = hiveValues.get(i);
        Type trinoType = trinoTypes.get(i);
        values.add(valueList.stream()
                .map(value -> new NullableValue(trinoType, toNativeContainerValue(trinoType, value)))
                .collect(toImmutableList()));
    }
    assertEquals(getHiveBuckets(BUCKETING_V1, bucketCount, hiveTypeInfos, values.build()), expectedBucketsV1);
    assertEquals(getHiveBuckets(BUCKETING_V2, bucketCount, hiveTypeInfos, values.build()), expectedBucketsV2);
}
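A minimal sketch of how this helper might be invoked, assuming ImmutableList, ImmutableSet, and Optional are imported by the surrounding test. Note that hiveValues is column-major: each inner list holds the values of one column across the test rows. The bucket ids below are illustrative placeholders, not bucket assignments actually computed by Hive.

// Illustrative call only; replace the placeholder bucket sets with the values Hive actually assigns.
assertBucketsEqual(
        ImmutableList.of("string"),
        ImmutableList.of(ImmutableList.of((Object) "alice", "bob", "charlie")),
        16,
        Optional.of(ImmutableSet.of(1, 2, 3)),   // placeholder bucketing V1 buckets
        Optional.of(ImmutableSet.of(4, 5, 6)));  // placeholder bucketing V2 buckets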
Use of io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER in project trino by trinodb.
The class TestHiveFileFormats, method testRCBinaryProjectedColumnsPageSource.
@Test(dataProvider = "rowCount")
public void testRCBinaryProjectedColumnsPageSource(int rowCount) throws Exception {
// RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls
List<TestColumn> supportedColumns = TEST_COLUMNS.stream().filter(testColumn -> !testColumn.getName().equals("t_empty_varchar")).collect(toList());
List<TestColumn> regularColumns = getRegularColumns(supportedColumns);
List<TestColumn> partitionColumns = getPartitionColumns(supportedColumns);
// Created projected columns for all regular supported columns
ImmutableList.Builder<TestColumn> writeColumnsBuilder = ImmutableList.builder();
ImmutableList.Builder<TestColumn> readeColumnsBuilder = ImmutableList.builder();
generateProjectedColumns(regularColumns, writeColumnsBuilder, readeColumnsBuilder);
List<TestColumn> writeColumns = writeColumnsBuilder.addAll(partitionColumns).build();
List<TestColumn> readColumns = readeColumnsBuilder.addAll(partitionColumns).build();
assertThatFileFormat(RCBINARY).withWriteColumns(writeColumns).withReadColumns(readColumns).withRowsCount(rowCount).withFileWriterFactory(new RcFileFileWriterFactory(HDFS_ENVIRONMENT, TESTING_TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, STATS)).isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
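The dataProvider = "rowCount" reference above points at a TestNG data provider defined elsewhere in TestHiveFileFormats. A minimal sketch of such a provider, assuming org.testng.annotations.DataProvider is imported; the method name and the row counts below are illustrative and may differ from the project's actual provider.

@DataProvider(name = "rowCount")
public static Object[][] rowCountProvider()
{
    // Each inner array supplies one rowCount argument to the test method.
    return new Object[][] {{0}, {1}, {1000}};
}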
Use of io.trino.type.InternalTypeManager.TESTING_TYPE_MANAGER in project trino by trinodb.
The class TestHivePageSink, method writeTestFile.
private static long writeTestFile(HiveConfig config, HiveMetastore metastore, String outputPath)
{
    HiveTransactionHandle transaction = new HiveTransactionHandle(false);
    HiveWriterStats stats = new HiveWriterStats();
    ConnectorPageSink pageSink = createPageSink(transaction, config, metastore, new Path("file:///" + outputPath), stats);
    List<LineItemColumn> columns = getTestColumns();
    List<Type> columnTypes = columns.stream()
            .map(LineItemColumn::getType)
            .map(TestHivePageSink::getHiveType)
            .map(hiveType -> hiveType.getType(TESTING_TYPE_MANAGER))
            .collect(toList());
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    int rows = 0;
    for (LineItem lineItem : new LineItemGenerator(0.01, 1, 1)) {
        rows++;
        if (rows >= NUM_ROWS) {
            break;
        }
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            LineItemColumn column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(lineItem));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(lineItem));
                    break;
                case DATE:
                    DATE.writeLong(blockBuilder, column.getDate(lineItem));
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(lineItem));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(lineItem)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
    }
    Page page = pageBuilder.build();
    pageSink.appendPage(page);
    getFutureValue(pageSink.finish());
    File outputDir = new File(outputPath);
    List<File> files = ImmutableList.copyOf(outputDir.listFiles((dir, name) -> !name.endsWith(".crc")));
    File outputFile = getOnlyElement(files);
    long length = outputFile.length();
    ConnectorPageSource pageSource = createPageSource(transaction, config, outputFile);
    List<Page> pages = new ArrayList<>();
    while (!pageSource.isFinished()) {
        Page nextPage = pageSource.getNextPage();
        if (nextPage != null) {
            pages.add(nextPage.getLoadedPage());
        }
    }
    MaterializedResult expectedResults = toMaterializedResult(getHiveSession(config), columnTypes, ImmutableList.of(page));
    MaterializedResult results = toMaterializedResult(getHiveSession(config), columnTypes, pages);
    assertEquals(results, expectedResults);
    assertEquals(round(stats.getInputPageSizeInBytes().getAllTime().getMax()), page.getRetainedSizeInBytes());
    return length;
}
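A hypothetical call site, assuming the test has already created a HiveConfig, a HiveMetastore, and a temporary output directory; the names config, metastore, and tempDir below are placeholders, not part of the snippet above, and assertTrue is the org.testng.Assert method.

// Builds a single page of lineitem rows, writes it through the page sink,
// reads it back, and returns the length of the written file in bytes.
long fileLength = writeTestFile(config, metastore, tempDir.getAbsolutePath());
assertTrue(fileLength > 0);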