Usage of io.trino.plugin.hive.rcfile.RcFilePageSourceFactory in the project trino by trinodb:
class TestHiveFileFormats, method testRCTextProjectedColumnsPageSource.
/**
 * Verifies that RCText files written with a full set of regular columns can be read back
 * through {@link RcFilePageSourceFactory} when the reader requests projected (dereferenced)
 * columns, with partition columns appended to both the write and read schemas.
 */
@Test(dataProvider = "rowCount")
public void testRCTextProjectedColumnsPageSource(int rowCount)
        throws Exception
{
    List<TestColumn> supportedColumns = TEST_COLUMNS;
    List<TestColumn> regularColumns = getRegularColumns(supportedColumns);
    List<TestColumn> partitionColumns = getPartitionColumns(supportedColumns);

    // Create projected columns for all regular supported columns
    ImmutableList.Builder<TestColumn> writeColumnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<TestColumn> readColumnsBuilder = ImmutableList.builder();
    generateProjectedColumns(regularColumns, writeColumnsBuilder, readColumnsBuilder);

    // Partition columns are not projected; append them unchanged to both sides
    List<TestColumn> writeColumns = writeColumnsBuilder.addAll(partitionColumns).build();
    List<TestColumn> readColumns = readColumnsBuilder.addAll(partitionColumns).build();

    assertThatFileFormat(RCTEXT)
            .withWriteColumns(writeColumns)
            .withReadColumns(readColumns)
            .withRowsCount(rowCount)
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Usage of io.trino.plugin.hive.rcfile.RcFilePageSourceFactory in the project trino by trinodb:
class TestHiveFileFormats, method testRcBinaryPageSource.
/**
 * Verifies that RCBinary files containing the supported test columns are readable
 * through {@link RcFilePageSourceFactory}.
 */
@Test(dataProvider = "rowCount")
public void testRcBinaryPageSource(int rowCount)
        throws Exception
{
    // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls
    // Hive binary writers are broken for timestamps
    List<TestColumn> columns = TEST_COLUMNS.stream()
            .filter(column -> !column.getName().equals("t_empty_varchar"))
            .filter(TestHiveFileFormats::withoutTimestamps)
            .collect(toList());

    assertThatFileFormat(RCBINARY)
            .withColumns(columns)
            .withRowsCount(rowCount)
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Usage of io.trino.plugin.hive.rcfile.RcFilePageSourceFactory in the project trino by trinodb:
class TestHiveFileFormats, method testRCBinaryProjectedColumns.
/**
 * Verifies that RCBinary files written via {@link RcFileFileWriterFactory} with projected
 * (dereferenced) columns are readable through {@link RcFilePageSourceFactory}, with partition
 * columns appended to both the write and read schemas.
 */
@Test(dataProvider = "rowCount")
public void testRCBinaryProjectedColumns(int rowCount)
        throws Exception
{
    // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls
    List<TestColumn> supportedColumns = TEST_COLUMNS.stream()
            .filter(testColumn -> {
                String name = testColumn.getName();
                return !name.equals("t_map_null_key_complex_key_value") && !name.equals("t_empty_varchar");
            })
            .collect(toList());

    List<TestColumn> regularColumns = getRegularColumns(supportedColumns);
    List<TestColumn> partitionColumns = getPartitionColumns(supportedColumns);

    // Create projected columns for all regular supported columns
    ImmutableList.Builder<TestColumn> writeColumnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<TestColumn> readColumnsBuilder = ImmutableList.builder();
    generateProjectedColumns(regularColumns, writeColumnsBuilder, readColumnsBuilder);

    // Partition columns are not projected; append them unchanged to both sides
    List<TestColumn> writeColumns = writeColumnsBuilder.addAll(partitionColumns).build();
    List<TestColumn> readColumns = readColumnsBuilder.addAll(partitionColumns).build();

    assertThatFileFormat(RCBINARY)
            .withWriteColumns(writeColumns)
            .withReadColumns(readColumns)
            .withRowsCount(rowCount)
            .withFileWriterFactory(new RcFileFileWriterFactory(HDFS_ENVIRONMENT, TESTING_TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, STATS))
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Usage of io.trino.plugin.hive.rcfile.RcFilePageSourceFactory in the project trino by trinodb:
class TestHiveFileFormats, method testRcTextOptimizedWriter.
/**
 * Verifies that RCText files produced by the optimized writer ({@link RcFileFileWriterFactory})
 * are readable through both the generic Hive record cursor and {@link RcFilePageSourceFactory}.
 */
@Test(dataProvider = "rowCount")
public void testRcTextOptimizedWriter(int rowCount)
        throws Exception
{
    List<TestColumn> columns = TEST_COLUMNS.stream()
            .filter(TestHiveFileFormats::withoutNullMapKeyTests)
            .collect(toImmutableList());

    assertThatFileFormat(RCTEXT)
            .withColumns(columns)
            .withRowsCount(rowCount)
            .withFileWriterFactory(new RcFileFileWriterFactory(HDFS_ENVIRONMENT, TESTING_TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, STATS))
            .isReadableByRecordCursor(createGenericHiveRecordCursorProvider(HDFS_ENVIRONMENT))
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Usage of io.trino.plugin.hive.rcfile.RcFilePageSourceFactory in the project trino by trinodb:
class TestHiveFileFormats, method testRCBinaryProjectedColumnsPageSource.
/**
 * Verifies that RCBinary files written via {@link RcFileFileWriterFactory} with projected
 * (dereferenced) columns can be read back through {@link RcFilePageSourceFactory}, with
 * partition columns appended to both the write and read schemas.
 */
@Test(dataProvider = "rowCount")
public void testRCBinaryProjectedColumnsPageSource(int rowCount)
        throws Exception
{
    // RCBinary does not support complex type as key of a map and interprets empty VARCHAR as nulls
    List<TestColumn> supportedColumns = TEST_COLUMNS.stream()
            .filter(testColumn -> !testColumn.getName().equals("t_empty_varchar"))
            .collect(toList());

    List<TestColumn> regularColumns = getRegularColumns(supportedColumns);
    List<TestColumn> partitionColumns = getPartitionColumns(supportedColumns);

    // Create projected columns for all regular supported columns
    ImmutableList.Builder<TestColumn> writeColumnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<TestColumn> readColumnsBuilder = ImmutableList.builder();
    generateProjectedColumns(regularColumns, writeColumnsBuilder, readColumnsBuilder);

    // Partition columns are not projected; append them unchanged to both sides
    List<TestColumn> writeColumns = writeColumnsBuilder.addAll(partitionColumns).build();
    List<TestColumn> readColumns = readColumnsBuilder.addAll(partitionColumns).build();

    assertThatFileFormat(RCBINARY)
            .withWriteColumns(writeColumns)
            .withReadColumns(readColumns)
            .withRowsCount(rowCount)
            .withFileWriterFactory(new RcFileFileWriterFactory(HDFS_ENVIRONMENT, TESTING_TYPE_MANAGER, new NodeVersion("test"), HIVE_STORAGE_TIME_ZONE, STATS))
            .isReadableByPageSource(new RcFilePageSourceFactory(TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS, new HiveConfig()));
}
Aggregations