Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.
Class TestOrcPredicates, method createPageSource.
private ConnectorPageSource createPageSource(TupleDomain<TestColumn> effectivePredicate, List<TestColumn> columnsToRead, ConnectorSession session, FileSplit split)
{
    OrcPageSourceFactory readerFactory = new OrcPageSourceFactory(new OrcReaderOptions(), HDFS_ENVIRONMENT, STATS, UTC);

    Properties splitProperties = new Properties();
    splitProperties.setProperty(FILE_INPUT_FORMAT, ORC.getInputFormat());
    splitProperties.setProperty(SERIALIZATION_LIB, ORC.getSerde());

    // Use full columns in split properties
    ImmutableList.Builder<String> splitPropertiesColumnNames = ImmutableList.builder();
    ImmutableList.Builder<String> splitPropertiesColumnTypes = ImmutableList.builder();
    Set<String> baseColumnNames = new HashSet<>();
    for (TestColumn columnToRead : columnsToRead) {
        String name = columnToRead.getBaseName();
        if (!baseColumnNames.contains(name) && !columnToRead.isPartitionKey()) {
            baseColumnNames.add(name);
            splitPropertiesColumnNames.add(name);
            splitPropertiesColumnTypes.add(columnToRead.getBaseObjectInspector().getTypeName());
        }
    }
    splitProperties.setProperty("columns", splitPropertiesColumnNames.build().stream().collect(Collectors.joining(",")));
    splitProperties.setProperty("columns.types", splitPropertiesColumnTypes.build().stream().collect(Collectors.joining(",")));

    List<HivePartitionKey> partitionKeys = columnsToRead.stream()
            .filter(TestColumn::isPartitionKey)
            .map(input -> new HivePartitionKey(input.getName(), (String) input.getWriteValue()))
            .collect(toList());
    String partitionName = String.join("/", partitionKeys.stream()
            .map(partitionKey -> format("%s=%s", partitionKey.getName(), partitionKey.getValue()))
            .collect(toImmutableList()));

    List<HiveColumnHandle> columnHandles = getColumnHandles(columnsToRead);

    // Rewrite the effective predicate in terms of HiveColumnHandle keys
    TupleDomain<HiveColumnHandle> predicate = effectivePredicate.transformKeys(testColumn -> {
        Optional<HiveColumnHandle> handle = columnHandles.stream()
                .filter(column -> testColumn.getName().equals(column.getName()))
                .findFirst();
        checkState(handle.isPresent(), "Predicate on invalid column");
        return handle.get();
    });

    List<HivePageSourceProvider.ColumnMapping> columnMappings = buildColumnMappings(
            partitionName, partitionKeys, columnHandles, ImmutableList.of(), TableToPartitionMapping.empty(),
            split.getPath(), OptionalInt.empty(), split.getLength(), Instant.now().toEpochMilli());

    Optional<ConnectorPageSource> pageSource = HivePageSourceProvider.createHivePageSource(
            ImmutableSet.of(readerFactory), ImmutableSet.of(), new Configuration(false), session,
            split.getPath(), OptionalInt.empty(), split.getStart(), split.getLength(), split.getLength(),
            splitProperties, predicate, columnHandles, TESTING_TYPE_MANAGER, Optional.empty(), Optional.empty(),
            false, Optional.empty(), false, NO_ACID_TRANSACTION, columnMappings);

    assertTrue(pageSource.isPresent());
    return pageSource.get();
}
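A caller inside the same test class could exercise this helper roughly as follows. This is a minimal sketch, not code from TestOrcPredicates: the column constant COLUMN_BIGINT, the predicate value 123, the SESSION constant, the fileSplit variable, and expectedMatchingRows are all assumed fixtures introduced only for illustration; the only thing taken from the snippet above is the createPageSource signature.

    // Hypothetical usage sketch (assumes static imports of BIGINT, assertTrue, assertEquals)
    TestColumn bigintColumn = COLUMN_BIGINT;  // assumed test-fixture column
    TupleDomain<TestColumn> effectivePredicate = TupleDomain.withColumnDomains(
            ImmutableMap.of(bigintColumn, Domain.singleValue(BIGINT, 123L)));

    try (ConnectorPageSource pageSource = createPageSource(
            effectivePredicate, ImmutableList.of(bigintColumn), SESSION, fileSplit)) {
        int rows = 0;
        while (!pageSource.isFinished()) {
            Page page = pageSource.getNextPage();
            if (page != null) {
                rows += page.getPositionCount();
            }
        }
        // Row groups eliminated by the pushed-down ORC predicate never reach this loop
        assertEquals(rows, expectedMatchingRows);  // expected count is an assumed value
    }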
Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.
Class TestInformationSchemaMetadata, method testInformationSchemaPredicatePushdownWithConstraintPredicateOnViewsTable.
@Test
public void testInformationSchemaPredicatePushdownWithConstraintPredicateOnViewsTable()
{
    TransactionId transactionId = transactionManager.beginTransaction(false);

    // A constraint predicate that does not pin specific table names should not cause tables to be enumerated
    Constraint constraint = new Constraint(TupleDomain.all(), TestInformationSchemaMetadata::testConstraint, testConstraintColumns());

    ConnectorSession session = createNewSession(transactionId);
    ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata);
    InformationSchemaTableHandle tableHandle = (InformationSchemaTableHandle) metadata.getTableHandle(session, new SchemaTableName("information_schema", "views"));
    tableHandle = metadata.applyFilter(session, tableHandle, constraint)
            .map(ConstraintApplicationResult::getHandle)
            .map(InformationSchemaTableHandle.class::cast)
            .orElseThrow(AssertionError::new);
    assertEquals(tableHandle.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", "test_schema")));
}
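The constraint above references two private helpers, testConstraint and testConstraintColumns, that this listing does not include. Their actual bodies live in TestInformationSchemaMetadata and may differ in detail; the following is only a sketch of a plausible shape, under the assumption that the predicate restricts table_schema while leaving table_name open, which is consistent with the schema-level prefix asserted above.

    // Sketch only, not a verbatim copy of the Trino test class
    private static Set<ColumnHandle> testConstraintColumns()
    {
        // Columns the constraint predicate is allowed to inspect
        return ImmutableSet.of(
                new InformationSchemaColumnHandle("table_schema"),
                new InformationSchemaColumnHandle("table_name"));
    }

    private static boolean testConstraint(Map<ColumnHandle, NullableValue> bindings)
    {
        // Keep rows from test_schema; table_name is deliberately left unconstrained,
        // so applyFilter can narrow the prefix to the schema but not to a single table
        NullableValue schema = bindings.get(new InformationSchemaColumnHandle("table_schema"));
        return schema == null || Slices.utf8Slice("test_schema").equals(schema.getValue());
    }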
Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.
Class TestInformationSchemaMetadata, method testInformationSchemaPredicatePushdown.
/**
 * Tests information schema predicate pushdown when both schema and table name are specified.
 */
@Test
public void testInformationSchemaPredicatePushdown()
{
    TransactionId transactionId = transactionManager.beginTransaction(false);

    ImmutableMap.Builder<ColumnHandle, Domain> domains = ImmutableMap.builder();
    domains.put(new InformationSchemaColumnHandle("table_schema"), Domain.singleValue(VARCHAR, Slices.utf8Slice("test_schema")));
    domains.put(new InformationSchemaColumnHandle("table_name"), Domain.singleValue(VARCHAR, Slices.utf8Slice("test_view")));
    Constraint constraint = new Constraint(TupleDomain.withColumnDomains(domains.buildOrThrow()));

    ConnectorSession session = createNewSession(transactionId);
    ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata);
    InformationSchemaTableHandle tableHandle = (InformationSchemaTableHandle) metadata.getTableHandle(session, new SchemaTableName("information_schema", "views"));
    tableHandle = metadata.applyFilter(session, tableHandle, constraint)
            .map(ConstraintApplicationResult::getHandle)
            .map(InformationSchemaTableHandle.class::cast)
            .orElseThrow(AssertionError::new);
    assertEquals(tableHandle.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", "test_schema", "test_view")));
}
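Each of these tests obtains its ConnectorSession through the private helper createNewSession(transactionId), which is not part of this listing. A plausible sketch of such a helper, assuming Trino's TestingSession utilities and using illustrative catalog and schema names (the real method in TestInformationSchemaMetadata may be wired differently):

    // Sketch only: binds the test transaction to a session and exposes it as a ConnectorSession
    private static ConnectorSession createNewSession(TransactionId transactionId)
    {
        return testSessionBuilder()              // io.trino.testing.TestingSession.testSessionBuilder()
                .setCatalog("test_catalog")      // illustrative names
                .setSchema("information_schema")
                .setTransactionId(transactionId)
                .build()
                .toConnectorSession();
    }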
Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.
Class TestInformationSchemaMetadata, method testInformationSchemaPredicatePushdownOnCatalogWiseTables.
@Test
public void testInformationSchemaPredicatePushdownOnCatalogWiseTables()
{
    TransactionId transactionId = transactionManager.beginTransaction(false);

    // Predicate pushdown shouldn't work for catalog-wise tables because the table prefixes for them are always
    // ImmutableSet.of(new QualifiedTablePrefix(catalogName));
    Constraint constraint = new Constraint(TupleDomain.all());

    ConnectorSession session = createNewSession(transactionId);
    ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata);
    InformationSchemaTableHandle tableHandle = (InformationSchemaTableHandle) metadata.getTableHandle(session, new SchemaTableName("information_schema", "schemata"));
    Optional<ConstraintApplicationResult<ConnectorTableHandle>> result = metadata.applyFilter(session, tableHandle, constraint);
    assertFalse(result.isPresent());
}
Use of io.trino.spi.connector.ConnectorSession in project trino by trinodb.
Class TestInformationSchemaMetadata, method testInformationSchemaPredicatePushdownWithoutTablePredicate.
@Test
public void testInformationSchemaPredicatePushdownWithoutTablePredicate()
{
    TransactionId transactionId = transactionManager.beginTransaction(false);

    // A predicate without a table_name domain should not cause table-level prefixes to be evaluated
    ImmutableMap.Builder<ColumnHandle, Domain> domains = ImmutableMap.builder();
    domains.put(new InformationSchemaColumnHandle("table_schema"), Domain.singleValue(VARCHAR, Slices.utf8Slice("test_schema")));
    Constraint constraint = new Constraint(TupleDomain.withColumnDomains(domains.buildOrThrow()));

    ConnectorSession session = createNewSession(transactionId);
    ConnectorMetadata metadata = new InformationSchemaMetadata("test_catalog", this.metadata);
    InformationSchemaTableHandle tableHandle = (InformationSchemaTableHandle) metadata.getTableHandle(session, new SchemaTableName("information_schema", "views"));
    tableHandle = metadata.applyFilter(session, tableHandle, constraint)
            .map(ConstraintApplicationResult::getHandle)
            .map(InformationSchemaTableHandle.class::cast)
            .orElseThrow(AssertionError::new);
    assertEquals(tableHandle.getPrefixes(), ImmutableSet.of(new QualifiedTablePrefix("test_catalog", "test_schema")));
}
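Taken together, these tests suggest that prefix narrowing depends on the shape of the pushed-down domain: only finite, discrete sets of schema or table names can be turned into QualifiedTablePrefix values. As an illustration of that assumption (not something the original tests assert), a table_name domain expressed as an open range would leave only the discrete table_schema value available for narrowing:

    // Sketch: a non-discrete table_name domain cannot be enumerated into table-level prefixes (assumption)
    ImmutableMap.Builder<ColumnHandle, Domain> domains = ImmutableMap.builder();
    domains.put(new InformationSchemaColumnHandle("table_schema"), Domain.singleValue(VARCHAR, Slices.utf8Slice("test_schema")));
    domains.put(new InformationSchemaColumnHandle("table_name"),
            Domain.create(ValueSet.ofRanges(Range.greaterThan(VARCHAR, Slices.utf8Slice("m"))), false));
    Constraint rangeConstraint = new Constraint(TupleDomain.withColumnDomains(domains.buildOrThrow()));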