Use of com.facebook.presto.spi.relation.RowExpression in project presto by prestodb.
Example from the class TestJdbcComputePushdown, method testJdbcComputePushdownUnsupported:
@Test
public void testJdbcComputePushdownUnsupported() {
    String table = "test_table";
    String schema = "test_schema";
    String expression = "(c1 + c2) > c2";
    TypeProvider typeProvider = TypeProvider.copyOf(ImmutableMap.of("c1", BIGINT, "c2", BIGINT));
    RowExpression rowExpression = sqlToRowExpressionTranslator.translateAndOptimize(expression(expression), typeProvider);
    Set<ColumnHandle> columns = Stream.of("c1", "c2")
            .map(TestJdbcComputePushdown::integerJdbcColumnHandle)
            .collect(Collectors.toSet());
    PlanNode original = filter(jdbcTableScan(schema, table, BIGINT, "c1", "c2"), rowExpression);
    JdbcTableHandle jdbcTableHandle = new JdbcTableHandle(CONNECTOR_ID, new SchemaTableName(schema, table), CATALOG_NAME, schema, table);
    ConnectorSession session = new TestingConnectorSession(ImmutableList.of());

    // The test expects an empty translatedSql entry because ">" is currently an unsupported function in the optimizer,
    // so the optimized plan should keep the original filter on top of the table scan.
    JdbcTableLayoutHandle jdbcTableLayoutHandle = new JdbcTableLayoutHandle(session.getSqlFunctionProperties(), jdbcTableHandle, TupleDomain.none(), Optional.empty());
    PlanNode actual = this.jdbcComputePushdown.optimize(original, session, null, ID_ALLOCATOR);
    assertPlanMatch(actual, PlanMatchPattern.filter(expression, JdbcTableScanMatcher.jdbcTableScanPattern(jdbcTableLayoutHandle, columns)));
}
Use of com.facebook.presto.spi.relation.RowExpression in project presto by prestodb.
Example from the class LogicalRowExpressions, method crossProduct:
private static List<List<RowExpression>> crossProduct(List<List<RowExpression>> previousCrossProduct, List<RowExpression> clauses) {
    List<List<RowExpression>> result = new ArrayList<>();
    for (List<RowExpression> previousClauses : previousCrossProduct) {
        for (RowExpression newClause : clauses) {
            // extend every partial combination built so far with each of the new clauses
            List<RowExpression> newClauses = new ArrayList<>(previousClauses);
            newClauses.add(newClause);
            result.add(newClauses);
        }
    }
    return result;
}
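The helper extends every partial combination built so far with each of the new clauses, the kind of step used when distributing clauses across conjunctions and disjunctions. A minimal, self-contained sketch of the same loop, with String standing in for RowExpression and the CrossProductSketch class added purely for illustration:

import java.util.ArrayList;
import java.util.List;

public class CrossProductSketch {
    // Same algorithm as crossProduct above, with String standing in for RowExpression.
    static List<List<String>> crossProduct(List<List<String>> previous, List<String> clauses) {
        List<List<String>> result = new ArrayList<>();
        for (List<String> previousClauses : previous) {
            for (String newClause : clauses) {
                List<String> newClauses = new ArrayList<>(previousClauses);
                newClauses.add(newClause);
                result.add(newClauses);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // Start from the singleton combinations of the first clause list, then fold in the second.
        List<List<String>> initial = List.of(List.of("a"), List.of("b"));
        // Prints [[a, c], [a, d], [b, c], [b, d]]
        System.out.println(crossProduct(initial, List.of("c", "d")));
    }
}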
Use of com.facebook.presto.spi.relation.RowExpression in project presto by prestodb.
Example from the class DynamicFilters, method getPlaceholder:
public static Optional<DynamicFilterPlaceholder> getPlaceholder(RowExpression expression) {
    if (!(expression instanceof CallExpression)) {
        return Optional.empty();
    }
    CallExpression call = (CallExpression) expression;
    List<RowExpression> arguments = call.getArguments();
    if (!call.getDisplayName().equals(DynamicFilterPlaceholderFunction.NAME)) {
        return Optional.empty();
    }

    // A dynamic filter placeholder call carries exactly three arguments: the probe-side expression,
    // the comparison operator name as a varchar constant, and the filter id as a varchar constant.
    checkArgument(arguments.size() == 3, "invalid arguments count: %s", arguments.size());
    RowExpression probeSymbol = arguments.get(0);

    RowExpression operatorExpression = arguments.get(1);
    checkArgument(operatorExpression instanceof ConstantExpression);
    checkArgument(operatorExpression.getType() instanceof VarcharType);
    String operator = ((Slice) ((ConstantExpression) operatorExpression).getValue()).toStringUtf8();

    RowExpression idExpression = arguments.get(2);
    checkArgument(idExpression instanceof ConstantExpression);
    checkArgument(idExpression.getType() instanceof VarcharType);
    String id = ((Slice) ((ConstantExpression) idExpression).getValue()).toStringUtf8();

    OperatorType operatorType = OperatorType.valueOf(operator);
    if (operatorType.isComparisonOperator()) {
        return Optional.of(new DynamicFilterPlaceholder(id, probeSymbol, operatorType));
    }
    return Optional.empty();
}
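getPlaceholder only matches a call with the dynamic filter placeholder function's display name and exactly three arguments: the probe-side expression, the comparison operator name as a varchar constant, and the filter id as a varchar constant. A minimal, self-contained sketch of that three-argument shape, using plain Java stand-ins (the Call and Placeholder records and the "DYNAMIC_FILTER" literal are illustrative only; the real code works with CallExpression, ConstantExpression, DynamicFilterPlaceholder, and DynamicFilterPlaceholderFunction.NAME):

import java.util.List;
import java.util.Optional;

public class PlaceholderShapeSketch {
    // Simplified stand-ins for CallExpression and DynamicFilterPlaceholder, not Presto classes.
    record Call(String displayName, List<Object> arguments) {}
    record Placeholder(String id, Object probe, String operator) {}

    static Optional<Placeholder> getPlaceholder(Object expression) {
        // "DYNAMIC_FILTER" stands in for DynamicFilterPlaceholderFunction.NAME.
        if (!(expression instanceof Call call) || !call.displayName().equals("DYNAMIC_FILTER")) {
            return Optional.empty();
        }
        // argument 0: probe-side expression; argument 1: comparison operator name; argument 2: filter id
        Object probe = call.arguments().get(0);
        String operator = (String) call.arguments().get(1);
        String id = (String) call.arguments().get(2);
        return Optional.of(new Placeholder(id, probe, operator));
    }

    public static void main(String[] args) {
        Call call = new Call("DYNAMIC_FILTER", List.of("orders.custkey", "EQUAL", "df_1"));
        // Prints Optional[Placeholder[id=df_1, probe=orders.custkey, operator=EQUAL]]
        System.out.println(getPlaceholder(call));
    }
}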
Use of com.facebook.presto.spi.relation.RowExpression in project presto by prestodb.
Example from the class HiveMetadata, method getTableStatistics:
@Override
public TableStatistics getTableStatistics(ConnectorSession session, ConnectorTableHandle tableHandle, Optional<ConnectorTableLayoutHandle> tableLayoutHandle, List<ColumnHandle> columnHandles, Constraint<ColumnHandle> constraint) {
    if (!isStatisticsEnabled(session)) {
        return TableStatistics.empty();
    }

    // Without a layout handle, or with filter pushdown disabled, compute statistics directly from the constraint.
    if (!tableLayoutHandle.isPresent() || !((HiveTableLayoutHandle) tableLayoutHandle.get()).isPushdownFilterEnabled()) {
        Map<String, ColumnHandle> columns = columnHandles.stream()
                .map(HiveColumnHandle.class::cast)
                .filter(not(HiveColumnHandle::isHidden))
                .collect(toImmutableMap(HiveColumnHandle::getName, Function.identity()));
        Map<String, Type> columnTypes = columns.entrySet().stream()
                .collect(toImmutableMap(Map.Entry::getKey, entry -> getColumnMetadata(session, tableHandle, entry.getValue()).getType()));
        List<HivePartition> partitions = partitionManager.getPartitions(metastore, tableHandle, constraint, session).getPartitions();
        return hiveStatisticsProvider.getTableStatistics(session, ((HiveTableHandle) tableHandle).getSchemaTableName(), columns, columnTypes, partitions);
    }

    // With filter pushdown enabled, fold the layout's domain predicate and remaining predicate into the statistics.
    verify(!constraint.predicate().isPresent());
    HiveTableLayoutHandle hiveLayoutHandle = (HiveTableLayoutHandle) tableLayoutHandle.get();
    Set<String> columnNames = columnHandles.stream()
            .map(HiveColumnHandle.class::cast)
            .map(HiveColumnHandle::getName)
            .collect(toImmutableSet());
    Set<ColumnHandle> allColumnHandles = ImmutableSet.<ColumnHandle>builder()
            .addAll(columnHandles)
            .addAll(hiveLayoutHandle.getPredicateColumns().values().stream()
                    .filter(column -> !columnNames.contains(column.getName()))
                    .collect(toImmutableList()))
            .build();
    Map<String, ColumnHandle> allColumns = Maps.uniqueIndex(allColumnHandles, column -> ((HiveColumnHandle) column).getName());
    Map<String, Type> allColumnTypes = allColumns.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> getColumnMetadata(session, tableHandle, entry.getValue()).getType()));
    Constraint<ColumnHandle> combinedConstraint = new Constraint<>(constraint.getSummary().intersect(hiveLayoutHandle.getDomainPredicate()
            .transform(subfield -> isEntireColumn(subfield) ? subfield.getRootName() : null)
            .transform(allColumns::get)));
    SubfieldExtractor subfieldExtractor = new SubfieldExtractor(functionResolution, rowExpressionService.getExpressionOptimizer(), session);
    RowExpression domainPredicate = rowExpressionService.getDomainTranslator().toPredicate(hiveLayoutHandle.getDomainPredicate()
            .transform(subfield -> subfieldExtractor.toRowExpression(subfield, allColumnTypes.get(subfield.getRootName()))));
    RowExpression combinedPredicate = binaryExpression(SpecialFormExpression.Form.AND, ImmutableList.of(hiveLayoutHandle.getRemainingPredicate(), domainPredicate));
    List<HivePartition> partitions = partitionManager.getPartitions(metastore, tableHandle, combinedConstraint, session).getPartitions();
    TableStatistics tableStatistics = hiveStatisticsProvider.getTableStatistics(session, ((HiveTableHandle) tableHandle).getSchemaTableName(), allColumns, allColumnTypes, partitions);
    return filterStatsCalculatorService.filterStats(tableStatistics, combinedPredicate, session, ImmutableBiMap.copyOf(allColumns).inverse(), allColumnTypes);
}
Use of com.facebook.presto.spi.relation.RowExpression in project presto by prestodb.
Example from the class HivePageSourceProvider, method createSelectivePageSource:
private static Optional<ConnectorPageSource> createSelectivePageSource(
        Set<HiveSelectivePageSourceFactory> selectivePageSourceFactories, Configuration configuration, ConnectorSession session, HiveSplit split,
        HiveTableLayoutHandle layout, List<HiveColumnHandle> columns, DateTimeZone hiveStorageTimeZone, TypeManager typeManager,
        LoadingCache<RowExpressionCacheKey, RowExpression> rowExpressionCache, SplitContext splitContext, Optional<EncryptionInformation> encryptionInformation) {
    Set<HiveColumnHandle> interimColumns = ImmutableSet.<HiveColumnHandle>builder()
            .addAll(layout.getPredicateColumns().values())
            .addAll(split.getBucketConversion().map(BucketConversion::getBucketColumnHandles).orElse(ImmutableList.of()))
            .build();
    Set<String> columnNames = columns.stream().map(HiveColumnHandle::getName).collect(toImmutableSet());
    List<HiveColumnHandle> allColumns = ImmutableList.<HiveColumnHandle>builder()
            .addAll(columns)
            .addAll(interimColumns.stream().filter(column -> !columnNames.contains(column.getName())).collect(toImmutableList()))
            .build();
    Path path = new Path(split.getPath());
    List<ColumnMapping> columnMappings = ColumnMapping.buildColumnMappings(split.getPartitionKeys(), allColumns, ImmutableList.of(), split.getTableToPartitionMapping(), path, split.getTableBucketNumber(), split.getFileSize(), split.getFileModifiedTime());
    Optional<BucketAdaptation> bucketAdaptation = split.getBucketConversion().map(conversion -> toBucketAdaptation(conversion, columnMappings, split.getTableBucketNumber(), mapping -> mapping.getHiveColumnHandle().getHiveColumnIndex()));
    Map<Integer, String> prefilledValues = columnMappings.stream()
            .filter(mapping -> mapping.getKind() == ColumnMappingKind.PREFILLED)
            .collect(toImmutableMap(mapping -> mapping.getHiveColumnHandle().getHiveColumnIndex(), ColumnMapping::getPrefilledValue));
    Map<Integer, HiveCoercer> coercers = columnMappings.stream()
            .filter(mapping -> mapping.getCoercionFrom().isPresent())
            .collect(toImmutableMap(mapping -> mapping.getHiveColumnHandle().getHiveColumnIndex(), mapping -> createCoercer(typeManager, mapping.getCoercionFrom().get(), mapping.getHiveColumnHandle().getHiveType())));
    List<Integer> outputColumns = columns.stream().map(HiveColumnHandle::getHiveColumnIndex).collect(toImmutableList());
    // The remaining predicate is optimized once per (expression, session) pair and reused across splits via the cache.
    RowExpression optimizedRemainingPredicate = rowExpressionCache.getUnchecked(new RowExpressionCacheKey(layout.getRemainingPredicate(), session));
    if (shouldSkipBucket(layout, split, splitContext)) {
        return Optional.of(new HiveEmptySplitPageSource());
    }
    if (shouldSkipPartition(typeManager, layout, hiveStorageTimeZone, split, splitContext)) {
        return Optional.of(new HiveEmptySplitPageSource());
    }
    CacheQuota cacheQuota = generateCacheQuota(split);
    // Try each registered selective page source factory in turn; the first one that produces a page source wins.
    for (HiveSelectivePageSourceFactory pageSourceFactory : selectivePageSourceFactories) {
        Optional<? extends ConnectorPageSource> pageSource = pageSourceFactory.createPageSource(
                configuration, session, path, split.getStart(), split.getLength(), split.getFileSize(), split.getStorage(),
                toColumnHandles(columnMappings, true), prefilledValues, coercers, bucketAdaptation, outputColumns,
                splitContext.getDynamicFilterPredicate()
                        .map(filter -> filter.transform(handle -> new Subfield(((HiveColumnHandle) handle).getName())).intersect(layout.getDomainPredicate()))
                        .orElse(layout.getDomainPredicate()),
                optimizedRemainingPredicate, hiveStorageTimeZone,
                new HiveFileContext(splitContext.isCacheable(), cacheQuota, split.getExtraFileInfo().map(BinaryExtraHiveFileInfo::new), Optional.of(split.getFileSize()), split.getFileModifiedTime(), HiveSessionProperties.isVerboseRuntimeStatsEnabled(session)),
                encryptionInformation);
        if (pageSource.isPresent()) {
            return Optional.of(pageSource.get());
        }
    }
    return Optional.empty();
}