Use of io.trino.spi.connector.ColumnHandle in project trino by trinodb.
From the class AbstractTestHive, method setupHive:
protected void setupHive(String databaseName) {
    database = databaseName;
    tablePartitionFormat = new SchemaTableName(database, "trino_test_partition_format");
    tableUnpartitioned = new SchemaTableName(database, "trino_test_unpartitioned");
    tableOffline = new SchemaTableName(database, "trino_test_offline");
    tableOfflinePartition = new SchemaTableName(database, "trino_test_offline_partition");
    tableNotReadable = new SchemaTableName(database, "trino_test_not_readable");
    view = new SchemaTableName(database, "trino_test_view");
    invalidTable = new SchemaTableName(database, INVALID_TABLE);
    tableBucketedStringInt = new SchemaTableName(database, "trino_test_bucketed_by_string_int");
    tableBucketedBigintBoolean = new SchemaTableName(database, "trino_test_bucketed_by_bigint_boolean");
    tableBucketedDoubleFloat = new SchemaTableName(database, "trino_test_bucketed_by_double_float");
    tablePartitionSchemaChange = new SchemaTableName(database, "trino_test_partition_schema_change");
    tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "trino_test_partition_schema_change_non_canonical");
    tableBucketEvolution = new SchemaTableName(database, "trino_test_bucket_evolution");
    invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty());
    dsColumn = createBaseColumn("ds", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty());
    fileFormatColumn = createBaseColumn("file_format", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty());
    dummyColumn = createBaseColumn("dummy", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty());
    intColumn = createBaseColumn("t_int", -1, HIVE_INT, INTEGER, PARTITION_KEY, Optional.empty());
    invalidColumnHandle = createBaseColumn(INVALID_COLUMN, 0, HIVE_STRING, VARCHAR, REGULAR, Optional.empty());
    List<ColumnHandle> partitionColumns = ImmutableList.of(dsColumn, fileFormatColumn, dummyColumn);
    tablePartitionFormatPartitions = ImmutableList.<HivePartition>builder()
            .add(new HivePartition(
                    tablePartitionFormat,
                    "ds=2012-12-29/file_format=textfile/dummy=1",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("textfile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 1L))
                            .buildOrThrow()))
            .add(new HivePartition(
                    tablePartitionFormat,
                    "ds=2012-12-29/file_format=sequencefile/dummy=2",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("sequencefile")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 2L))
                            .buildOrThrow()))
            .add(new HivePartition(
                    tablePartitionFormat,
                    "ds=2012-12-29/file_format=rctext/dummy=3",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rctext")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 3L))
                            .buildOrThrow()))
            .add(new HivePartition(
                    tablePartitionFormat,
                    "ds=2012-12-29/file_format=rcbinary/dummy=4",
                    ImmutableMap.<ColumnHandle, NullableValue>builder()
                            .put(dsColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("2012-12-29")))
                            .put(fileFormatColumn, NullableValue.of(createUnboundedVarcharType(), utf8Slice("rcbinary")))
                            .put(dummyColumn, NullableValue.of(INTEGER, 4L))
                            .buildOrThrow()))
            .build();
    tableUnpartitionedPartitions = ImmutableList.of(new HivePartition(tableUnpartitioned));
    tablePartitionFormatProperties = new ConnectorTableProperties(
            TupleDomain.withColumnDomains(ImmutableMap.of(
                    dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                    fileFormatColumn, Domain.create(
                            ValueSet.ofRanges(
                                    Range.equal(createUnboundedVarcharType(), utf8Slice("textfile")),
                                    Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile")),
                                    Range.equal(createUnboundedVarcharType(), utf8Slice("rctext")),
                                    Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))),
                            false),
                    dummyColumn, Domain.create(
                            ValueSet.ofRanges(
                                    Range.equal(INTEGER, 1L),
                                    Range.equal(INTEGER, 2L),
                                    Range.equal(INTEGER, 3L),
                                    Range.equal(INTEGER, 4L)),
                            false))),
            Optional.empty(),
            Optional.empty(),
            Optional.of(new DiscretePredicates(partitionColumns, ImmutableList.of(
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("textfile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 1L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("sequencefile"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 2L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rctext"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 3L)), false))),
                    TupleDomain.withColumnDomains(ImmutableMap.of(
                            dsColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("2012-12-29"))), false),
                            fileFormatColumn, Domain.create(ValueSet.ofRanges(Range.equal(createUnboundedVarcharType(), utf8Slice("rcbinary"))), false),
                            dummyColumn, Domain.create(ValueSet.ofRanges(Range.equal(INTEGER, 4L)), false)))))),
            ImmutableList.of());
    tableUnpartitionedProperties = new ConnectorTableProperties();
}
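A note on the handles above: io.trino.spi.connector.ColumnHandle is a marker interface with no methods of its own, so everything this fixture does with handles, including keying the ImmutableMap<ColumnHandle, NullableValue> partition maps, relies only on the concrete class providing value-based equals and hashCode. A minimal sketch of a connector-specific handle; the name ExampleColumnHandle is hypothetical, not part of the Hive connector:

import io.trino.spi.connector.ColumnHandle;

// A record supplies the value-based equals/hashCode that lets the handle
// key maps such as ImmutableMap<ColumnHandle, NullableValue> above.
// Hypothetical illustration, not a class from the Trino codebase.
public record ExampleColumnHandle(String name, int ordinal)
        implements ColumnHandle {
}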
Use of io.trino.spi.connector.ColumnHandle in project trino by trinodb.
From the class TestLocalDynamicFiltersCollector, method testDynamicFilterCoercion:
@Test
public void testDynamicFilterCoercion() {
    LocalDynamicFiltersCollector collector = new LocalDynamicFiltersCollector(TEST_SESSION);
    DynamicFilterId filterId = new DynamicFilterId("filter");
    collector.register(ImmutableSet.of(filterId));
    SymbolAllocator symbolAllocator = new SymbolAllocator();
    Symbol symbol = symbolAllocator.newSymbol("symbol", INTEGER);
    ColumnHandle column = new TestingColumnHandle("column");
    DynamicFilter filter = createDynamicFilter(
            collector,
            ImmutableList.of(new DynamicFilters.Descriptor(filterId, new Cast(symbol.toSymbolReference(), toSqlType(BIGINT)))),
            ImmutableMap.of(symbol, column),
            symbolAllocator.getTypes());
    assertEquals(filter.getColumnsCovered(), Set.of(column), "columns covered");
    // Filter is blocked and not completed.
    CompletableFuture<?> isBlocked = filter.isBlocked();
    assertFalse(filter.isComplete());
    assertTrue(filter.isAwaitable());
    assertFalse(isBlocked.isDone());
    assertEquals(filter.getCurrentPredicate(), TupleDomain.all());
    // Collect a BIGINT domain; the filter coerces it back to the INTEGER probe column.
    Domain domain = Domain.singleValue(BIGINT, 7L);
    collector.collectDynamicFilterDomains(ImmutableMap.of(filterId, domain));
    // Unblocked and completed.
    assertTrue(filter.isComplete());
    assertFalse(filter.isAwaitable());
    assertTrue(isBlocked.isDone());
    assertEquals(filter.getCurrentPredicate(), TupleDomain.withColumnDomains(ImmutableMap.of(column, Domain.singleValue(INTEGER, 7L))));
}
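The assertions above trace the consumer contract of DynamicFilter: isBlocked() completes whenever new domains arrive, and isAwaitable() stays true while further narrowing may still come. A minimal sketch of the consumer loop that contract implies; waitForFilter is a hypothetical helper, not Trino API:

import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.predicate.TupleDomain;

// Block until the filter cannot narrow any further, then read the predicate.
// Hypothetical sketch built only from the methods the test exercises.
static TupleDomain<ColumnHandle> waitForFilter(DynamicFilter filter) {
    while (filter.isAwaitable()) {
        filter.isBlocked().join(); // completes when new domains are collected
    }
    return filter.getCurrentPredicate();
}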
Use of io.trino.spi.connector.ColumnHandle in project trino by trinodb.
From the class TestLocalDynamicFiltersCollector, method testMultipleBuildColumnsSingleProbeColumn:
@Test
public void testMultipleBuildColumnsSingleProbeColumn() {
    LocalDynamicFiltersCollector collector = new LocalDynamicFiltersCollector(TEST_SESSION);
    DynamicFilterId filter1 = new DynamicFilterId("filter1");
    DynamicFilterId filter2 = new DynamicFilterId("filter2");
    collector.register(ImmutableSet.of(filter1));
    collector.register(ImmutableSet.of(filter2));
    // Multiple build-side columns matching the same probe-side column.
    SymbolAllocator symbolAllocator = new SymbolAllocator();
    Symbol symbol = symbolAllocator.newSymbol("symbol", BIGINT);
    ColumnHandle column = new TestingColumnHandle("column");
    DynamicFilter filter = createDynamicFilter(
            collector,
            ImmutableList.of(
                    new DynamicFilters.Descriptor(filter1, symbol.toSymbolReference()),
                    new DynamicFilters.Descriptor(filter2, symbol.toSymbolReference())),
            ImmutableMap.of(symbol, column),
            symbolAllocator.getTypes());
    assertEquals(filter.getColumnsCovered(), Set.of(column), "columns covered");
    // Filter is blocking and not completed.
    CompletableFuture<?> isBlocked = filter.isBlocked();
    assertFalse(filter.isComplete());
    assertTrue(filter.isAwaitable());
    assertFalse(isBlocked.isDone());
    assertEquals(filter.getCurrentPredicate(), TupleDomain.all());
    collector.collectDynamicFilterDomains(ImmutableMap.of(filter1, Domain.multipleValues(BIGINT, ImmutableList.of(1L, 2L, 3L))));
    // Unblocked, but not completed.
    assertFalse(filter.isComplete());
    assertTrue(filter.isAwaitable());
    assertTrue(isBlocked.isDone());
    assertEquals(filter.getCurrentPredicate(), TupleDomain.withColumnDomains(ImmutableMap.of(column, Domain.multipleValues(BIGINT, ImmutableList.of(1L, 2L, 3L)))));
    // Create a new blocking future, waiting for the next completion.
    isBlocked = filter.isBlocked();
    assertFalse(isBlocked.isDone());
    assertFalse(filter.isComplete());
    assertTrue(filter.isAwaitable());
    collector.collectDynamicFilterDomains(ImmutableMap.of(filter2, Domain.multipleValues(BIGINT, ImmutableList.of(2L, 3L, 4L))));
    // Unblocked and completed; the two domains are intersected.
    assertTrue(filter.isComplete());
    assertFalse(filter.isAwaitable());
    assertTrue(isBlocked.isDone());
    assertEquals(filter.getCurrentPredicate(), TupleDomain.withColumnDomains(ImmutableMap.of(column, Domain.multipleValues(BIGINT, ImmutableList.of(2L, 3L)))));
}
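The final assertion encodes intersection semantics: two dynamic filters covering the same probe column can only narrow it further. A standalone sketch of the same arithmetic using the Domain API directly; this reproduces the observable result, not necessarily the collector's internal implementation:

import io.trino.spi.predicate.Domain;
import java.util.List;
import static io.trino.spi.type.BigintType.BIGINT;

// {1, 2, 3} intersected with {2, 3, 4} leaves {2, 3}, matching the
// test's final predicate for the shared probe column.
static Domain intersectBuildDomains() {
    Domain first = Domain.multipleValues(BIGINT, List.of(1L, 2L, 3L));
    Domain second = Domain.multipleValues(BIGINT, List.of(2L, 3L, 4L));
    return first.intersect(second);
}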
Use of io.trino.spi.connector.ColumnHandle in project trino by trinodb.
From the class TestTableScanRedirectionWithPushdown, method testRedirectionBeforeDeferencePushdown:
@Test
public void testRedirectionBeforeDeferencePushdown() {
    // The connector can detect that source_col_a and source_col_d are projected.
    try (LocalQueryRunner queryRunner = createLocalQueryRunner(
            mockApplyRedirectAfterProjectionPushdown(ROW_TYPE_REDIRECTION_MAPPING_AD, Optional.of(ImmutableSet.of(SOURCE_COLUMN_HANDLE_A, SOURCE_COLUMN_HANDLE_D))),
            Optional.of(this::mockApplyProjection),
            Optional.empty())) {
        // Pushdown of the dereference source_col_d.a into the table scan results in a new column handle.
        // Table scan redirection would not take place if dereference pushdown had already happened before the redirection.
        ColumnHandle destinationColumnHandleC0 = new MockConnectorColumnHandle(DESTINATION_COLUMN_NAME_C + "#0", BIGINT);
        assertPlan(
                queryRunner,
                "SELECT source_col_a, source_col_d.a FROM test_table",
                output(
                        ImmutableList.of("DEST_COL_A", "DEST_COL_C#0"),
                        tableScan(
                                new MockConnectorTableHandle(DESTINATION_TABLE, TupleDomain.all(), Optional.of(ImmutableList.of(DESTINATION_COLUMN_HANDLE_A, destinationColumnHandleC0)))::equals,
                                TupleDomain.all(),
                                ImmutableMap.of(
                                        "DEST_COL_A", DESTINATION_COLUMN_HANDLE_A::equals,
                                        "DEST_COL_C#0", destinationColumnHandleC0::equals))));
    }
}
Use of io.trino.spi.connector.ColumnHandle in project trino by trinodb.
From the class TestTableScanRedirectionWithPushdown, method mockApplyProjection:
private Optional<ProjectionApplicationResult<ConnectorTableHandle>> mockApplyProjection(
        ConnectorSession session,
        ConnectorTableHandle tableHandle,
        List<ConnectorExpression> projections,
        Map<String, ColumnHandle> assignments) {
    MockConnectorTableHandle handle = (MockConnectorTableHandle) tableHandle;
    ImmutableList.Builder<ColumnHandle> newColumnsBuilder = ImmutableList.builder();
    ImmutableList.Builder<ConnectorExpression> outputExpressions = ImmutableList.builder();
    ImmutableList.Builder<Assignment> outputAssignments = ImmutableList.builder();
    for (ConnectorExpression projection : projections) {
        String newVariableName;
        ColumnHandle newColumnHandle;
        if (projection instanceof Variable) {
            // A plain column reference resolves through the existing assignments.
            newVariableName = ((Variable) projection).getName();
            newColumnHandle = assignments.get(newVariableName);
        }
        else if (projection instanceof FieldDereference) {
            // A row-field access synthesizes a new handle named "<column>#<field index>".
            FieldDereference dereference = (FieldDereference) projection;
            if (!(dereference.getTarget() instanceof Variable)) {
                throw new UnsupportedOperationException();
            }
            String dereferenceTargetName = ((Variable) dereference.getTarget()).getName();
            newVariableName = ((MockConnectorColumnHandle) assignments.get(dereferenceTargetName)).getName() + "#" + dereference.getField();
            newColumnHandle = new MockConnectorColumnHandle(newVariableName, projection.getType());
        }
        else {
            throw new UnsupportedOperationException();
        }
        Variable newVariable = new Variable(newVariableName, projection.getType());
        newColumnsBuilder.add(newColumnHandle);
        outputExpressions.add(newVariable);
        outputAssignments.add(new Assignment(newVariableName, newColumnHandle, projection.getType()));
    }
    List<ColumnHandle> newColumns = newColumnsBuilder.build();
    if (handle.getColumns().isPresent() && newColumns.equals(handle.getColumns().get())) {
        // Nothing changed; returning empty signals that the projection was not applied.
        return Optional.empty();
    }
    return Optional.of(new ProjectionApplicationResult<>(
            new MockConnectorTableHandle(handle.getTableName(), handle.getConstraint(), Optional.of(newColumns)),
            outputExpressions.build(),
            outputAssignments.build(),
            false));
}
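To make the "#<field index>" naming convention concrete, a hypothetical walk-through of the dereference branch; the identifiers below are illustrative only:

// Given assignments = {"d" -> MockConnectorColumnHandle("dest_col_c", <row type>)},
// a projection dereferencing field 0 of variable "d" takes the FieldDereference
// branch above and produces:
//   newVariableName = "dest_col_c#0"
//   newColumnHandle = new MockConnectorColumnHandle("dest_col_c#0", BIGINT)
//   outputAssignments += Assignment("dest_col_c#0", newColumnHandle, BIGINT)
// which is exactly the "DEST_COL_C#0" handle asserted in the plan test above.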