Use of io.trino.metadata.TableMetadata in project trino by trinodb.
Class ApplyTableScanRedirection, method apply().
@Override
public Result apply(TableScanNode scanNode, Captures captures, Context context) {
    Optional<TableScanRedirectApplicationResult> tableScanRedirectApplicationResult = plannerContext.getMetadata().applyTableScanRedirect(context.getSession(), scanNode.getTable());
    if (tableScanRedirectApplicationResult.isEmpty()) {
        return Result.empty();
    }
    CatalogSchemaTableName destinationTable = tableScanRedirectApplicationResult.get().getDestinationTable();
    QualifiedObjectName destinationObjectName = convertFromSchemaTableName(destinationTable.getCatalogName()).apply(destinationTable.getSchemaTableName());
    Optional<QualifiedObjectName> redirectedObjectName = plannerContext.getMetadata().getRedirectionAwareTableHandle(context.getSession(), destinationObjectName).getRedirectedTableName();
    redirectedObjectName.ifPresent(name -> {
        throw new TrinoException(NOT_SUPPORTED, format("Further redirection of destination table '%s' to '%s' is not supported", destinationObjectName, name));
    });
    TableMetadata tableMetadata = plannerContext.getMetadata().getTableMetadata(context.getSession(), scanNode.getTable());
    CatalogSchemaTableName sourceTable = new CatalogSchemaTableName(tableMetadata.getCatalogName().getCatalogName(), tableMetadata.getTable());
    if (destinationTable.equals(sourceTable)) {
        return Result.empty();
    }
    Optional<TableHandle> destinationTableHandle = plannerContext.getMetadata().getTableHandle(context.getSession(), convertFromSchemaTableName(destinationTable.getCatalogName()).apply(destinationTable.getSchemaTableName()));
    if (destinationTableHandle.isEmpty()) {
        throw new TrinoException(TABLE_NOT_FOUND, format("Destination table %s from table scan redirection not found", destinationTable));
    }
    Map<ColumnHandle, String> columnMapping = tableScanRedirectApplicationResult.get().getDestinationColumns();
    Map<String, ColumnHandle> destinationColumnHandles = plannerContext.getMetadata().getColumnHandles(context.getSession(), destinationTableHandle.get());
    ImmutableMap.Builder<Symbol, Cast> casts = ImmutableMap.builder();
    ImmutableMap.Builder<Symbol, ColumnHandle> newAssignmentsBuilder = ImmutableMap.builder();
    for (Map.Entry<Symbol, ColumnHandle> assignment : scanNode.getAssignments().entrySet()) {
        String destinationColumn = columnMapping.get(assignment.getValue());
        if (destinationColumn == null) {
            throw new TrinoException(COLUMN_NOT_FOUND, format("Did not find mapping for source column %s in table scan redirection", assignment.getValue()));
        }
        ColumnHandle destinationColumnHandle = destinationColumnHandles.get(destinationColumn);
        if (destinationColumnHandle == null) {
            throw new TrinoException(COLUMN_NOT_FOUND, format("Did not find handle for column %s in destination table %s", destinationColumn, destinationTable));
        }
        // insert casts if redirected types don't match source types
        Type sourceType = context.getSymbolAllocator().getTypes().get(assignment.getKey());
        Type redirectedType = plannerContext.getMetadata().getColumnMetadata(context.getSession(), destinationTableHandle.get(), destinationColumnHandle).getType();
        if (!sourceType.equals(redirectedType)) {
            Symbol redirectedSymbol = context.getSymbolAllocator().newSymbol(destinationColumn, redirectedType);
            Cast cast = getCast(context.getSession(), destinationTable, destinationColumn, redirectedType, redirectedSymbol, sourceTable, assignment.getValue(), sourceType);
            casts.put(assignment.getKey(), cast);
            newAssignmentsBuilder.put(redirectedSymbol, destinationColumnHandle);
        } else {
            newAssignmentsBuilder.put(assignment.getKey(), destinationColumnHandle);
        }
    }
    TupleDomain<String> requiredFilter = tableScanRedirectApplicationResult.get().getFilter();
    if (requiredFilter.isAll()) {
        ImmutableMap<Symbol, ColumnHandle> newAssignments = newAssignmentsBuilder.buildOrThrow();
        return Result.ofPlanNode(applyProjection(
                context.getIdAllocator(),
                ImmutableSet.copyOf(scanNode.getOutputSymbols()),
                casts.buildOrThrow(),
                new TableScanNode(
                        scanNode.getId(),
                        destinationTableHandle.get(),
                        ImmutableList.copyOf(newAssignments.keySet()),
                        newAssignments,
                        TupleDomain.all(),
                        // Use table statistics from destination table
                        Optional.empty(),
                        scanNode.isUpdateTarget(),
                        Optional.empty())));
    }
    Map<ColumnHandle, Symbol> inverseAssignments = ImmutableBiMap.copyOf(scanNode.getAssignments()).inverse();
    Map<String, ColumnHandle> inverseColumnsMapping = ImmutableBiMap.copyOf(columnMapping).inverse();
    TupleDomain<Symbol> transformedConstraint = requiredFilter.transformKeys(destinationColumn -> {
        ColumnHandle sourceColumnHandle = inverseColumnsMapping.get(destinationColumn);
        if (sourceColumnHandle == null) {
            throw new TrinoException(COLUMN_NOT_FOUND, format("Did not find mapping for destination column %s in table scan redirection", destinationColumn));
        }
        Symbol symbol = inverseAssignments.get(sourceColumnHandle);
        if (symbol != null) {
            // domain symbol should already be mapped in redirected table scan
            return symbol;
        }
        // Column pruning after predicate is pushed into table scan can remove assignments for filter columns from the scan node
        Type domainType = requiredFilter.getDomains().get().get(destinationColumn).getType();
        symbol = context.getSymbolAllocator().newSymbol(destinationColumn, domainType);
        ColumnHandle destinationColumnHandle = destinationColumnHandles.get(destinationColumn);
        if (destinationColumnHandle == null) {
            throw new TrinoException(COLUMN_NOT_FOUND, format("Did not find handle for column %s in destination table %s", destinationColumn, destinationTable));
        }
        // insert casts if redirected types don't match domain types
        Type redirectedType = plannerContext.getMetadata().getColumnMetadata(context.getSession(), destinationTableHandle.get(), destinationColumnHandle).getType();
        if (!domainType.equals(redirectedType)) {
            Symbol redirectedSymbol = context.getSymbolAllocator().newSymbol(destinationColumn, redirectedType);
            Cast cast = getCast(context.getSession(), destinationTable, destinationColumn, redirectedType, redirectedSymbol, sourceTable, sourceColumnHandle, domainType);
            casts.put(symbol, cast);
            newAssignmentsBuilder.put(redirectedSymbol, destinationColumnHandle);
        } else {
            newAssignmentsBuilder.put(symbol, destinationColumnHandle);
        }
        return symbol;
    });
    Map<Symbol, ColumnHandle> newAssignments = newAssignmentsBuilder.buildOrThrow();
    TableScanNode newScanNode = new TableScanNode(
            scanNode.getId(),
            destinationTableHandle.get(),
            ImmutableList.copyOf(newAssignments.keySet()),
            newAssignments,
            TupleDomain.all(),
            // Use table statistics from destination table
            Optional.empty(),
            scanNode.isUpdateTarget(),
            Optional.empty());
    DomainTranslator domainTranslator = new DomainTranslator(plannerContext);
    FilterNode filterNode = new FilterNode(context.getIdAllocator().getNextId(), applyProjection(context.getIdAllocator(), newAssignments.keySet(), casts.buildOrThrow(), newScanNode), domainTranslator.toPredicate(context.getSession(), transformedConstraint));
    return Result.ofPlanNode(applyProjection(context.getIdAllocator(), ImmutableSet.copyOf(scanNode.getOutputSymbols()), ImmutableMap.of(), filterNode));
}
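The rule delegates to a private applyProjection helper that is not shown in this excerpt. As a rough illustration only, a helper with the signature implied by the call sites above could wrap the redirected scan in a ProjectNode that re-exposes each required symbol, emitting the prepared Cast where the destination column type differs and an identity assignment otherwise. This is a hypothetical sketch inferred from the call sites, not the actual Trino implementation; it assumes the usual planner types (Assignments, ProjectNode, Symbol) are imported.

private static PlanNode applyProjection(PlanNodeIdAllocator idAllocator, Set<Symbol> requiredSymbols, Map<Symbol, Cast> casts, PlanNode source) {
    Assignments.Builder assignments = Assignments.builder();
    for (Symbol symbol : requiredSymbols) {
        Cast cast = casts.get(symbol);
        if (cast != null) {
            // re-expose the original symbol as a cast over the redirected column's symbol
            assignments.put(symbol, cast);
        } else {
            // pass the symbol through unchanged
            assignments.put(symbol, symbol.toSymbolReference());
        }
    }
    return new ProjectNode(idAllocator.getNextId(), source, assignments.build());
}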
Use of io.trino.metadata.TableMetadata in project trino by trinodb.
Class IndexSourceMatcher, method detailMatches().
@Override
public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases) {
    checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName());
    IndexSourceNode indexSourceNode = (IndexSourceNode) node;
    TableMetadata tableMetadata = metadata.getTableMetadata(session, indexSourceNode.getTableHandle());
    String actualTableName = tableMetadata.getTable().getTableName();
    if (!expectedTableName.equalsIgnoreCase(actualTableName)) {
        return NO_MATCH;
    }
    return match();
}
Use of io.trino.metadata.TableMetadata in project trino by trinodb.
Class TableScanMatcher, method detailMatches().
@Override
public MatchResult detailMatches(PlanNode node, StatsProvider stats, Session session, Metadata metadata, SymbolAliases symbolAliases) {
    checkState(shapeMatches(node), "Plan testing framework error: shapeMatches returned false in detailMatches in %s", this.getClass().getName());
    TableScanNode tableScanNode = (TableScanNode) node;
    TableMetadata tableMetadata = metadata.getTableMetadata(session, tableScanNode.getTable());
    String actualTableName = tableMetadata.getTable().getTableName();
    // TODO (https://github.com/trinodb/trino/issues/17) change to equals()
    if (!expectedTableName.equalsIgnoreCase(actualTableName)) {
        return NO_MATCH;
    }
    if (!domainsMatch(expectedConstraint, tableScanNode.getEnforcedConstraint(), tableScanNode.getTable(), session, metadata)) {
        return NO_MATCH;
    }
    return new MatchResult(true);
}
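In plan tests this matcher is normally reached through the PlanMatchPattern factory methods rather than constructed directly. A minimal usage sketch, assuming a test class extending BasePlanTest; the SQL and table name here are illustrative:

import static io.trino.sql.planner.assertions.PlanMatchPattern.anyTree;
import static io.trino.sql.planner.assertions.PlanMatchPattern.tableScan;

@Test
public void testScanMatchesByTableName() {
    // expect the optimized plan to contain a scan of the "nation" table somewhere in the tree
    assertPlan("SELECT name FROM nation WHERE nationkey = 1", anyTree(tableScan("nation")));
}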
Use of io.trino.metadata.TableMetadata in project trino by trinodb.
Class BaseHiveConnectorTest, method testBucketHiddenColumn().
@Test
public void testBucketHiddenColumn() {
@Language("SQL") String createTable = "CREATE TABLE test_bucket_hidden_column " + "WITH (" + "bucketed_by = ARRAY['col0']," + "bucket_count = 2" + ") AS " + "SELECT * FROM (VALUES " + "(0, 11), (1, 12), (2, 13), " + "(3, 14), (4, 15), (5, 16), " + "(6, 17), (7, 18), (8, 19)" + " ) t (col0, col1) ";
assertUpdate(createTable, 9);
assertTrue(getQueryRunner().tableExists(getSession(), "test_bucket_hidden_column"));
TableMetadata tableMetadata = getTableMetadata(catalog, TPCH_SCHEMA, "test_bucket_hidden_column");
assertEquals(tableMetadata.getMetadata().getProperties().get(BUCKETED_BY_PROPERTY), ImmutableList.of("col0"));
assertEquals(tableMetadata.getMetadata().getProperties().get(BUCKET_COUNT_PROPERTY), 2);
List<String> columnNames = ImmutableList.of("col0", "col1", PATH_COLUMN_NAME, BUCKET_COLUMN_NAME, FILE_SIZE_COLUMN_NAME, FILE_MODIFIED_TIME_COLUMN_NAME);
List<ColumnMetadata> columnMetadatas = tableMetadata.getColumns();
assertEquals(columnMetadatas.size(), columnNames.size());
for (int i = 0; i < columnMetadatas.size(); i++) {
ColumnMetadata columnMetadata = columnMetadatas.get(i);
assertEquals(columnMetadata.getName(), columnNames.get(i));
if (columnMetadata.getName().equals(BUCKET_COLUMN_NAME)) {
// $bucket_number should be hidden column
assertTrue(columnMetadata.isHidden());
}
}
assertEquals(getBucketCount("test_bucket_hidden_column"), 2);
MaterializedResult results = computeActual(format("SELECT *, \"%1$s\" FROM test_bucket_hidden_column WHERE \"%1$s\" = 1", BUCKET_COLUMN_NAME));
for (int i = 0; i < results.getRowCount(); i++) {
MaterializedRow row = results.getMaterializedRows().get(i);
int col0 = (int) row.getField(0);
int col1 = (int) row.getField(1);
int bucket = (int) row.getField(2);
assertEquals(col1, col0 + 11);
assertTrue(col1 % 2 == 0);
// Because Hive's hash function for integer n is h(n) = n.
assertEquals(bucket, col0 % 2);
}
assertEquals(results.getRowCount(), 4);
assertUpdate("DROP TABLE test_bucket_hidden_column");
assertFalse(getQueryRunner().tableExists(getSession(), "test_bucket_hidden_column"));
}
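Hidden Hive columns such as "$bucket" and "$path" are not returned by SELECT *; they only appear when referenced explicitly, as the query in the test does. A hypothetical extra assertion in the same style, which could sit before the DROP TABLE above (not part of the original test), would pin down a single row:

// col0 = 3 hashes to bucket 3 % 2 = 1, per the modulo check asserted in the loop above
assertQuery(
        format("SELECT col0, \"%s\" FROM test_bucket_hidden_column WHERE col0 = 3", BUCKET_COLUMN_NAME),
        "SELECT 3, 1");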
Use of io.trino.metadata.TableMetadata in project trino by trinodb.
Class BaseHiveConnectorTest, method testInsert().
private void testInsert(Session session, HiveStorageFormat storageFormat) {
@Language("SQL") String createTable = "" + "CREATE TABLE test_insert_format_table " + "(" + " _string VARCHAR," + " _varchar VARCHAR(65535)," + " _char CHAR(10)," + " _bigint BIGINT," + " _integer INTEGER," + " _smallint SMALLINT," + " _tinyint TINYINT," + " _real REAL," + " _double DOUBLE," + " _boolean BOOLEAN," + " _decimal_short DECIMAL(3,2)," + " _decimal_long DECIMAL(30,10)" + ") " + "WITH (format = '" + storageFormat + "') ";
if (storageFormat == HiveStorageFormat.AVRO) {
createTable = createTable.replace(" _smallint SMALLINT,", " _smallint INTEGER,");
createTable = createTable.replace(" _tinyint TINYINT,", " _tinyint INTEGER,");
}
assertUpdate(session, createTable);
TableMetadata tableMetadata = getTableMetadata(catalog, TPCH_SCHEMA, "test_insert_format_table");
assertEquals(tableMetadata.getMetadata().getProperties().get(STORAGE_FORMAT_PROPERTY), storageFormat);
assertColumnType(tableMetadata, "_string", createUnboundedVarcharType());
assertColumnType(tableMetadata, "_varchar", createVarcharType(65535));
assertColumnType(tableMetadata, "_char", createCharType(10));
@Language("SQL") String select = "SELECT" + " 'foo' _string" + ", 'bar' _varchar" + ", CAST('boo' AS CHAR(10)) _char" + ", 1 _bigint" + ", CAST(42 AS INTEGER) _integer" + ", CAST(43 AS SMALLINT) _smallint" + ", CAST(44 AS TINYINT) _tinyint" + ", CAST('123.45' AS REAL) _real" + ", CAST('3.14' AS DOUBLE) _double" + ", true _boolean" + ", CAST('3.14' AS DECIMAL(3,2)) _decimal_short" + ", CAST('12345678901234567890.0123456789' AS DECIMAL(30,10)) _decimal_long";
if (storageFormat == HiveStorageFormat.AVRO) {
select = select.replace(" CAST (43 AS SMALLINT) _smallint,", " 3 _smallint,");
select = select.replace(" CAST (44 AS TINYINT) _tinyint,", " 4 _tinyint,");
    }
    assertUpdate(session, "INSERT INTO test_insert_format_table " + select, 1);
    assertQuery(session, "SELECT * FROM test_insert_format_table", select);
    assertUpdate(session, "INSERT INTO test_insert_format_table (_tinyint, _smallint, _integer, _bigint, _real, _double) SELECT CAST(1 AS TINYINT), CAST(2 AS SMALLINT), 3, 4, cast(14.3E0 as REAL), 14.3E0", 1);
    assertQuery(session, "SELECT * FROM test_insert_format_table WHERE _bigint = 4", "SELECT null, null, null, 4, 3, 2, 1, 14.3, 14.3, null, null, null");
    assertQuery(session, "SELECT * FROM test_insert_format_table WHERE _real = CAST(14.3 as REAL)", "SELECT null, null, null, 4, 3, 2, 1, 14.3, 14.3, null, null, null");
    assertUpdate(session, "INSERT INTO test_insert_format_table (_double, _bigint) SELECT 2.72E0, 3", 1);
    assertQuery(session, "SELECT * FROM test_insert_format_table WHERE _double = CAST(2.72E0 as DOUBLE)", "SELECT null, null, null, 3, null, null, null, null, 2.72, null, null, null");
    assertUpdate(session, "INSERT INTO test_insert_format_table (_decimal_short, _decimal_long) SELECT DECIMAL '2.72', DECIMAL '98765432101234567890.0123456789'", 1);
    assertQuery(session, "SELECT * FROM test_insert_format_table WHERE _decimal_long = DECIMAL '98765432101234567890.0123456789'", "SELECT null, null, null, null, null, null, null, null, null, null, 2.72, 98765432101234567890.0123456789");
    assertUpdate(session, "DROP TABLE test_insert_format_table");
    assertFalse(getQueryRunner().tableExists(session, "test_insert_format_table"));
}
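The assertColumnType helper used above is defined elsewhere in the test class. As a rough sketch only (the real implementation may differ), it amounts to looking up the column's ColumnMetadata by name in the TableMetadata and comparing its type:

private static void assertColumnType(TableMetadata tableMetadata, String columnName, Type expectedType) {
    // find the column by name among the table's ColumnMetadata entries
    ColumnMetadata column = tableMetadata.getColumns().stream()
            .filter(columnMetadata -> columnMetadata.getName().equals(columnName))
            .findFirst()
            .orElseThrow(() -> new AssertionError("column not found: " + columnName));
    assertEquals(column.getType(), expectedType);
}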