Use of io.trino.spi.connector.SchemaTableName in project trino by trinodb.
The class AbstractTestHive, method testGetTableNames.
@Test
public void testGetTableNames()
{
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        List<SchemaTableName> tables = metadata.listTables(newSession(), Optional.of(database));
        assertTrue(tables.contains(tablePartitionFormat));
        assertTrue(tables.contains(tableUnpartitioned));
    }
}
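The listing above asserts membership with SchemaTableName values. As a rough, standalone sketch of that value type (not taken from the Trino code base; it assumes only the trino-spi artifact on the classpath, and the class and names below are invented for illustration):

import io.trino.spi.connector.SchemaTableName;

public class SchemaTableNameSketch
{
    public static void main(String[] args)
    {
        // A SchemaTableName is a simple value object: schema name plus table name,
        // with equals/hashCode, so List.contains(...) as in the test above compares by value.
        SchemaTableName orders = new SchemaTableName("tpch", "orders");

        System.out.println(orders.getSchemaName()); // tpch
        System.out.println(orders.getTableName());  // orders
        System.out.println(orders);                 // tpch.orders

        // Two instances built from the same names compare equal.
        System.out.println(orders.equals(new SchemaTableName("tpch", "orders"))); // true
        // Note: the SPI has historically normalized both parts to lower case; verify on your version.
    }
}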
Use of io.trino.spi.connector.SchemaTableName in project trino by trinodb.
The class AbstractTestHive, method testListUnknownSchema.
@Test
public void testListUnknownSchema()
{
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        assertNull(metadata.getTableHandle(session, new SchemaTableName(INVALID_DATABASE, INVALID_TABLE)));
        assertEquals(metadata.listTables(session, Optional.of(INVALID_DATABASE)), ImmutableList.of());
        assertEquals(listTableColumns(metadata, session, new SchemaTablePrefix(INVALID_DATABASE, INVALID_TABLE)), ImmutableMap.of());
        assertEquals(metadata.listViews(session, Optional.of(INVALID_DATABASE)), ImmutableList.of());
        assertEquals(metadata.getViews(session, Optional.of(INVALID_DATABASE)), ImmutableMap.of());
        assertEquals(metadata.getView(session, new SchemaTableName(INVALID_DATABASE, INVALID_TABLE)), Optional.empty());
    }
}
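The unknown-schema assertions above exercise SchemaTableName together with SchemaTablePrefix. A minimal sketch of the three prefix shapes those listing APIs accept (illustrative only; assumes trino-spi on the classpath, and the printed format may differ between versions):

import io.trino.spi.connector.SchemaTablePrefix;

public class SchemaTablePrefixSketch
{
    public static void main(String[] args)
    {
        SchemaTablePrefix everything = new SchemaTablePrefix();                             // no schema, no table: matches all tables
        SchemaTablePrefix wholeSchema = new SchemaTablePrefix("some_schema");               // one schema, all of its tables
        SchemaTablePrefix singleTable = new SchemaTablePrefix("some_schema", "some_table"); // exactly one table

        System.out.println(everything);
        System.out.println(wholeSchema);
        System.out.println(singleTable);
    }
}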
Use of io.trino.spi.connector.SchemaTableName in project trino by trinodb.
The class AbstractTestHive, method testApplyProjection.
@Test
public void testApplyProjection()
        throws Exception
{
    ColumnMetadata bigIntColumn0 = new ColumnMetadata("int0", BIGINT);
    ColumnMetadata bigIntColumn1 = new ColumnMetadata("int1", BIGINT);

    RowType oneLevelRowType = toRowType(ImmutableList.of(bigIntColumn0, bigIntColumn1));
    ColumnMetadata oneLevelRow0 = new ColumnMetadata("onelevelrow0", oneLevelRowType);

    RowType twoLevelRowType = toRowType(ImmutableList.of(oneLevelRow0, bigIntColumn0, bigIntColumn1));
    ColumnMetadata twoLevelRow0 = new ColumnMetadata("twolevelrow0", twoLevelRowType);

    List<ColumnMetadata> columnsForApplyProjectionTest = ImmutableList.of(bigIntColumn0, bigIntColumn1, oneLevelRow0, twoLevelRow0);

    SchemaTableName tableName = temporaryTable("apply_projection_tester");
    doCreateEmptyTable(tableName, ORC, columnsForApplyProjectionTest);

    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);

        List<ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle).values().stream()
                .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
                .collect(toList());
        assertEquals(columnHandles.size(), columnsForApplyProjectionTest.size());

        Map<String, ColumnHandle> columnHandleMap = columnHandles.stream()
                .collect(toImmutableMap(handle -> ((HiveColumnHandle) handle).getBaseColumnName(), Function.identity()));

        // Emulate symbols coming from the query plan and map them to column handles
        Map<String, ColumnHandle> columnHandlesWithSymbols = ImmutableMap.of(
                "symbol_0", columnHandleMap.get("int0"),
                "symbol_1", columnHandleMap.get("int1"),
                "symbol_2", columnHandleMap.get("onelevelrow0"),
                "symbol_3", columnHandleMap.get("twolevelrow0"));

        // Create variables for the emulated symbols
        Map<String, Variable> symbolVariableMapping = columnHandlesWithSymbols.entrySet().stream()
                .collect(toImmutableMap(
                        Map.Entry::getKey,
                        e -> new Variable(e.getKey(), ((HiveColumnHandle) e.getValue()).getBaseType())));

        // Create dereference expressions for testing
        FieldDereference symbol2Field0 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_2"), 0);
        FieldDereference symbol3Field0 = new FieldDereference(oneLevelRowType, symbolVariableMapping.get("symbol_3"), 0);
        FieldDereference symbol3Field0Field0 = new FieldDereference(BIGINT, symbol3Field0, 0);
        FieldDereference symbol3Field1 = new FieldDereference(BIGINT, symbolVariableMapping.get("symbol_3"), 1);

        Map<String, ColumnHandle> inputAssignments;
        List<ConnectorExpression> inputProjections;
        Optional<ProjectionApplicationResult<ConnectorTableHandle>> projectionResult;
        List<ConnectorExpression> expectedProjections;
        Map<String, Type> expectedAssignments;

        // Test projected columns pushdown to HiveTableHandle in case of all variable references
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_0", "symbol_1"));
        inputProjections = ImmutableList.of(symbolVariableMapping.get("symbol_0"), symbolVariableMapping.get("symbol_1"));
        expectedAssignments = ImmutableMap.of("symbol_0", BIGINT, "symbol_1", BIGINT);
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);

        // Empty result when the projected column handles are the same as those already present in the table handle
        projectionResult = metadata.applyProjection(session, projectionResult.get().getHandle(), inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, true, ImmutableList.of(), ImmutableMap.of());

        // Extra column handles in HiveTableHandle should get pruned
        projectionResult = metadata.applyProjection(
                session,
                ((HiveTableHandle) tableHandle).withProjectedColumns(ImmutableSet.copyOf(columnHandles)),
                inputProjections,
                inputAssignments);
        assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments);

        // Test projection pushdown for dereferences
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2", "symbol_3"));
        inputProjections = ImmutableList.of(symbol2Field0, symbol3Field0Field0, symbol3Field1);
        expectedAssignments = ImmutableMap.of(
                "onelevelrow0#f_int0", BIGINT,
                "twolevelrow0#f_onelevelrow0#f_int0", BIGINT,
                "twolevelrow0#f_int0", BIGINT);
        expectedProjections = ImmutableList.of(
                new Variable("onelevelrow0#f_int0", BIGINT),
                new Variable("twolevelrow0#f_onelevelrow0#f_int0", BIGINT),
                new Variable("twolevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);

        // Test reuse of virtual column handles
        // Round 1: input projections [symbol_2, symbol_2.int0]. A virtual handle is created for symbol_2.int0.
        inputAssignments = getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2"));
        inputProjections = ImmutableList.of(symbol2Field0, symbolVariableMapping.get("symbol_2"));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), symbolVariableMapping.get("symbol_2"));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT, "symbol_2", oneLevelRowType);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);

        // Round 2: input projections [symbol_2.int0 and onelevelrow0#f_int0]. The virtual handle is reused.
        Assignment newlyCreatedColumn = getOnlyElement(projectionResult.get().getAssignments().stream()
                .filter(handle -> handle.getVariable().equals("onelevelrow0#f_int0"))
                .collect(toList()));
        inputAssignments = ImmutableMap.<String, ColumnHandle>builder()
                .putAll(getColumnHandlesFor(columnHandlesWithSymbols, ImmutableList.of("symbol_2")))
                .put(newlyCreatedColumn.getVariable(), newlyCreatedColumn.getColumn())
                .buildOrThrow();
        inputProjections = ImmutableList.of(symbol2Field0, new Variable("onelevelrow0#f_int0", BIGINT));
        projectionResult = metadata.applyProjection(session, tableHandle, inputProjections, inputAssignments);
        expectedProjections = ImmutableList.of(new Variable("onelevelrow0#f_int0", BIGINT), new Variable("onelevelrow0#f_int0", BIGINT));
        expectedAssignments = ImmutableMap.of("onelevelrow0#f_int0", BIGINT);
        assertProjectionResult(projectionResult, false, expectedProjections, expectedAssignments);
    }
    finally {
        dropTable(tableName);
    }
}
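For readers unfamiliar with the connector expression model the test feeds into applyProjection, here is a small standalone sketch of the same shapes it builds: a nested RowType plus Variable and FieldDereference expressions. It is illustrative only (it assumes the trino-spi type and expression classes; the symbol name "my_row" and the class name are hypothetical) and does not call applyProjection itself:

import io.trino.spi.expression.FieldDereference;
import io.trino.spi.expression.Variable;
import io.trino.spi.type.RowType;

import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.RowType.field;
import static io.trino.spi.type.RowType.rowType;

public class DereferenceSketch
{
    public static void main(String[] args)
    {
        // row(int0 bigint, int1 bigint)
        RowType oneLevel = rowType(field("int0", BIGINT), field("int1", BIGINT));
        // row(onelevelrow0 row(int0 bigint, int1 bigint), int0 bigint, int1 bigint)
        RowType twoLevel = rowType(field("onelevelrow0", oneLevel), field("int0", BIGINT), field("int1", BIGINT));

        // A Variable stands for a symbol produced by the query plan.
        Variable symbol = new Variable("my_row", twoLevel);

        // my_row.onelevelrow0: dereference field index 0 of the outer row
        FieldDereference outer = new FieldDereference(oneLevel, symbol, 0);
        // my_row.onelevelrow0.int0: dereference field index 0 of the inner row
        FieldDereference inner = new FieldDereference(BIGINT, outer, 0);

        System.out.println(inner.getType()); // the dereferenced type: BIGINT
    }
}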
Use of io.trino.spi.connector.SchemaTableName in project trino by trinodb.
The class AbstractTestHive, method testIllegalStorageFormatDuringTableScan.
/**
 * During table scan, an illegal storage format for some specific table should not fail the whole table scan
 */
@Test
public void testIllegalStorageFormatDuringTableScan()
{
    SchemaTableName schemaTableName = temporaryTable("test_illegal_storage_format");
    try (Transaction transaction = newTransaction()) {
        ConnectorSession session = newSession();
        List<Column> columns = ImmutableList.of(new Column("pk", HIVE_STRING, Optional.empty()));
        String tableOwner = session.getUser();
        String schemaName = schemaTableName.getSchemaName();
        String tableName = schemaTableName.getTableName();
        LocationHandle locationHandle = locationService.forNewTable(transaction.getMetastore(), session, schemaName, tableName, Optional.empty());
        Path targetPath = locationService.getQueryWriteInfo(locationHandle).getTargetPath();

        // create a table whose storage format is null
        Table.Builder tableBuilder = Table.builder()
                .setDatabaseName(schemaName)
                .setTableName(tableName)
                .setOwner(Optional.of(tableOwner))
                .setTableType(TableType.MANAGED_TABLE.name())
                .setParameters(ImmutableMap.of(
                        PRESTO_VERSION_NAME, TEST_SERVER_VERSION,
                        PRESTO_QUERY_ID_NAME, session.getQueryId()))
                .setDataColumns(columns)
                .withStorage(storage -> storage
                        .setLocation(targetPath.toString())
                        .setStorageFormat(StorageFormat.createNullable(null, null, null))
                        .setSerdeParameters(ImmutableMap.of()));
        PrincipalPrivileges principalPrivileges = testingPrincipalPrivilege(tableOwner, session.getUser());
        transaction.getMetastore().createTable(session, tableBuilder.build(), principalPrivileges, Optional.empty(), Optional.empty(), true, EMPTY_TABLE_STATISTICS, false);
        transaction.commit();
    }

    // make sure the table can still be retrieved instead of throwing an exception
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        Map<SchemaTableName, List<ColumnMetadata>> allColumns = listTableColumns(metadata, newSession(), new SchemaTablePrefix(schemaTableName.getSchemaName()));
        assertTrue(allColumns.containsKey(schemaTableName));
    }
    finally {
        dropTable(schemaTableName);
    }
}
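Because SchemaTableName has value semantics, it works directly as the key of the Map<SchemaTableName, List<ColumnMetadata>> checked above. A small, self-contained illustration (not from the test; the class, schema, table, and column names below are hypothetical):

import java.util.List;
import java.util.Map;

import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.SchemaTableName;

import static io.trino.spi.type.VarcharType.VARCHAR;

public class ColumnListingKeySketch
{
    public static void main(String[] args)
    {
        Map<SchemaTableName, List<ColumnMetadata>> allColumns = Map.of(
                new SchemaTableName("my_schema", "test_illegal_storage_format"),
                List.of(new ColumnMetadata("pk", VARCHAR)));

        // A freshly constructed key with the same names finds the entry,
        // which is what assertTrue(allColumns.containsKey(schemaTableName)) relies on.
        System.out.println(allColumns.containsKey(new SchemaTableName("my_schema", "test_illegal_storage_format"))); // true
    }
}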
Use of io.trino.spi.connector.SchemaTableName in project trino by trinodb.
The class AbstractTestHive, method testGetTableSchemaOffline.
@Test
public void testGetTableSchemaOffline()
{
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        Map<SchemaTableName, List<ColumnMetadata>> columns = listTableColumns(metadata, newSession(), tableOffline.toSchemaTablePrefix());
        assertEquals(columns.size(), 1);
        Map<String, ColumnMetadata> map = uniqueIndex(getOnlyElement(columns.values()), ColumnMetadata::getName);
        assertPrimitiveField(map, "t_string", createUnboundedVarcharType(), false);
    }
}
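The uniqueIndex(...) call above is presumably Guava's Maps.uniqueIndex (statically imported in the test class), which builds a name-to-column map and fails fast on duplicate names. A rough standalone sketch of that idiom (assuming Guava and trino-spi on the classpath; the column name mirrors the test, the rest is invented):

import java.util.List;
import java.util.Map;

import com.google.common.collect.Maps;
import io.trino.spi.connector.ColumnMetadata;

import static io.trino.spi.type.VarcharType.createUnboundedVarcharType;

public class UniqueIndexSketch
{
    public static void main(String[] args)
    {
        List<ColumnMetadata> columns = List.of(new ColumnMetadata("t_string", createUnboundedVarcharType()));

        // Index columns by name; uniqueIndex throws if two columns share a name.
        Map<String, ColumnMetadata> byName = Maps.uniqueIndex(columns, ColumnMetadata::getName);

        System.out.println(byName.get("t_string").getType()); // prints the varchar type
    }
}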