Use of io.trino.spi.connector.ColumnMetadata in project trino by trinodb.
The class TestRaptorMetadata, method assertTableEqual:
private static void assertTableEqual(ConnectorTableMetadata actual, ConnectorTableMetadata expected) {
    assertEquals(actual.getTable(), expected.getTable());
    List<ColumnMetadata> actualColumns = actual.getColumns().stream()
            .filter(columnMetadata -> !columnMetadata.isHidden())
            .collect(Collectors.toList());
    List<ColumnMetadata> expectedColumns = expected.getColumns();
    assertEquals(actualColumns.size(), expectedColumns.size());
    for (int i = 0; i < actualColumns.size(); i++) {
        ColumnMetadata actualColumn = actualColumns.get(i);
        ColumnMetadata expectedColumn = expectedColumns.get(i);
        assertEquals(actualColumn.getName(), expectedColumn.getName());
        assertEquals(actualColumn.getType(), expectedColumn.getType());
    }
    assertEquals(actual.getProperties(), expected.getProperties());
}
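A minimal sketch of how this helper might be called, assuming hand-built metadata; the table name, columns, and the metadata, SESSION, and tableHandle fixtures below are illustrative, not taken from the test:
// Hypothetical usage of assertTableEqual: compare the metadata a connector reports
// against an expected layout. BIGINT and VARCHAR are the io.trino.spi.type constants.
ConnectorTableMetadata expected = new ConnectorTableMetadata(
        new SchemaTableName("tpch", "orders"),
        ImmutableList.of(
                new ColumnMetadata("orderkey", BIGINT),
                new ColumnMetadata("comment", VARCHAR)));
ConnectorTableMetadata actual = metadata.getTableMetadata(SESSION, tableHandle); // assumed fixtures
assertTableEqual(actual, expected);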
Use of io.trino.spi.connector.ColumnMetadata in project trino by trinodb.
The class TestQueryTracker, method createQueryRunner:
@Override
protected QueryRunner createQueryRunner() throws Exception {
    Session defaultSession = testSessionBuilder()
            .setCatalog("mock")
            .setSchema("default")
            .setSystemProperty(QUERY_MAX_PLANNING_TIME, "2s")
            .build();
    DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(defaultSession).build();
    queryRunner.installPlugin(new Plugin() {
        @Override
        public Iterable<ConnectorFactory> getConnectorFactories() {
            return ImmutableList.of(MockConnectorFactory.builder()
                    .withGetColumns(ignored -> ImmutableList.of(new ColumnMetadata("col", VARCHAR)))
                    .withApplyFilter((ignored1, ignored2, ignored3) -> freeze())
                    .build());
        }
    });
    queryRunner.createCatalog("mock", "mock");
    return queryRunner;
}
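A sketch of a test that could exercise this runner: the withApplyFilter callback blocks planning via freeze(), so a filtered query should trip the 2s QUERY_MAX_PLANNING_TIME limit. The table name, the getQueryRunner() accessor, and the message fragment asserted below are assumptions, not the actual test:
// Hypothetical test body, assuming AssertJ's assertThatThrownBy and that the test class
// extends AbstractTestQueryFramework; any table name works because withGetColumns ignores it.
assertThatThrownBy(() -> getQueryRunner().execute("SELECT * FROM t WHERE col = 'abc'"))
        .hasMessageContaining("planning time");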
Use of io.trino.spi.connector.ColumnMetadata in project trino by trinodb.
The class TestRefreshMaterializedView, method createQueryRunner:
@Override
protected QueryRunner createQueryRunner() throws Exception {
    Session session = testSessionBuilder().setCatalog("mock").setSchema("default").build();
    DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).build();
    queryRunner.installPlugin(new MockConnectorPlugin(MockConnectorFactory.builder()
            .withListSchemaNames(connectionSession -> ImmutableList.of("default"))
            .withGetColumns(schemaTableName -> ImmutableList.of(new ColumnMetadata("nationkey", BIGINT)))
            .withGetTableHandle((connectorSession, tableName) -> new MockConnectorTableHandle(tableName))
            .withGetMaterializedViews((connectorSession, schemaTablePrefix) -> ImmutableMap.of(
                    new SchemaTableName("default", "delegate_refresh_to_connector"),
                    new ConnectorMaterializedViewDefinition(
                            "SELECT nationkey FROM mock.default.test_table",
                            Optional.of(new CatalogSchemaTableName("mock", "default", "test_storage")),
                            Optional.of("mock"),
                            Optional.of("default"),
                            ImmutableList.of(new ConnectorMaterializedViewDefinition.Column("nationkey", BIGINT.getTypeId())),
                            Optional.empty(),
                            Optional.of("alice"),
                            ImmutableMap.of())))
            .withDelegateMaterializedViewRefreshToConnector((connectorSession, schemaTableName) -> true)
            .withRefreshMaterializedView((connectorSession, schemaTableName) -> {
                startRefreshMaterializedView.set(null);
                SettableFuture<Void> refreshMaterializedView = SettableFuture.create();
                finishRefreshMaterializedView.addListener(() -> refreshMaterializedView.set(null), directExecutor());
                addExceptionCallback(refreshMaterializedView, () -> refreshInterrupted.set(null));
                return toCompletableFuture(refreshMaterializedView);
            })
            .build()));
    queryRunner.createCatalog("mock", "mock");
    return queryRunner;
}
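The refresh callback synchronizes with the test through SettableFuture handles; startRefreshMaterializedView, finishRefreshMaterializedView, and refreshInterrupted are assumed to be SettableFuture<Void> fields of the test class. A sketch of how a test might drive the handshake (the executor and timeouts are assumptions):
// Hypothetical driver: run REFRESH on another thread, wait for the connector callback
// to signal it started, then release it by completing finishRefreshMaterializedView.
Future<?> refresh = executor.submit(() ->
        getQueryRunner().execute("REFRESH MATERIALIZED VIEW mock.default.delegate_refresh_to_connector"));
startRefreshMaterializedView.get(10, SECONDS); // callback ran: startRefreshMaterializedView was set
finishRefreshMaterializedView.set(null);       // completes the future returned by toCompletableFuture(...)
refresh.get(10, SECONDS);                      // the REFRESH statement finishes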
Use of io.trino.spi.connector.ColumnMetadata in project trino by trinodb.
The class TestErrorThrowableInQuery, method createQueryRunner:
@Override
protected DistributedQueryRunner createQueryRunner() throws Exception {
    Session session = testSessionBuilder()
            .setSystemProperty("task_concurrency", "1")
            .setCatalog("mock")
            .setSchema("default")
            .setClientInfo("{\"clientVersion\":\"testVersion\"}")
            .build();
    DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).setNodeCount(1).build();
    try {
        queryRunner.installPlugin(new TpchPlugin());
        queryRunner.installPlugin(new ResourceGroupManagerPlugin());
        queryRunner.installPlugin(new Plugin() {
            @Override
            public Iterable<ConnectorFactory> getConnectorFactories() {
                SchemaTableName stackOverflowErrorTableName = new SchemaTableName("default", "stack_overflow_during_planning");
                SchemaTableName classFormatErrorTableName = new SchemaTableName("default", "class_format_error_during_planning");
                MockConnectorFactory connectorFactory = MockConnectorFactory.builder()
                        .withListTables((session, s) -> ImmutableList.of(stackOverflowErrorTableName))
                        .withGetColumns(schemaTableName -> ImmutableList.of(
                                new ColumnMetadata("test_varchar", createUnboundedVarcharType()),
                                new ColumnMetadata("test_bigint", BIGINT)))
                        .withGetTableHandle((session, schemaTableName) -> new MockConnectorTableHandle(schemaTableName))
                        .withApplyProjection((session, handle, projections, assignments) -> {
                            MockConnectorTableHandle mockTableHandle = (MockConnectorTableHandle) handle;
                            if (stackOverflowErrorTableName.equals(mockTableHandle.getTableName())) {
                                throw new StackOverflowError("We run out of stack!!!!!!!!!!!");
                            }
                            if (classFormatErrorTableName.equals(mockTableHandle.getTableName())) {
                                throw new ClassFormatError("Bad class format!!!!!!!!!!");
                            }
                            throw new TrinoException(NOT_FOUND, "Unknown table: " + mockTableHandle.getTableName());
                        })
                        .build();
                return ImmutableList.of(connectorFactory);
            }
        });
        queryRunner.createCatalog("mock", "mock", ImmutableMap.of());
    } catch (Exception e) {
        queryRunner.close();
        throw e;
    }
    return queryRunner;
}
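Because withApplyProjection throws for the two registered table names, a query that touches either table should fail during planning instead of crashing the server. A sketch of one possible check (the assertion style is an assumption; the exact error wrapping depends on how the engine handles the Throwable):
// Hypothetical check using AssertJ: planning hits applyProjection, which throws
// StackOverflowError; the engine is expected to surface it as a failed query.
assertThatThrownBy(() ->
        queryRunner.execute(session, "SELECT test_varchar FROM mock.default.stack_overflow_during_planning"))
        .isInstanceOf(RuntimeException.class);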
Use of io.trino.spi.connector.ColumnMetadata in project TiBigData by tidb-incubator.
The class TiDBMetadata, method createTable:
@Override
public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata, boolean ignoreExisting) {
    List<ColumnMetadata> columns = tableMetadata.getColumns();
    SchemaTableName table = tableMetadata.getTable();
    String schemaName = table.getSchemaName();
    String tableName = table.getTableName();
    List<String> columnNames = columns.stream()
            .map(ColumnMetadata::getName)
            .collect(toImmutableList());
    List<String> columnTypes = columns.stream()
            .map(column -> TypeHelpers.toSqlString(column.getType()))
            .collect(toImmutableList());
    List<String> primaryKeyColumns = Arrays.stream(tableMetadata.getProperties().get(PRIMARY_KEY).toString().split(","))
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    checkArgument(columnNames.containsAll(primaryKeyColumns), "invalid primary key columns: " + primaryKeyColumns);
    List<String> uniqueKeyColumns = Arrays.stream(tableMetadata.getProperties().get(UNIQUE_KEY).toString().split(","))
            .filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    checkArgument(columnNames.containsAll(uniqueKeyColumns), "invalid unique key columns: " + uniqueKeyColumns);
    getInternal().createTable(schemaName, tableName, columnNames, columnTypes, primaryKeyColumns, uniqueKeyColumns, ignoreExisting);
}
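A sketch of the ConnectorTableMetadata a caller might pass to this method, assuming the PRIMARY_KEY and UNIQUE_KEY constants resolve to property names like "primary_key" and "unique_key" (the real keys are defined by TiBigData's table property metadata):
// Hypothetical createTable input: two columns plus comma-separated key properties.
// BIGINT and VARCHAR are the SPI type constants; metadata and session are assumed fixtures.
ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(
        new SchemaTableName("test", "people"),
        ImmutableList.of(
                new ColumnMetadata("id", BIGINT),
                new ColumnMetadata("name", VARCHAR)),
        ImmutableMap.of(
                "primary_key", "id",
                "unique_key", "name"));
metadata.createTable(session, tableMetadata, false);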