Example use of io.trino.testing.QueryRunner in the trino project (trinodb):
class TestEventListenerBasic, method createQueryRunner.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    // Single-task session against the tiny TPCH schema; the clientInfo JSON is surfaced in query events.
    Session defaultSession = testSessionBuilder()
            .setSystemProperty("task_concurrency", "1")
            .setCatalog("tpch")
            .setSchema("tiny")
            .setClientInfo("{\"clientVersion\":\"testVersion\"}")
            .build();
    DistributedQueryRunner runner = DistributedQueryRunner.builder(defaultSession)
            .setNodeCount(1)
            .build();
    runner.installPlugin(new TpchPlugin());
    runner.installPlugin(new TestingEventListenerPlugin(generatedEvents));
    runner.installPlugin(new ResourceGroupManagerPlugin());
    runner.createCatalog("tpch", "tpch");
    // Mock connector exposing a table, columns, a view, a materialized view, a row filter and a
    // column mask; applyProjection deliberately throws so failure events can be observed.
    runner.installPlugin(new Plugin() {
        @Override
        public Iterable<ConnectorFactory> getConnectorFactories() {
            MockConnectorFactory factory = MockConnectorFactory.builder()
                    .withListTables((connectorSession, schemaName) ->
                            ImmutableList.of(new SchemaTableName("default", "tests_table")))
                    .withGetColumns(schemaTableName -> ImmutableList.of(
                            new ColumnMetadata("test_varchar", createVarcharType(15)),
                            new ColumnMetadata("test_bigint", BIGINT)))
                    .withGetTableHandle((connectorSession, schemaTableName) -> {
                        // Tables whose names start with "create" are reported as absent
                        if (schemaTableName.getTableName().startsWith("create")) {
                            return null;
                        }
                        return new MockConnectorTableHandle(schemaTableName);
                    })
                    .withApplyProjection((connectorSession, handle, projections, assignments) -> {
                        if (((MockConnectorTableHandle) handle).getTableName().getTableName().equals("tests_table")) {
                            throw new RuntimeException("Throw from apply projection");
                        }
                        return Optional.empty();
                    })
                    .withGetViews((connectorSession, prefix) -> {
                        ConnectorViewDefinition definition = new ConnectorViewDefinition(
                                "SELECT nationkey AS test_column FROM tpch.tiny.nation",
                                Optional.empty(),
                                Optional.empty(),
                                ImmutableList.of(new ConnectorViewDefinition.ViewColumn("test_column", BIGINT.getTypeId())),
                                Optional.empty(),
                                Optional.empty(),
                                true);
                        return ImmutableMap.of(new SchemaTableName("default", "test_view"), definition);
                    })
                    .withGetMaterializedViews((connectorSession, prefix) -> {
                        ConnectorMaterializedViewDefinition definition = new ConnectorMaterializedViewDefinition(
                                "SELECT nationkey AS test_column FROM tpch.tiny.nation",
                                Optional.empty(),
                                Optional.empty(),
                                Optional.empty(),
                                ImmutableList.of(new Column("test_column", BIGINT.getTypeId())),
                                Optional.empty(),
                                Optional.of("alice"),
                                ImmutableMap.of());
                        return ImmutableMap.of(new SchemaTableName("default", "test_materialized_view"), definition);
                    })
                    .withRowFilter(schemaTableName -> {
                        if (schemaTableName.getTableName().equals("test_table_with_row_filter")) {
                            return new ViewExpression("user", Optional.of("tpch"), Optional.of("tiny"), "EXISTS (SELECT 1 FROM nation WHERE name = test_varchar)");
                        }
                        return null;
                    })
                    .withColumnMask((schemaTableName, columnName) -> {
                        if (schemaTableName.getTableName().equals("test_table_with_column_mask") && columnName.equals("test_varchar")) {
                            return new ViewExpression("user", Optional.of("tpch"), Optional.of("tiny"), "(SELECT cast(max(orderkey) AS varchar(15)) FROM orders)");
                        }
                        return null;
                    })
                    .build();
            return ImmutableList.of(factory);
        }
    });
    runner.createCatalog("mock", "mock", ImmutableMap.of());
    // Resource groups are configured from a JSON file so queries map to a known group in events.
    runner.getCoordinator().getResourceGroupManager().get()
            .setConfigurationManager("file", ImmutableMap.of("resource-groups.config-file", getResourceFilePath("resource_groups_config_simple.json")));
    queries = new EventsAwaitingQueries(generatedEvents, runner, Duration.ofSeconds(1));
    return runner;
}
Example use of io.trino.testing.QueryRunner in the trino project (trinodb):
class TestAccessControl, method createQueryRunner.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    // Tables are created in the blackhole catalog, so data is discarded and only metadata matters.
    Session defaultSession = testSessionBuilder()
            .setCatalog("blackhole")
            .setSchema("default")
            .build();
    DistributedQueryRunner runner = DistributedQueryRunner.builder(defaultSession)
            .setNodeCount(1)
            .build();
    runner.installPlugin(new BlackHolePlugin());
    runner.createCatalog("blackhole", "blackhole");
    runner.installPlugin(new TpchPlugin());
    runner.createCatalog("tpch", "tpch");
    // Mock connector supplying one definer-security view, one invoker-security view, and a role grant.
    MockConnectorFactory mockFactory = MockConnectorFactory.builder()
            .withGetViews((connectorSession, prefix) -> {
                ConnectorViewDefinition runAsDefiner = new ConnectorViewDefinition(
                        "select 1",
                        Optional.of("mock"),
                        Optional.of("default"),
                        ImmutableList.of(new ConnectorViewDefinition.ViewColumn("test", BIGINT.getTypeId())),
                        Optional.of("comment"),
                        Optional.of("admin"),
                        false);
                ConnectorViewDefinition runAsInvoker = new ConnectorViewDefinition(
                        "select 1",
                        Optional.of("mock"),
                        Optional.of("default"),
                        ImmutableList.of(new ConnectorViewDefinition.ViewColumn("test", BIGINT.getTypeId())),
                        Optional.of("comment"),
                        Optional.empty(),
                        true);
                return ImmutableMap.of(
                        new SchemaTableName("default", "test_view_definer"), runAsDefiner,
                        new SchemaTableName("default", "test_view_invoker"), runAsInvoker);
            })
            .withListRoleGrants((connectorSession, roles, grantees, limit) ->
                    ImmutableSet.of(new RoleGrant(new TrinoPrincipal(USER, "alice"), "alice_role", false)))
            .build();
    runner.installPlugin(new MockConnectorPlugin(mockFactory));
    runner.createCatalog("mock", "mock");
    // Clone the schemas of a few TPCH tables into blackhole (structure only, no rows).
    for (String table : ImmutableList.of("orders", "nation", "region", "lineitem")) {
        runner.execute(format("CREATE TABLE %1$s AS SELECT * FROM tpch.tiny.%1$s WITH NO DATA", table));
    }
    return runner;
}
Example use of io.trino.testing.QueryRunner in the trino project (trinodb):
class TestEventListenerWithSplits, method createQueryRunner.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    // Single-task session against the tiny TPCH schema; clientInfo is surfaced in query events.
    Session defaultSession = testSessionBuilder()
            .setSystemProperty("task_concurrency", "1")
            .setCatalog("tpch")
            .setSchema("tiny")
            .setClientInfo("{\"clientVersion\":\"testVersion\"}")
            .build();
    DistributedQueryRunner runner = DistributedQueryRunner.builder(defaultSession)
            .setNodeCount(1)
            .build();
    runner.installPlugin(new TpchPlugin());
    runner.installPlugin(new TestingEventListenerPlugin(generatedEvents));
    runner.installPlugin(new ResourceGroupManagerPlugin());
    // Fix the TPCH split count so the number of split events is deterministic.
    runner.createCatalog("tpch", "tpch", ImmutableMap.of(TPCH_SPLITS_PER_NODE, Integer.toString(SPLITS_PER_NODE)));
    // Mock connector whose applyProjection always throws, to exercise failure events.
    runner.installPlugin(new Plugin() {
        @Override
        public Iterable<ConnectorFactory> getConnectorFactories() {
            MockConnectorFactory factory = MockConnectorFactory.builder()
                    .withListTables((connectorSession, schemaName) ->
                            ImmutableList.of(new SchemaTableName("default", "test_table")))
                    .withApplyProjection((connectorSession, handle, projections, assignments) -> {
                        throw new RuntimeException("Throw from apply projection");
                    })
                    .build();
            return ImmutableList.of(factory);
        }
    });
    runner.createCatalog("mock", "mock", ImmutableMap.of());
    // Resource groups are configured from a JSON file so queries map to a known group in events.
    runner.getCoordinator().getResourceGroupManager().get()
            .setConfigurationManager("file", ImmutableMap.of("resource-groups.config-file", getResourceFilePath("resource_groups_config_simple.json")));
    queries = new EventsAwaitingQueries(generatedEvents, runner, Duration.ofSeconds(1));
    return runner;
}
Example use of io.trino.testing.QueryRunner in the trino project (trinodb):
class TestTableRedirection, method createQueryRunner.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    // Table redirections are served entirely by the mock connector; no extra catalog properties needed.
    QueryRunner runner = DistributedQueryRunner.builder(TEST_SESSION).build();
    runner.installPlugin(new MockConnectorPlugin(createMockConnectorFactory()));
    runner.createCatalog(CATALOG_NAME, "mock", ImmutableMap.of());
    return runner;
}
Example use of io.trino.testing.QueryRunner in the trino project (trinodb):
class TestKafkaIntegrationPushDown, method createQueryRunner.
@Override
protected QueryRunner createQueryRunner() throws Exception {
    testingKafka = closeAfterClass(TestingKafka.create());
    // Unique topic names per run; the random suffix keeps parallel/repeated runs from colliding.
    topicNamePartition = "test_push_down_partition_" + randomTopicSuffix();
    topicNameOffset = "test_push_down_offset_" + randomTopicSuffix();
    topicNameCreateTime = "test_push_down_create_time_" + randomTopicSuffix();
    topicNameLogAppend = "test_push_down_log_append_" + randomTopicSuffix();
    QueryRunner queryRunner = KafkaQueryRunner.builder(testingKafka)
            // Each topic is registered as a table in the "default" schema.
            .setExtraTopicDescription(ImmutableMap.<SchemaTableName, KafkaTopicDescription>builder()
                    .put(createEmptyTopicDescription(topicNamePartition, new SchemaTableName("default", topicNamePartition)))
                    .put(createEmptyTopicDescription(topicNameOffset, new SchemaTableName("default", topicNameOffset)))
                    .put(createEmptyTopicDescription(topicNameCreateTime, new SchemaTableName("default", topicNameCreateTime)))
                    .put(createEmptyTopicDescription(topicNameLogAppend, new SchemaTableName("default", topicNameLogAppend)))
                    .buildOrThrow())
            .setExtraKafkaProperties(ImmutableMap.<String, String>builder()
                    .put("kafka.messages-per-split", "100")
                    .buildOrThrow())
            .build();
    // Create the topics up front: 2 partitions for the partition/offset topics, 1 for the
    // timestamp topics; the last flag presumably enables log-append time — TODO confirm
    // against TestingKafka.createTopicWithConfig.
    testingKafka.createTopicWithConfig(2, 1, topicNamePartition, false);
    testingKafka.createTopicWithConfig(2, 1, topicNameOffset, false);
    testingKafka.createTopicWithConfig(1, 1, topicNameCreateTime, false);
    testingKafka.createTopicWithConfig(1, 1, topicNameLogAppend, true);
    return queryRunner;
}

/**
 * Returns a random, identifier-safe suffix: a UUID with dashes replaced by underscores.
 * Uses {@code String.replace} (literal replacement) instead of {@code replaceAll},
 * which compiles its argument as a regex that is not needed here.
 */
private static String randomTopicSuffix() {
    return UUID.randomUUID().toString().replace("-", "_");
}
Aggregations