
Example 1 with SqlCatalog

Use of com.hazelcast.sql.impl.schema.SqlCatalog in project hazelcast by hazelcast.

Class SqlServiceImpl, method prepare:

private SqlPlan prepare(String schema, String sql, List<Object> arguments, SqlExpectedResultType expectedResultType) {
    // The cache key combines the schema search paths with the SQL text, so the same
    // statement prepared under a different schema gets its own plan.
    List<List<String>> searchPaths = prepareSearchPaths(schema);
    PlanKey planKey = new PlanKey(searchPaths, sql);
    SqlPlan plan = planCache.get(planKey);
    if (plan == null) {
        // Cache miss: snapshot the current schema via the optimizer's table resolvers,
        // optimize the statement, and cache the plan if it is cacheable.
        SqlCatalog catalog = new SqlCatalog(optimizer.tableResolvers());
        plan = optimizer.prepare(new OptimizationTask(sql, arguments, searchPaths, catalog));
        if (plan.isCacheable()) {
            planCache.put(planKey, plan);
        }
    }
    checkReturnType(plan, expectedResultType);
    return plan;
}
Also used: SqlCatalog (com.hazelcast.sql.impl.schema.SqlCatalog), PlanKey (com.hazelcast.sql.impl.optimizer.PlanKey), OptimizationTask (com.hazelcast.sql.impl.optimizer.OptimizationTask), SqlPlan (com.hazelcast.sql.impl.optimizer.SqlPlan), ArrayList (java.util.ArrayList), Arrays.asList (java.util.Arrays.asList), List (java.util.List)
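
For context, a minimal, self-contained sketch of the caching idea above, using only the JDK: the key has to cover both the search paths and the SQL text, otherwise the same statement prepared under different schemas would collide. StatementKey and StatementCache are hypothetical names for illustration, not the Hazelcast PlanKey/PlanCache classes.

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical illustration of the PlanKey idea: the cache key must include the
// schema search paths, because the same SQL text can resolve to different tables
// under different schemas.
final class StatementKey {
    private final List<List<String>> searchPaths;
    private final String sql;

    StatementKey(List<List<String>> searchPaths, String sql) {
        this.searchPaths = searchPaths;
        this.sql = sql;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof StatementKey)) {
            return false;
        }
        StatementKey that = (StatementKey) o;
        return searchPaths.equals(that.searchPaths) && sql.equals(that.sql);
    }

    @Override
    public int hashCode() {
        return Objects.hash(searchPaths, sql);
    }
}

final class StatementCache {
    private final Map<StatementKey, String> plans = new ConcurrentHashMap<>();

    // Return the cached "plan" (a plain string here) or compute and cache one.
    String prepare(List<List<String>> searchPaths, String sql) {
        return plans.computeIfAbsent(new StatementKey(searchPaths, sql), key -> "plan for " + sql);
    }
}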

Example 2 with SqlCatalog

Use of com.hazelcast.sql.impl.schema.SqlCatalog in project hazelcast by hazelcast.

Class ParserOperationsTest, method createContext:

private static OptimizerContext createContext() {
    // Build a single test table "public.t" with columns "a" and "b", expose it through
    // a test resolver, and create an optimizer context over a catalog of those resolvers.
    PartitionedMapTable partitionedMapTable = new PartitionedMapTable("public", "t", "t", Arrays.asList(field("a"), field("b")), new ConstantTableStatistics(100L), null, null, null, null, null, false);
    TableResolver resolver = TestTableResolver.create("public", partitionedMapTable);
    List<TableResolver> tableResolvers = Collections.singletonList(resolver);
    List<List<String>> searchPaths = QueryUtils.prepareSearchPaths(emptyList(), tableResolvers);
    return OptimizerContext.create(new SqlCatalog(tableResolvers), searchPaths, emptyList(), 1, name -> null);
}
Also used: SqlCatalog (com.hazelcast.sql.impl.schema.SqlCatalog), TableResolver (com.hazelcast.sql.impl.schema.TableResolver), TestTableResolver (com.hazelcast.jet.sql.impl.TestTableResolver), PartitionedMapTable (com.hazelcast.sql.impl.schema.map.PartitionedMapTable), Collections.emptyList (java.util.Collections.emptyList), List (java.util.List), ConstantTableStatistics (com.hazelcast.sql.impl.schema.ConstantTableStatistics)
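
A hedged, JDK-only sketch of what constructing a catalog from table resolvers amounts to: each resolver contributes tables, and the catalog groups them into a schema-name to table-name map, which is the shape that getSchemas() exposes in Example 4. ToyTable, ToyResolver, and ToyCatalog are illustrative stand-ins, not the Hazelcast types.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified table: a schema, a name, and a list of field names.
final class ToyTable {
    final String schema;
    final String name;
    final List<String> fields;

    ToyTable(String schema, String name, List<String> fields) {
        this.schema = schema;
        this.name = name;
        this.fields = fields;
    }
}

// Simplified resolver: anything that can list the tables it knows about.
interface ToyResolver {
    List<ToyTable> getTables();
}

final class ToyCatalog {
    private final Map<String, Map<String, ToyTable>> schemas = new HashMap<>();

    // Group every resolved table by schema name, then by table name.
    ToyCatalog(List<ToyResolver> resolvers) {
        for (ToyResolver resolver : resolvers) {
            for (ToyTable table : resolver.getTables()) {
                schemas.computeIfAbsent(table.schema, s -> new HashMap<>()).put(table.name, table);
            }
        }
    }

    Map<String, Map<String, ToyTable>> getSchemas() {
        return schemas;
    }
}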

Example 3 with SqlCatalog

Use of com.hazelcast.sql.impl.schema.SqlCatalog in project hazelcast by hazelcast.

Class ParserNameResolutionTest, method createContext:

private static OptimizerContext createContext() {
    // Two tables with the same field names live in two different schemas, so the tests
    // can exercise resolution of qualified and unqualified names across schemas.
    PartitionedMapTable table1 = new PartitionedMapTable(SCHEMA_1, TABLE_1, TABLE_1, Arrays.asList(field(FIELD_1), field(FIELD_2)), new ConstantTableStatistics(100L), null, null, null, null, null, false);
    PartitionedMapTable table2 = new PartitionedMapTable(SCHEMA_2, TABLE_2, TABLE_2, Arrays.asList(field(FIELD_1), field(FIELD_2)), new ConstantTableStatistics(100L), null, null, null, null, null, false);
    TableResolver resolver1 = TestTableResolver.create(SCHEMA_1, table1);
    TableResolver resolver2 = TestTableResolver.create(SCHEMA_2, table2);
    List<TableResolver> tableResolvers = Arrays.asList(resolver1, resolver2);
    List<List<String>> searchPaths = QueryUtils.prepareSearchPaths(emptyList(), tableResolvers);
    return OptimizerContext.create(new SqlCatalog(tableResolvers), searchPaths, emptyList(), 1, name -> null);
}
Also used: SqlCatalog (com.hazelcast.sql.impl.schema.SqlCatalog), TableResolver (com.hazelcast.sql.impl.schema.TableResolver), TestTableResolver (com.hazelcast.jet.sql.impl.TestTableResolver), PartitionedMapTable (com.hazelcast.sql.impl.schema.map.PartitionedMapTable), Collections.emptyList (java.util.Collections.emptyList), List (java.util.List), SqlNodeList (org.apache.calcite.sql.SqlNodeList), ConstantTableStatistics (com.hazelcast.sql.impl.schema.ConstantTableStatistics)
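
A minimal illustration of what this setup lets the tests exercise: resolving an unqualified table name by walking the search paths in order. This is a JDK-only sketch, not the actual Calcite/Hazelcast resolution code, and it assumes each search path is a list whose last element is a schema name.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

final class NameResolutionSketch {

    // Resolve an unqualified table name against an ordered list of search paths.
    // Assumption of this sketch: the last element of each path is the schema name.
    static Optional<String> resolve(Map<String, Map<String, String>> schemas,
                                    List<List<String>> searchPaths,
                                    String tableName) {
        for (List<String> path : searchPaths) {
            String schema = path.get(path.size() - 1);
            Map<String, String> tables = schemas.get(schema);
            if (tables != null && tables.containsKey(tableName)) {
                return Optional.of(schema + "." + tableName);
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Map<String, Map<String, String>> schemas = new HashMap<>();
        schemas.put("schema1", Map.of("t1", "table1"));
        schemas.put("schema2", Map.of("t2", "table2"));

        List<List<String>> searchPaths = Arrays.asList(
                Arrays.asList("catalog", "schema1"),
                Arrays.asList("catalog", "schema2"));

        // "t2" is not in schema1, so the second search path matches: prints "schema2.t2".
        System.out.println(resolve(schemas, searchPaths, "t2").orElse("not found"));
    }
}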

Example 4 with SqlCatalog

Use of com.hazelcast.sql.impl.schema.SqlCatalog in project hazelcast by hazelcast.

Class PlanCacheChecker, method check:

public void check() {
    if (planCache.size() == 0) {
        return;
    }
    // Collect object IDs
    SqlCatalog catalog = new SqlCatalog(tableResolvers);
    Set<PlanObjectKey> objectKeys = new HashSet<>();
    for (Map<String, Table> tableMap : catalog.getSchemas().values()) {
        for (Table table : tableMap.values()) {
            PlanObjectKey objectKey = table.getObjectKey();
            if (objectKey != null) {
                objectKeys.add(objectKey);
            }
        }
    }
    // Prepare partition distribution
    Map<UUID, PartitionIdSet> partitions = QueryUtils.createPartitionMap(nodeEngine, null, false);
    // Do check
    planCache.check(new PlanCheckContext(objectKeys, partitions));
}
Also used: SqlCatalog (com.hazelcast.sql.impl.schema.SqlCatalog), PlanCheckContext (com.hazelcast.sql.impl.optimizer.PlanCheckContext), Table (com.hazelcast.sql.impl.schema.Table), PlanObjectKey (com.hazelcast.sql.impl.optimizer.PlanObjectKey), PartitionIdSet (com.hazelcast.internal.util.collection.PartitionIdSet), UUID (java.util.UUID), HashSet (java.util.HashSet)
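
A hedged, JDK-only sketch of the invalidation idea behind this check: each cached plan remembers the object keys of the tables it was built against, and a periodic pass evicts plans whose keys are no longer present in the current catalog snapshot. ToyPlan and ToyPlanCache are illustrative names and a simplification of the real SqlPlan/PlanCache contract, which, as the code above shows, also validates the partition distribution.

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// A cached plan that records the object keys of the tables it depends on.
final class ToyPlan {
    final Set<String> objectKeys;

    ToyPlan(Set<String> objectKeys) {
        this.objectKeys = objectKeys;
    }
}

final class ToyPlanCache {
    private final Map<String, ToyPlan> plans = new ConcurrentHashMap<>();

    void put(String sql, ToyPlan plan) {
        plans.put(sql, plan);
    }

    // Evict every plan that references an object key missing from the current catalog.
    void check(Set<String> validObjectKeys) {
        plans.values().removeIf(plan -> !validObjectKeys.containsAll(plan.objectKeys));
    }

    int size() {
        return plans.size();
    }
}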

Aggregations

SqlCatalog (com.hazelcast.sql.impl.schema.SqlCatalog): 4 uses
List (java.util.List): 3 uses
TestTableResolver (com.hazelcast.jet.sql.impl.TestTableResolver): 2 uses
ConstantTableStatistics (com.hazelcast.sql.impl.schema.ConstantTableStatistics): 2 uses
TableResolver (com.hazelcast.sql.impl.schema.TableResolver): 2 uses
PartitionedMapTable (com.hazelcast.sql.impl.schema.map.PartitionedMapTable): 2 uses
Collections.emptyList (java.util.Collections.emptyList): 2 uses
PartitionIdSet (com.hazelcast.internal.util.collection.PartitionIdSet): 1 use
OptimizationTask (com.hazelcast.sql.impl.optimizer.OptimizationTask): 1 use
PlanCheckContext (com.hazelcast.sql.impl.optimizer.PlanCheckContext): 1 use
PlanKey (com.hazelcast.sql.impl.optimizer.PlanKey): 1 use
PlanObjectKey (com.hazelcast.sql.impl.optimizer.PlanObjectKey): 1 use
SqlPlan (com.hazelcast.sql.impl.optimizer.SqlPlan): 1 use
Table (com.hazelcast.sql.impl.schema.Table): 1 use
ArrayList (java.util.ArrayList): 1 use
Arrays.asList (java.util.Arrays.asList): 1 use
HashSet (java.util.HashSet): 1 use
UUID (java.util.UUID): 1 use
SqlNodeList (org.apache.calcite.sql.SqlNodeList): 1 use