Search in sources :

Example 1 with MetaStore

use of io.confluent.ksql.metastore.MetaStore in project ksql by confluentinc.

In the class KsqlEngine, the method buildMultipleQueries:

/**
 * Parses and runs the set of queries contained in the given query string.
 *
 * <p>All statements are staged against a clone of the current metastore so that a
 * multi-statement submission succeeds or fails as a whole, leaving the live
 * metastore untouched if any statement fails.
 *
 * @param queriesString        the KSQL statement(s) to execute.
 * @param overriddenProperties per-request property overrides; must not contain any
 *                             property listed in {@code IMMUTABLE_PROPERTIES}.
 * @return metadata for each query that was built.
 * @throws IllegalArgumentException if an immutable property is overridden.
 * @throws Exception if parsing or planning of any statement fails.
 */
public List<QueryMetadata> buildMultipleQueries(final String queriesString, final Map<String, Object> overriddenProperties) throws Exception {
    // Reject any attempt to override a property the engine requires to stay fixed.
    for (String property : overriddenProperties.keySet()) {
        if (IMMUTABLE_PROPERTIES.contains(property)) {
            throw new IllegalArgumentException(String.format("Cannot override property '%s'", property));
        }
    }
    // Multiple queries submitted at the same time should succeed or fail as a whole,
    // thus we use tempMetaStore to stage newly created tables, streams or topics.
    MetaStore tempMetaStore = metaStore.clone();
    // Build the query ASTs from the query string, then plan them against the staged store.
    List<Pair<String, Statement>> queries = parseQueries(queriesString, overriddenProperties, tempMetaStore);
    return planQueries(queries, overriddenProperties, tempMetaStore);
}
Also used : MetaStore(io.confluent.ksql.metastore.MetaStore) Pair(io.confluent.ksql.util.Pair)

Example 2 with MetaStore

use of io.confluent.ksql.metastore.MetaStore in project ksql by confluentinc.

In the class KsqlEngine, the method parseQueries:

// Visible for Testing
List<Pair<String, Statement>> parseQueries(final String queriesString, final Map<String, Object> overriddenProperties, final MetaStore tempMetaStore) {
    try {
        // Prepare statements against a private clone so that statement preparation
        // can register intermediate sources without touching the caller's staging store.
        final MetaStore parserMetaStore = tempMetaStore.clone();
        final KsqlParser parser = new KsqlParser();
        final List<Pair<String, Statement>> result = new ArrayList<>();
        for (final SqlBaseParser.SingleStatementContext statementContext : parser.getStatements(queriesString)) {
            final Pair<Statement, DataSourceExtractor> prepared = parser.prepareStatement(statementContext, parserMetaStore);
            final Pair<String, Statement> entry = buildSingleQueryAst(prepared.getLeft(), getStatementString(statementContext), tempMetaStore, parserMetaStore, overriddenProperties);
            // buildSingleQueryAst returns null for statements that produce no query pair.
            if (entry != null) {
                result.add(entry);
            }
        }
        return result;
    } catch (Exception e) {
        // Wrap everything in a ParseFailedException, preserving the original cause.
        throw new ParseFailedException("Parsing failed on KsqlEngine msg:" + e.getMessage(), e);
    }
}
Also used : DataSourceExtractor(io.confluent.ksql.util.DataSourceExtractor) Statement(io.confluent.ksql.parser.tree.Statement) DdlStatement(io.confluent.ksql.parser.tree.DdlStatement) ParseFailedException(io.confluent.ksql.parser.exception.ParseFailedException) ArrayList(java.util.ArrayList) ParseFailedException(io.confluent.ksql.parser.exception.ParseFailedException) KsqlException(io.confluent.ksql.util.KsqlException) MetaStore(io.confluent.ksql.metastore.MetaStore) SqlBaseParser(io.confluent.ksql.parser.SqlBaseParser) KsqlParser(io.confluent.ksql.parser.KsqlParser) Pair(io.confluent.ksql.util.Pair)

Example 3 with MetaStore

use of io.confluent.ksql.metastore.MetaStore in project ksql by confluentinc.

In the class QueryEngine, the method buildLogicalPlans:

List<Pair<String, PlanNode>> buildLogicalPlans(final MetaStore metaStore, final List<Pair<String, Statement>> statementList) {
    // NOTE(review): planning runs against a clone, presumably so planning side effects
    // stay local to this call — the original author left a TODO questioning this; confirm.
    final MetaStore stagingMetaStore = metaStore.clone();
    final List<Pair<String, PlanNode>> plans = new ArrayList<>();
    for (final Pair<String, Statement> entry : statementList) {
        final Statement statement = entry.getRight();
        // Only Query statements get a logical plan; all others are carried through with null.
        final PlanNode plan = (statement instanceof Query)
                ? buildQueryLogicalPlan(entry.getLeft(), (Query) statement, stagingMetaStore)
                : null;
        plans.add(new Pair<>(entry.getLeft(), plan));
        log.info("Build logical plan for {}.", entry.getLeft());
    }
    return plans;
}
Also used : MetaStore(io.confluent.ksql.metastore.MetaStore) PlanNode(io.confluent.ksql.planner.plan.PlanNode) Query(io.confluent.ksql.parser.tree.Query) DdlStatement(io.confluent.ksql.parser.tree.DdlStatement) AbstractStreamCreateStatement(io.confluent.ksql.parser.tree.AbstractStreamCreateStatement) Statement(io.confluent.ksql.parser.tree.Statement) ArrayList(java.util.ArrayList) Pair(io.confluent.ksql.util.Pair)

Example 4 with MetaStore

use of io.confluent.ksql.metastore.MetaStore in project ksql by confluentinc.

In the class JoinNodeTest, the method shouldHaveAllFieldsFromJoinedInputs:

@Test
public void shouldHaveAllFieldsFromJoinedInputs() {
    setupTopicClientExpectations(1, 1);
    buildJoin();
    final MetaStore metaStore = MetaStoreFixture.getNewMetaStore();
    final StructuredDataSource leftSource = metaStore.getSource("TEST1");
    final StructuredDataSource rightSource = metaStore.getSource("TEST2");
    // Every field from both join inputs should appear, qualified by its join alias.
    final Set<String> expected = leftSource.getSchema().fields().stream()
            .map(field -> "T1." + field.name())
            .collect(Collectors.toSet());
    rightSource.getSchema().fields().stream()
            .map(field -> "T2." + field.name())
            .forEach(expected::add);
    final Set<String> actualFields = stream.getSchema().fields().stream()
            .map(Field::name)
            .collect(Collectors.toSet());
    assertThat(actualFields, equalTo(expected));
}
Also used : IntStream(java.util.stream.IntStream) Arrays(java.util.Arrays) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) SchemaKTable(io.confluent.ksql.structured.SchemaKTable) LogicalPlanBuilder(io.confluent.ksql.structured.LogicalPlanBuilder) CoreMatchers.equalTo(org.hamcrest.CoreMatchers.equalTo) EasyMock.mock(org.easymock.EasyMock.mock) HashMap(java.util.HashMap) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) Schema(org.apache.kafka.connect.data.Schema) TopologyDescription(org.apache.kafka.streams.TopologyDescription) Map(java.util.Map) MetaStore(io.confluent.ksql.metastore.MetaStore) TopicDescription(org.apache.kafka.clients.admin.TopicDescription) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) LinkedList(java.util.LinkedList) Before(org.junit.Before) Utils(org.apache.kafka.common.utils.Utils) TopicPartitionInfo(org.apache.kafka.common.TopicPartitionInfo) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SOURCE_NODE(io.confluent.ksql.planner.plan.PlanTestUtil.SOURCE_NODE) Field(org.apache.kafka.connect.data.Field) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) Set(java.util.Set) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) Test(org.junit.Test) EasyMock(org.easymock.EasyMock) KsqlConfig(io.confluent.ksql.util.KsqlConfig) MetaStoreFixture(io.confluent.ksql.util.MetaStoreFixture) Collectors(java.util.stream.Collectors) List(java.util.List) MAPVALUES_NODE(io.confluent.ksql.planner.plan.PlanTestUtil.MAPVALUES_NODE) KsqlException(io.confluent.ksql.util.KsqlException) Node(org.apache.kafka.common.Node) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) Assert(org.junit.Assert) Collections(java.util.Collections) Topology(org.apache.kafka.streams.Topology) 
PlanTestUtil.getNodeByName(io.confluent.ksql.planner.plan.PlanTestUtil.getNodeByName) MetaStore(io.confluent.ksql.metastore.MetaStore) StructuredDataSource(io.confluent.ksql.metastore.StructuredDataSource) Test(org.junit.Test)

Example 5 with MetaStore

use of io.confluent.ksql.metastore.MetaStore in project ksql by confluentinc.

In the class MetaStoreFixture, the method getNewMetaStore:

public static MetaStore getNewMetaStore() {
    final MetaStore metaStore = new MetaStoreImpl();
    // TEST1: a stream with array- and map-valued columns, keyed on COL0.
    final SchemaBuilder test1Schema = SchemaBuilder.struct()
            .field("COL0", SchemaBuilder.INT64_SCHEMA)
            .field("COL1", SchemaBuilder.STRING_SCHEMA)
            .field("COL2", SchemaBuilder.STRING_SCHEMA)
            .field("COL3", SchemaBuilder.FLOAT64_SCHEMA)
            .field("COL4", SchemaBuilder.array(SchemaBuilder.FLOAT64_SCHEMA))
            .field("COL5", SchemaBuilder.map(SchemaBuilder.STRING_SCHEMA, SchemaBuilder.FLOAT64_SCHEMA));
    final KsqlTopic test1Topic = new KsqlTopic("TEST1", "test1", new KsqlJsonTopicSerDe());
    final KsqlStream test1Stream = new KsqlStream("sqlexpression", "TEST1", test1Schema, test1Schema.field("COL0"), null, test1Topic);
    metaStore.putTopic(test1Topic);
    metaStore.putSource(test1Stream);
    // TEST2: a non-windowed table keyed on COL0.
    final SchemaBuilder test2Schema = SchemaBuilder.struct()
            .field("COL0", SchemaBuilder.INT64_SCHEMA)
            .field("COL1", SchemaBuilder.STRING_SCHEMA)
            .field("COL2", SchemaBuilder.STRING_SCHEMA)
            .field("COL3", SchemaBuilder.FLOAT64_SCHEMA)
            .field("COL4", SchemaBuilder.BOOLEAN_SCHEMA);
    final KsqlTopic test2Topic = new KsqlTopic("TEST2", "test2", new KsqlJsonTopicSerDe());
    final KsqlTable test2Table = new KsqlTable("sqlexpression", "TEST2", test2Schema, test2Schema.field("COL0"), null, test2Topic, "TEST2", false);
    metaStore.putTopic(test2Topic);
    metaStore.putSource(test2Table);
    // ORDERS: a stream keyed on ORDERTIME.
    final SchemaBuilder ordersSchema = SchemaBuilder.struct()
            .field("ORDERTIME", SchemaBuilder.INT64_SCHEMA)
            .field("ORDERID", SchemaBuilder.STRING_SCHEMA)
            .field("ITEMID", SchemaBuilder.STRING_SCHEMA)
            .field("ORDERUNITS", SchemaBuilder.FLOAT64_SCHEMA);
    final KsqlTopic ordersTopic = new KsqlTopic("ORDERS_TOPIC", "orders_topic", new KsqlJsonTopicSerDe());
    final KsqlStream ordersStream = new KsqlStream("sqlexpression", "ORDERS", ordersSchema, ordersSchema.field("ORDERTIME"), null, ordersTopic);
    metaStore.putTopic(ordersTopic);
    metaStore.putSource(ordersStream);
    return metaStore;
}
Also used : MetaStore(io.confluent.ksql.metastore.MetaStore) KsqlStream(io.confluent.ksql.metastore.KsqlStream) MetaStoreImpl(io.confluent.ksql.metastore.MetaStoreImpl) KsqlJsonTopicSerDe(io.confluent.ksql.serde.json.KsqlJsonTopicSerDe) KsqlTable(io.confluent.ksql.metastore.KsqlTable) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic)

Aggregations

MetaStore (io.confluent.ksql.metastore.MetaStore)5 Pair (io.confluent.ksql.util.Pair)3 DdlStatement (io.confluent.ksql.parser.tree.DdlStatement)2 Statement (io.confluent.ksql.parser.tree.Statement)2 KsqlException (io.confluent.ksql.util.KsqlException)2 ArrayList (java.util.ArrayList)2 MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient)1 SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient)1 FunctionRegistry (io.confluent.ksql.function.FunctionRegistry)1 KsqlStream (io.confluent.ksql.metastore.KsqlStream)1 KsqlTable (io.confluent.ksql.metastore.KsqlTable)1 KsqlTopic (io.confluent.ksql.metastore.KsqlTopic)1 MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl)1 StructuredDataSource (io.confluent.ksql.metastore.StructuredDataSource)1 KsqlParser (io.confluent.ksql.parser.KsqlParser)1 SqlBaseParser (io.confluent.ksql.parser.SqlBaseParser)1 ParseFailedException (io.confluent.ksql.parser.exception.ParseFailedException)1 AbstractStreamCreateStatement (io.confluent.ksql.parser.tree.AbstractStreamCreateStatement)1 Query (io.confluent.ksql.parser.tree.Query)1 PlanNode (io.confluent.ksql.planner.plan.PlanNode)1