
Example 1 with FunctionRegistry

Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.

From the class JoinNodeTest, the method shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy:

@Test
public void shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy() {
    setupTopicClientExpectations(1, 1);
    buildJoin();
    KsqlConfig ksqlConfig = mock(KsqlConfig.class);
    KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClient.class);
    FunctionRegistry functionRegistry = mock(FunctionRegistry.class);
    // Stand-in right-hand source whose buildStream acts as the assertion: it succeeds only
    // when the props handed down by tableForJoin carry auto.offset.reset=earliest.
    class RightTable extends PlanNode {

        final Schema schema;

        public RightTable(final PlanNodeId id, Schema schema) {
            super(id);
            this.schema = schema;
        }

        @Override
        public Schema getSchema() {
            return schema;
        }

        @Override
        public Field getKeyField() {
            return null;
        }

        @Override
        public List<PlanNode> getSources() {
            return null;
        }

        @Override
        public SchemaKStream buildStream(StreamsBuilder builder, KsqlConfig ksqlConfig, KafkaTopicClient kafkaTopicClient, FunctionRegistry functionRegistry, Map<String, Object> props, SchemaRegistryClient schemaRegistryClient) {
            if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG) && props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).toString().equalsIgnoreCase("EARLIEST")) {
                return mock(SchemaKTable.class);
            } else {
                throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
            }
        }

        @Override
        protected int getPartitions(KafkaTopicClient kafkaTopicClient) {
            return 1;
        }
    }
    RightTable rightTable = new RightTable(new PlanNodeId("1"), joinNode.getRight().getSchema());
    JoinNode testJoinNode = new JoinNode(joinNode.getId(), joinNode.getType(), joinNode.getLeft(), rightTable, joinNode.getLeftKeyFieldName(), joinNode.getRightKeyFieldName(), joinNode.getLeftAlias(), joinNode.getRightAlias());
    testJoinNode.tableForJoin(builder, ksqlConfig, kafkaTopicClient, functionRegistry, new HashMap<>(), new MockSchemaRegistryClient());
}
Also used : MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Schema(org.apache.kafka.connect.data.Schema) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KsqlException(io.confluent.ksql.util.KsqlException) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) HashMap(java.util.HashMap) Map(java.util.Map) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
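The overridden buildStream above doubles as the test's assertion: tableForJoin is expected to add auto.offset.reset=earliest to the properties it forwards to the right-hand source. A minimal stand-alone sketch of that same check, where the helper name is purely illustrative and only the props map from the example is assumed:

    // Hypothetical helper mirroring the check inside RightTable.buildStream.
    private static void assertEarliestOffsetReset(final Map<String, Object> props) {
        final Object reset = props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG);
        if (reset == null || !"EARLIEST".equalsIgnoreCase(reset.toString())) {
            throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
        }
    }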

Example 2 with FunctionRegistry

Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.

From the class KsqlBareOutputNodeTest, the method build:

private SchemaKStream build() {
    // Build the logical plan for a simple projection with a filter, then materialize it into a stream.
    final String simpleSelectFilter = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    final KsqlBareOutputNode planNode = (KsqlBareOutputNode) planBuilder.buildLogicalPlan(simpleSelectFilter);
    return planNode.buildStream(builder, new KsqlConfig(Collections.emptyMap()), new FakeKafkaTopicClient(), new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
}
Also used : FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) FakeKafkaTopicClient(io.confluent.ksql.util.FakeKafkaTopicClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig)
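A test method would then exercise this helper and assert on the result; the sketch below mirrors the instanceOf-style assertion used in Example 3 and assumes nothing beyond the helper above:

    @Test
    public void shouldBuildSchemaKStream() {
        final SchemaKStream stream = build();
        assertThat(stream, instanceOf(SchemaKStream.class));
    }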

Example 3 with FunctionRegistry

Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.

From the class KsqlStructuredDataOutputNodeTest, the method shouldCreateSinkWithCorrectCleanupPolicyStream:

@Test
public void shouldCreateSinkWithCorrectCleanupPolicyStream() {
    KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    // Record the expected topic creation; a stream sink passes no extra topic configs.
    topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
    EasyMock.replay(topicClientForWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig, topicClientForWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKStream.class));
    EasyMock.verify(topicClientForWindowTable);
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) Test(org.junit.Test)
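The table-sink counterpart of this test would record the expectation with a compact cleanup policy instead of an empty config map; a minimal sketch, where the use of Kafka's TopicConfig constants is an assumption about how the sink assembles its topic configs:

    // Sketch only: assumes the table sink passes cleanup.policy=compact in the topic config map.
    topicClientForWindowTable.createTopic("output", 4, (short) 3,
        Collections.singletonMap(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT));
    EasyMock.replay(topicClientForWindowTable);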

Example 4 with FunctionRegistry

Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.

From the class AggregateAnalyzerTest, the method init:

@Before
public void init() {
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
}
Also used : FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) Before(org.junit.Before)
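A follow-up test might query the freshly constructed registry, for example to confirm that a built-in aggregate is known; the method name isAnAggregateFunction below is an assumption about this ksql version's FunctionRegistry API, not something shown in the example:

    // Sketch only: isAnAggregateFunction is assumed to exist on this FunctionRegistry version.
    @Test
    public void shouldKnowBuiltInAggregateFunction() {
        assertThat(functionRegistry.isAnAggregateFunction("COUNT"), equalTo(true));
    }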

Example 5 with FunctionRegistry

Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.

From the class CodeGenRunnerTest, the method init:

@Before
public void init() {
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
    // Schema as the code generator sees it, with fully qualified column names.
    schema = SchemaBuilder.struct()
            .field("CODEGEN_TEST.COL0", SchemaBuilder.INT64_SCHEMA)
            .field("CODEGEN_TEST.COL1", SchemaBuilder.STRING_SCHEMA)
            .field("CODEGEN_TEST.COL2", SchemaBuilder.STRING_SCHEMA)
            .field("CODEGEN_TEST.COL3", SchemaBuilder.FLOAT64_SCHEMA)
            .field("CODEGEN_TEST.COL4", SchemaBuilder.FLOAT64_SCHEMA)
            .field("CODEGEN_TEST.COL5", SchemaBuilder.INT32_SCHEMA)
            .field("CODEGEN_TEST.COL6", SchemaBuilder.BOOLEAN_SCHEMA)
            .field("CODEGEN_TEST.COL7", SchemaBuilder.BOOLEAN_SCHEMA)
            .field("CODEGEN_TEST.COL8", SchemaBuilder.INT64_SCHEMA)
            .field("CODEGEN_TEST.COL9", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
            .field("CODEGEN_TEST.COL10", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
            .field("CODEGEN_TEST.COL11", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA))
            .field("CODEGEN_TEST.COL12", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA));
    // The equivalent schema registered in the metastore, with unqualified column names.
    Schema metaStoreSchema = SchemaBuilder.struct()
            .field("COL0", SchemaBuilder.INT64_SCHEMA)
            .field("COL1", SchemaBuilder.STRING_SCHEMA)
            .field("COL2", SchemaBuilder.STRING_SCHEMA)
            .field("COL3", SchemaBuilder.FLOAT64_SCHEMA)
            .field("COL4", SchemaBuilder.FLOAT64_SCHEMA)
            .field("COL5", SchemaBuilder.INT32_SCHEMA)
            .field("COL6", SchemaBuilder.BOOLEAN_SCHEMA)
            .field("COL7", SchemaBuilder.BOOLEAN_SCHEMA)
            .field("COL8", SchemaBuilder.INT64_SCHEMA)
            .field("COL9", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
            .field("COL10", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
            .field("COL11", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA))
            .field("COL12", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA));
    KsqlTopic ksqlTopic = new KsqlTopic("CODEGEN_TEST", "codegen_test", new KsqlJsonTopicSerDe());
    KsqlStream ksqlStream = new KsqlStream("sqlexpression", "CODEGEN_TEST", metaStoreSchema, metaStoreSchema.field("COL0"), null, ksqlTopic);
    metaStore.putTopic(ksqlTopic);
    metaStore.putSource(ksqlStream);
    codeGenRunner = new CodeGenRunner(schema, functionRegistry);
}
Also used : FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KsqlStream(io.confluent.ksql.metastore.KsqlStream) KsqlJsonTopicSerDe(io.confluent.ksql.serde.json.KsqlJsonTopicSerDe) Schema(org.apache.kafka.connect.data.Schema) KsqlTopic(io.confluent.ksql.metastore.KsqlTopic) Before(org.junit.Before)
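Later test methods use this runner to compile parsed expressions; a minimal sketch, where buildCodeGenFromParseTree and the origin of the expression are assumptions about this ksql version rather than part of the snippet above:

    // Sketch only: assumes CodeGenRunner#buildCodeGenFromParseTree(Expression), which may
    // declare "throws Exception", and that `expression` was parsed elsewhere
    // (e.g. from "SELECT col0 + col5 FROM codegen_test;").
    final ExpressionMetadata metadata = codeGenRunner.buildCodeGenFromParseTree(expression);
    // The compiled metadata can then be evaluated against a row's column values.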

Aggregations

FunctionRegistry (io.confluent.ksql.function.FunctionRegistry): 16 usages
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 9 usages
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 8 usages
Before (org.junit.Before): 8 usages
KsqlConfig (io.confluent.ksql.util.KsqlConfig): 7 usages
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient): 4 usages
Test (org.junit.Test): 4 usages
SchemaKStream (io.confluent.ksql.structured.SchemaKStream): 3 usages
Schema (org.apache.kafka.connect.data.Schema): 3 usages
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient): 2 usages
CodeGenRunner (io.confluent.ksql.codegen.CodeGenRunner): 2 usages
Expression (io.confluent.ksql.parser.tree.Expression): 2 usages
SchemaKTable (io.confluent.ksql.structured.SchemaKTable): 2 usages
ExpressionMetadata (io.confluent.ksql.util.ExpressionMetadata): 2 usages
FakeKafkaTopicClient (io.confluent.ksql.util.FakeKafkaTopicClient): 2 usages
KsqlException (io.confluent.ksql.util.KsqlException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
HashMap (java.util.HashMap): 2 usages
GenericRow (io.confluent.ksql.GenericRow): 1 usage
KsqlStream (io.confluent.ksql.metastore.KsqlStream): 1 usage