Example 1 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.

From class JoinNodeTest, method shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy:

@Test
public void shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy() {
    setupTopicClientExpectations(1, 1);
    buildJoin();
    KsqlConfig ksqlConfig = mock(KsqlConfig.class);
    KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClient.class);
    FunctionRegistry functionRegistry = mock(FunctionRegistry.class);
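    // Stub right-hand table: its buildStream succeeds only when the join has
    // forced auto.offset.reset to earliest into the props it receives.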
    class RightTable extends PlanNode {

        final Schema schema;

        public RightTable(final PlanNodeId id, Schema schema) {
            super(id);
            this.schema = schema;
        }

        @Override
        public Schema getSchema() {
            return schema;
        }

        @Override
        public Field getKeyField() {
            return null;
        }

        @Override
        public List<PlanNode> getSources() {
            return null;
        }

        @Override
        public SchemaKStream buildStream(
                StreamsBuilder builder,
                KsqlConfig ksqlConfig,
                KafkaTopicClient kafkaTopicClient,
                FunctionRegistry functionRegistry,
                Map<String, Object> props,
                SchemaRegistryClient schemaRegistryClient) {
            if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
                    && props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
                            .toString().equalsIgnoreCase("EARLIEST")) {
                return mock(SchemaKTable.class);
            } else {
                throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
            }
        }

        @Override
        protected int getPartitions(KafkaTopicClient kafkaTopicClient) {
            return 1;
        }
    }
    RightTable rightTable = new RightTable(new PlanNodeId("1"), joinNode.getRight().getSchema());
    JoinNode testJoinNode = new JoinNode(
        joinNode.getId(),
        joinNode.getType(),
        joinNode.getLeft(),
        rightTable,
        joinNode.getLeftKeyFieldName(),
        joinNode.getRightKeyFieldName(),
        joinNode.getLeftAlias(),
        joinNode.getRightAlias());
    testJoinNode.tableForJoin(
        builder, ksqlConfig, kafkaTopicClient, functionRegistry,
        new HashMap<>(), new MockSchemaRegistryClient());
}
Also used: MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) Schema(org.apache.kafka.connect.data.Schema) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KsqlException(io.confluent.ksql.util.KsqlException) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) HashMap(java.util.HashMap) Map(java.util.Map) SchemaRegistryClient(io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) Test(org.junit.Test)
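
For context, the consumer setting this test asserts on is, in a plain Kafka Streams application, supplied through the streams configuration. A minimal sketch, assuming a placeholder application id and broker address:

import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.streams.StreamsConfig;

public class OffsetResetConfigSketch {

    public static Properties buildProps() {
        final Properties props = new Properties();
        // Placeholder application id and bootstrap servers.
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "example-app");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // The consumer-level setting the test above asserts on: read from the
        // beginning of each topic when no committed offset exists.
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return props;
    }
}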

Example 2 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.

From class KsqlStructuredDataOutputNodeTest, method shouldCreateSinkWithCorrectCleanupPolicyStream:

@Test
public void shouldCreateSinkWithCorrectCleanupPolicyStream() {
    KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
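    // Record the expected interaction before replay: the sink topic "output"
    // is created with 4 partitions and replication factor 3.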
    topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
    EasyMock.replay(topicClientForWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(
        streamsBuilder, ksqlConfig, topicClientForWindowTable,
        new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKStream.class));
    EasyMock.verify(topicClientForWindowTable);
}
Also used: StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) SchemaKStream(io.confluent.ksql.structured.SchemaKStream) Test(org.junit.Test)
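
The test above exercises the node against a fresh StreamsBuilder with mocked collaborators. In plain Kafka Streams, a topology built from a StreamsBuilder can be exercised without a broker via TopologyTestDriver (which also appears in the Aggregations below). A minimal sketch, assuming a recent kafka-streams-test-utils (2.4 or later) and placeholder topic names:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class TopologyDriverSketch {

    public static void main(String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        // Trivial topology: copy "input" to "output" unchanged.
        builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
                .to("output", Produced.with(Serdes.String(), Serdes.String()));

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "driver-sketch");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

        // Drive the topology in-process, with no broker involved.
        try (TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) {
            driver.createInputTopic("input", new StringSerializer(), new StringSerializer())
                    .pipeInput("key", "value");
            System.out.println(driver.createOutputTopic(
                    "output", new StringDeserializer(), new StringDeserializer()).readKeyValue());
        }
    }
}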

Example 3 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.

From class SchemaKStreamTest, method init:

@Before
public void init() {
    functionRegistry = new FunctionRegistry();
    ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
    StreamsBuilder builder = new StreamsBuilder();
    kStream = builder.stream(
        ksqlStream.getKsqlTopic().getKafkaTopicName(),
        Consumed.with(
            Serdes.String(),
            ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
                null, new KsqlConfig(Collections.emptyMap()), false,
                new MockSchemaRegistryClient())));
}
Also used: StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) Before(org.junit.Before)

Example 4 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.

From class SchemaKTableTest, method init:

@Before
public void init() {
    functionRegistry = new FunctionRegistry();
    ksqlTable = (KsqlTable) metaStore.getSource("TEST2");
    StreamsBuilder builder = new StreamsBuilder();
    kTable = builder.table(
        ksqlTable.getKsqlTopic().getKafkaTopicName(),
        Consumed.with(
            Serdes.String(),
            ksqlTable.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
                null, new KsqlConfig(Collections.emptyMap()), false,
                new MockSchemaRegistryClient())));
}
Also used: StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) FunctionRegistry(io.confluent.ksql.function.FunctionRegistry) MockSchemaRegistryClient(io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) KsqlConfig(io.confluent.ksql.util.KsqlConfig) Before(org.junit.Before)
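
Examples 3 and 4 differ only in the entry point: builder.stream yields a KStream, where every record is an independent event, while builder.table yields a KTable, where records sharing a key are treated as updates and only the latest value per key is current. A minimal sketch of the contrast, with placeholder topic names:

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;

public class StreamVersusTableSketch {

    public static void main(String[] args) {
        final StreamsBuilder builder = new StreamsBuilder();
        final Consumed<String, String> consumed =
                Consumed.with(Serdes.String(), Serdes.String());
        // Every record in "events" is a standalone fact.
        final KStream<String, String> events = builder.stream("events", consumed);
        // Records in "snapshots" with the same key overwrite each other.
        final KTable<String, String> latest = builder.table("snapshots", consumed);
        // Print the resulting topology for inspection.
        System.out.println(builder.build().describe());
    }
}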

Example 5 with StreamsBuilder

Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.

From class KsqlStructuredDataOutputNode, method buildStream:

@Override
public SchemaKStream buildStream(
        final StreamsBuilder builder,
        final KsqlConfig ksqlConfig,
        final KafkaTopicClient kafkaTopicClient,
        final FunctionRegistry functionRegistry,
        final Map<String, Object> props,
        final SchemaRegistryClient schemaRegistryClient) {
    final SchemaKStream schemaKStream = getSource().buildStream(
        builder, ksqlConfig, kafkaTopicClient, functionRegistry, props, schemaRegistryClient);
    final Set<Integer> rowkeyIndexes = SchemaUtil.getRowTimeRowKeyIndexes(getSchema());
    final Builder outputNodeBuilder = Builder.from(this);
    final Schema schema = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(getSchema());
    outputNodeBuilder.withSchema(schema);
    if (getTopicSerde() instanceof KsqlAvroTopicSerDe) {
        addAvroSchemaToResultTopic(outputNodeBuilder);
    }
    final Map<String, Object> outputProperties = getOutputProperties();
    if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY)) {
        ksqlConfig.put(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY, outputProperties.get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY));
    }
    if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY)) {
        ksqlConfig.put(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY, outputProperties.get(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY));
    }
    final SchemaKStream result = createOutputStream(schemaKStream, outputNodeBuilder, functionRegistry, outputProperties, schemaRegistryClient);
    final KsqlStructuredDataOutputNode noRowKey = outputNodeBuilder.build();
    createSinkTopic(noRowKey.getKafkaTopicName(), ksqlConfig, kafkaTopicClient, shoulBeCompacted(result));
    result.into(
        noRowKey.getKafkaTopicName(),
        noRowKey.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
            noRowKey.getSchema(), ksqlConfig, false, schemaRegistryClient),
        rowkeyIndexes);
    result.setOutputNode(
        outputNodeBuilder
            .withSchema(SchemaUtil.addImplicitRowTimeRowKeyToSchema(noRowKey.getSchema()))
            .build());
    return result;
}
Also used: KsqlAvroTopicSerDe(io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) Schema(org.apache.kafka.connect.data.Schema) SchemaKStream(io.confluent.ksql.structured.SchemaKStream)
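
The createSinkTopic call above provisions the output topic before the stream writes into it. Outside KSQL, the analogous step uses the Kafka admin client. A minimal sketch, with the broker address as a placeholder and partition and replica counts mirroring the values from Example 2:

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class SinkTopicSketch {

    public static void main(String[] args) throws Exception {
        final Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            // Create "output" with 4 partitions and replication factor 3,
            // the plain-Kafka counterpart of
            // KafkaTopicClient.createTopic("output", 4, (short) 3, ...).
            admin.createTopics(Collections.singleton(new NewTopic("output", 4, (short) 3)))
                    .all()
                    .get();
        }
    }
}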

Aggregations

Occurrence counts for StreamsBuilder and the classes most often used alongside it:

StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 593
Test (org.junit.Test): 462
Properties (java.util.Properties): 244
KafkaStreams (org.apache.kafka.streams.KafkaStreams): 203
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 173
KeyValue (org.apache.kafka.streams.KeyValue): 166
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 157
Serdes (org.apache.kafka.common.serialization.Serdes): 140
Topology (org.apache.kafka.streams.Topology): 120
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 113
IntegrationTest (org.apache.kafka.test.IntegrationTest): 104
MockApiProcessorSupplier (org.apache.kafka.test.MockApiProcessorSupplier): 95
Before (org.junit.Before): 93
KStream (org.apache.kafka.streams.kstream.KStream): 89
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 86
Consumed (org.apache.kafka.streams.kstream.Consumed): 86
List (java.util.List): 79
Duration (java.time.Duration): 77
KTable (org.apache.kafka.streams.kstream.KTable): 76
KeyValueStore (org.apache.kafka.streams.state.KeyValueStore): 75
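
The most frequent co-occurrences above, StreamsBuilder with Properties, StreamsConfig, Serdes, Topology, and KafkaStreams, compose into the canonical application skeleton. A minimal runnable sketch, assuming placeholder topic names and broker address:

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;

public class StreamsAppSketch {

    public static void main(String[] args) {
        // Wire the processing topology through a StreamsBuilder.
        final StreamsBuilder builder = new StreamsBuilder();
        builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
                .mapValues(value -> value.toUpperCase())
                .to("output", Produced.with(Serdes.String(), Serdes.String()));
        final Topology topology = builder.build();

        final Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "sketch-app");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        final KafkaStreams streams = new KafkaStreams(topology, props);
        streams.start();
        // Close cleanly on shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    }
}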