Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.
From the class JoinNodeTest, method shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy:
@Test
public void shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy() {
  setupTopicClientExpectations(1, 1);
  buildJoin();
  KsqlConfig ksqlConfig = mock(KsqlConfig.class);
  KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClient.class);
  FunctionRegistry functionRegistry = mock(FunctionRegistry.class);

  // Stub right-hand plan node: building it succeeds only when the props map
  // carries auto.offset.reset=earliest, which is exactly what the test asserts.
  class RightTable extends PlanNode {
    final Schema schema;

    public RightTable(final PlanNodeId id, final Schema schema) {
      super(id);
      this.schema = schema;
    }

    @Override
    public Schema getSchema() {
      return schema;
    }

    @Override
    public Field getKeyField() {
      return null;
    }

    @Override
    public List<PlanNode> getSources() {
      return null;
    }

    @Override
    public SchemaKStream buildStream(
        final StreamsBuilder builder,
        final KsqlConfig ksqlConfig,
        final KafkaTopicClient kafkaTopicClient,
        final FunctionRegistry functionRegistry,
        final Map<String, Object> props,
        final SchemaRegistryClient schemaRegistryClient) {
      if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
          && props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
              .toString().equalsIgnoreCase("EARLIEST")) {
        return mock(SchemaKTable.class);
      } else {
        throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
      }
    }

    @Override
    protected int getPartitions(final KafkaTopicClient kafkaTopicClient) {
      return 1;
    }
  }

  RightTable rightTable = new RightTable(new PlanNodeId("1"), joinNode.getRight().getSchema());
  JoinNode testJoinNode = new JoinNode(
      joinNode.getId(), joinNode.getType(), joinNode.getLeft(), rightTable,
      joinNode.getLeftKeyFieldName(), joinNode.getRightKeyFieldName(),
      joinNode.getLeftAlias(), joinNode.getRightAlias());
  // 'builder' is the enclosing test's StreamsBuilder instance.
  testJoinNode.tableForJoin(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry,
      new HashMap<>(), new MockSchemaRegistryClient());
}
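The policy this test enforces is the standard consumer offset-reset setting. For context, a minimal sketch of the same property in an ordinary consumer configuration (values illustrative, not part of the test above):

  Properties consumerProps = new Properties();
  // "earliest" makes a consumer with no committed offset start from the beginning of the topic.
  consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");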
Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNodeTest, method shouldCreateSinkWithCorrectCleanupPolicyStream:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyStream() {
  KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
  StreamsBuilder streamsBuilder = new StreamsBuilder();
  // Record the expected call: the sink topic "output" should be created
  // with 4 partitions and a replication factor of 3.
  topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
  EasyMock.replay(topicClientForWindowTable);
  SchemaKStream schemaKStream = outputNode.buildStream(
      streamsBuilder, ksqlConfig, topicClientForWindowTable,
      new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKStream.class));
  // Pass the mock explicitly; EasyMock.verify() with no arguments verifies nothing.
  EasyMock.verify(topicClientForWindowTable);
}
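The test above follows EasyMock's record/replay/verify lifecycle. The same lifecycle in isolation (the client variable and the expectation are illustrative):

  KafkaTopicClient client = EasyMock.mock(KafkaTopicClient.class);
  client.createTopic("output", 4, (short) 3, Collections.emptyMap()); // record the expected call
  EasyMock.replay(client);                                            // switch to replay mode
  // ... exercise the code under test, which should invoke createTopic ...
  EasyMock.verify(client);                                            // fails if the call never happened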
Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.
From the class SchemaKStreamTest, method init:
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
  StreamsBuilder builder = new StreamsBuilder();
  kStream = builder.stream(
      ksqlStream.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(
          Serdes.String(),
          ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
              null, new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
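Outside a test harness, a topology assembled with a StreamsBuilder is executed by a KafkaStreams instance. A minimal sketch, assuming a hypothetical application id and broker address:

  Properties props = new Properties();
  props.put(StreamsConfig.APPLICATION_ID_CONFIG, "schema-kstream-demo"); // hypothetical id
  props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");   // hypothetical broker
  KafkaStreams streams = new KafkaStreams(builder.build(), props);
  streams.start();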
Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.
From the class SchemaKTableTest, method init:
@Before
public void init() {
  functionRegistry = new FunctionRegistry();
  ksqlTable = (KsqlTable) metaStore.getSource("TEST2");
  StreamsBuilder builder = new StreamsBuilder();
  kTable = builder.table(
      ksqlTable.getKsqlTopic().getKafkaTopicName(),
      Consumed.with(
          Serdes.String(),
          ksqlTable.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
              null, new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
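The resulting KTable is a changelog view of the source topic: each arriving record replaces the previous value for its key. A minimal sketch of consuming it downstream (the output topic name is hypothetical, and default serdes are assumed):

  kTable.toStream().to("TEST2_CHANGES"); // each table update becomes a record on the output stream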
Use of org.apache.kafka.streams.StreamsBuilder in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNode, method buildStream:
@Override
public SchemaKStream buildStream(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> props,
    final SchemaRegistryClient schemaRegistryClient) {
  final SchemaKStream schemaKStream = getSource().buildStream(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry, props, schemaRegistryClient);
  final Set<Integer> rowkeyIndexes = SchemaUtil.getRowTimeRowKeyIndexes(getSchema());
  final Builder outputNodeBuilder = Builder.from(this);
  final Schema schema = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(getSchema());
  outputNodeBuilder.withSchema(schema);
  if (getTopicSerde() instanceof KsqlAvroTopicSerDe) {
    addAvroSchemaToResultTopic(outputNodeBuilder);
  }
  // Propagate any sink partition/replica overrides into the config used for topic creation.
  final Map<String, Object> outputProperties = getOutputProperties();
  if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY)) {
    ksqlConfig.put(
        KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY,
        outputProperties.get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY));
  }
  if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY)) {
    ksqlConfig.put(
        KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY,
        outputProperties.get(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY));
  }
  final SchemaKStream result = createOutputStream(
      schemaKStream, outputNodeBuilder, functionRegistry, outputProperties, schemaRegistryClient);
  final KsqlStructuredDataOutputNode noRowKey = outputNodeBuilder.build();
  createSinkTopic(noRowKey.getKafkaTopicName(), ksqlConfig, kafkaTopicClient, shoulBeCompacted(result));
  result.into(
      noRowKey.getKafkaTopicName(),
      noRowKey.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
          noRowKey.getSchema(), ksqlConfig, false, schemaRegistryClient),
      rowkeyIndexes);
  result.setOutputNode(
      outputNodeBuilder.withSchema(
          SchemaUtil.addImplicitRowTimeRowKeyToSchema(noRowKey.getSchema())).build());
  return result;
}
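The compaction flag passed to createSinkTopic corresponds to Kafka's topic-level cleanup.policy setting. A hedged sketch of what such a topic configuration might look like (createSinkTopic's actual implementation is not shown in this snippet):

  boolean compacted = true; // would come from shoulBeCompacted(result)
  Map<String, String> topicConfig = compacted
      ? Collections.singletonMap(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT)
      : Collections.<String, String>emptyMap();
  // e.g. kafkaTopicClient.createTopic(name, partitions, replicationFactor, topicConfig);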