Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.
In class JoinNodeTest, method shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy:
@Test
public void shouldBuildTableNodeWithCorrectAutoCommitOffsetPolicy() {
  setupTopicClientExpectations(1, 1);
  buildJoin();
  KsqlConfig ksqlConfig = mock(KsqlConfig.class);
  KafkaTopicClient kafkaTopicClient = mock(KafkaTopicClient.class);
  FunctionRegistry functionRegistry = mock(FunctionRegistry.class);

  // Local PlanNode stand-in for the right side of the join: its buildStream() only
  // succeeds when the supplied stream properties request the earliest offset reset.
  class RightTable extends PlanNode {
    final Schema schema;

    public RightTable(final PlanNodeId id, Schema schema) {
      super(id);
      this.schema = schema;
    }

    @Override
    public Schema getSchema() {
      return schema;
    }

    @Override
    public Field getKeyField() {
      return null;
    }

    @Override
    public List<PlanNode> getSources() {
      return null;
    }

    @Override
    public SchemaKStream buildStream(
        StreamsBuilder builder,
        KsqlConfig ksqlConfig,
        KafkaTopicClient kafkaTopicClient,
        FunctionRegistry functionRegistry,
        Map<String, Object> props,
        SchemaRegistryClient schemaRegistryClient) {
      if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
          && props.get(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG).toString().equalsIgnoreCase("EARLIEST")) {
        return mock(SchemaKTable.class);
      } else {
        throw new KsqlException("auto.offset.reset should be set to EARLIEST.");
      }
    }

    @Override
    protected int getPartitions(KafkaTopicClient kafkaTopicClient) {
      return 1;
    }
  }

  RightTable rightTable = new RightTable(new PlanNodeId("1"), joinNode.getRight().getSchema());
  JoinNode testJoinNode = new JoinNode(
      joinNode.getId(), joinNode.getType(), joinNode.getLeft(), rightTable,
      joinNode.getLeftKeyFieldName(), joinNode.getRightKeyFieldName(),
      joinNode.getLeftAlias(), joinNode.getRightAlias());

  // tableForJoin is expected to force the earliest auto.offset.reset policy;
  // otherwise the RightTable above throws and the test fails.
  testJoinNode.tableForJoin(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry,
      new HashMap<>(), new MockSchemaRegistryClient());
}
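The guard inside the RightTable above passes only when tableForJoin has injected an earliest auto.offset.reset value into the stream properties. As a minimal sketch (the variable name expectedStreamProps is illustrative, not from the KSQL sources), the property being checked looks like this:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;

// Sketch of the stream property the RightTable guard expects tableForJoin to have set;
// the guard compares the value case-insensitively, so "earliest" and "EARLIEST" both pass.
Map<String, Object> expectedStreamProps = new HashMap<>();
expectedStreamProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");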
Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.
In class KsqlBareOutputNodeTest, method build:
private SchemaKStream build() {
  final String simpleSelectFilter = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
  final KsqlBareOutputNode planNode =
      (KsqlBareOutputNode) planBuilder.buildLogicalPlan(simpleSelectFilter);
  // Real FunctionRegistry plus in-memory test doubles for the topic client and Schema Registry.
  return planNode.buildStream(builder, new KsqlConfig(Collections.emptyMap()), new FakeKafkaTopicClient(),
      new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
}
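The buildStream call above takes six collaborators in a fixed order. As a hedged sketch (the helper name buildNodeStream is hypothetical and not part of the KSQL test sources), the same argument set can be factored into a small test utility:

// Hypothetical helper (sketch only): bundles the buildStream arguments used in the
// build() method above so other tests in the class could reuse them.
private SchemaKStream buildNodeStream(final PlanNode node, final StreamsBuilder builder) {
  return node.buildStream(
      builder,
      new KsqlConfig(Collections.emptyMap()),  // default KSQL configuration
      new FakeKafkaTopicClient(),              // in-memory topic client test double
      new FunctionRegistry(),                  // real registry rather than a mock
      new HashMap<>(),                         // per-query stream properties
      new MockSchemaRegistryClient());         // avoids a live Schema Registry
}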
Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.
In class KsqlStructuredDataOutputNodeTest, method shouldCreateSinkWithCorrectCleanupPolicyStream:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyStream() {
  KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
  StreamsBuilder streamsBuilder = new StreamsBuilder();
  // Record the expected sink-topic creation, then switch the mock to replay mode.
  topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
  EasyMock.replay(topicClientForWindowTable);
  SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig, topicClientForWindowTable,
      new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
  assertThat(schemaKStream, instanceOf(SchemaKStream.class));
  // Pass the mock to verify() so the recorded createTopic expectation is actually checked.
  EasyMock.verify(topicClientForWindowTable);
}
Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.
In class AggregateAnalyzerTest, method init:
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
}
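Tests that only need a registry to satisfy a constructor argument sometimes mock it instead of constructing one, as in the JoinNodeTest snippet earlier; a one-line sketch of that alternative setup (Mockito assumed, as in that snippet):

// Alternative setup (sketch): a Mockito mock instead of a real registry, mirroring JoinNodeTest above.
functionRegistry = mock(FunctionRegistry.class);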
Use of io.confluent.ksql.function.FunctionRegistry in project ksql by confluentinc.
In class CodeGenRunnerTest, method init:
@Before
public void init() {
  metaStore = MetaStoreFixture.getNewMetaStore();
  functionRegistry = new FunctionRegistry();
  // Schema seen by the code generator: every column is qualified with the source name.
  schema = SchemaBuilder.struct()
      .field("CODEGEN_TEST.COL0", SchemaBuilder.INT64_SCHEMA).field("CODEGEN_TEST.COL1", SchemaBuilder.STRING_SCHEMA)
      .field("CODEGEN_TEST.COL2", SchemaBuilder.STRING_SCHEMA).field("CODEGEN_TEST.COL3", SchemaBuilder.FLOAT64_SCHEMA)
      .field("CODEGEN_TEST.COL4", SchemaBuilder.FLOAT64_SCHEMA).field("CODEGEN_TEST.COL5", SchemaBuilder.INT32_SCHEMA)
      .field("CODEGEN_TEST.COL6", SchemaBuilder.BOOLEAN_SCHEMA).field("CODEGEN_TEST.COL7", SchemaBuilder.BOOLEAN_SCHEMA)
      .field("CODEGEN_TEST.COL8", SchemaBuilder.INT64_SCHEMA)
      .field("CODEGEN_TEST.COL9", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
      .field("CODEGEN_TEST.COL10", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
      .field("CODEGEN_TEST.COL11", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA))
      .field("CODEGEN_TEST.COL12", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA));
  // The same columns, without the alias prefix, for the stream registered in the metastore.
  Schema metaStoreSchema = SchemaBuilder.struct()
      .field("COL0", SchemaBuilder.INT64_SCHEMA).field("COL1", SchemaBuilder.STRING_SCHEMA)
      .field("COL2", SchemaBuilder.STRING_SCHEMA).field("COL3", SchemaBuilder.FLOAT64_SCHEMA)
      .field("COL4", SchemaBuilder.FLOAT64_SCHEMA).field("COL5", SchemaBuilder.INT32_SCHEMA)
      .field("COL6", SchemaBuilder.BOOLEAN_SCHEMA).field("COL7", SchemaBuilder.BOOLEAN_SCHEMA)
      .field("COL8", SchemaBuilder.INT64_SCHEMA)
      .field("COL9", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
      .field("COL10", SchemaBuilder.array(SchemaBuilder.INT32_SCHEMA))
      .field("COL11", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA))
      .field("COL12", SchemaBuilder.map(SchemaBuilder.INT32_SCHEMA, SchemaBuilder.INT32_SCHEMA));
  KsqlTopic ksqlTopic = new KsqlTopic("CODEGEN_TEST", "codegen_test", new KsqlJsonTopicSerDe());
  KsqlStream ksqlStream = new KsqlStream(
      "sqlexpression", "CODEGEN_TEST", metaStoreSchema, metaStoreSchema.field("COL0"), null, ksqlTopic);
  metaStore.putTopic(ksqlTopic);
  metaStore.putSource(ksqlStream);
  codeGenRunner = new CodeGenRunner(schema, functionRegistry);
}
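The two schemas built in init() differ only in whether columns carry the CODEGEN_TEST prefix. A brief, self-contained sketch (variable names are illustrative) of how the same column resolves in each:

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// The code-generation schema qualifies columns with the source name, while the schema
// registered with the KsqlStream uses bare column names.
Schema codegenSchema = SchemaBuilder.struct().field("CODEGEN_TEST.COL0", Schema.INT64_SCHEMA).build();
Schema metastoreSchema = SchemaBuilder.struct().field("COL0", Schema.INT64_SCHEMA).build();

Field aliased = codegenSchema.field("CODEGEN_TEST.COL0"); // resolved via the qualified name
Field bare = metastoreSchema.field("COL0");               // resolved via the bare name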