Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class SqlPredicateTest, method testFilter:
@Test
public void testFilter() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
        ksqlStream.getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE,
        functionRegistry, new MockSchemaRegistryClient());
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(),
        initialSchemaKStream.getSchema(), false, functionRegistry);
    Assert.assertTrue(predicate.getFilterExpression().toString().equalsIgnoreCase("(TEST1.COL0 > 100)"));
    Assert.assertTrue(predicate.getColumnIndexes().length == 1);
}
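The SqlPredicate built above is only inspected by the assertions; in this KSQL codebase the same object is what SchemaKStream.filter uses to drive the Kafka Streams filter. A one-line sketch of that step, assuming the getPredicate() accessor on SqlPredicate and introducing filteredKStream purely for illustration:

KStream<String, GenericRow> filteredKStream =
    kStream.filter(predicate.getPredicate()); // getPredicate() is assumed from this KSQL version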
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class SqlPredicateTest, method testFilterBiggerExpression:
@Test
public void testFilterBiggerExpression() throws Exception {
    String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100 AND LEN(col2) = 5;";
    PlanNode logicalPlan = buildLogicalPlan(selectQuery);
    FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
    initialSchemaKStream = new SchemaKStream(logicalPlan.getTheSourceNode().getSchema(), kStream,
        ksqlStream.getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE,
        functionRegistry, new MockSchemaRegistryClient());
    SqlPredicate predicate = new SqlPredicate(filterNode.getPredicate(),
        initialSchemaKStream.getSchema(), false, functionRegistry);
    Assert.assertTrue(predicate.getFilterExpression().toString()
        .equalsIgnoreCase("((TEST1.COL0 > 100) AND (LEN(TEST1.COL2) = 5))"));
    Assert.assertTrue(predicate.getColumnIndexes().length == 3);
}
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class SqlPredicateTest, method init:
@Before
public void init() {
    metaStore = MetaStoreFixture.getNewMetaStore();
    functionRegistry = new FunctionRegistry();
    ksqlStream = (KsqlStream) metaStore.getSource("TEST1");
    StreamsBuilder builder = new StreamsBuilder();
    kStream = builder.stream(
        ksqlStream.getKsqlTopic().getKafkaTopicName(),
        Consumed.with(Serdes.String(),
            ksqlStream.getKsqlTopic().getKsqlTopicSerDe().getGenericRowSerde(
                null, new KsqlConfig(Collections.emptyMap()), false, new MockSchemaRegistryClient())));
}
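MockSchemaRegistryClient shows up in every setup above because it is a purely in-memory SchemaRegistryClient: schemas registered through it are kept in a local map, so the Avro serde returned by getGenericRowSerde never contacts a running Schema Registry. A minimal sketch of that behaviour, assuming the Avro-based client API this KSQL version compiles against; the subject name and schema are made up for illustration:

@Test
public void shouldRoundTripSchemaInMemory() throws Exception {
    SchemaRegistryClient srClient = new MockSchemaRegistryClient();
    org.apache.avro.Schema valueSchema = org.apache.avro.SchemaBuilder.record("TEST1")
        .fields().requiredLong("COL0").requiredString("COL2").endRecord();
    int id = srClient.register("test1-value", valueSchema); // stored in memory, no HTTP call
    SchemaMetadata latest = srClient.getLatestSchemaMetadata("test1-value");
    Assert.assertEquals(id, latest.getId()); // the mock hands back exactly what was registered
}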
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNodeTest, method shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyNonWindowedTable() {
    KafkaTopicClient topicClientForNonWindowTable = EasyMock.mock(KafkaTopicClient.class);
    KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(false);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    Map<String, String> topicConfig = ImmutableMap.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
    // Record the expectation: a non-windowed table sink should be created as a compacted topic.
    topicClientForNonWindowTable.createTopic("output", 4, (short) 3, topicConfig);
    EasyMock.replay(topicClientForNonWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig,
        topicClientForNonWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKTable.class));
    // verify() must be passed the mock; with no arguments nothing is checked.
    EasyMock.verify(topicClientForNonWindowTable);
}
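The EasyMock flow in this test is record, replay, verify: the createTopic call made before replay() records an expectation, buildStream() is then expected to make the matching real call, and verify(mock) fails if the expectation was never satisfied. A stripped-down sketch of just that pattern, reusing the createTopic signature from the test above with illustrative values:

KafkaTopicClient mock = EasyMock.mock(KafkaTopicClient.class);
mock.createTopic("output", 4, (short) 3, Collections.emptyMap()); // record phase: this call becomes an expectation
EasyMock.replay(mock);                                            // switch the mock into replay mode
mock.createTopic("output", 4, (short) 3, Collections.emptyMap()); // in the real test, buildStream() triggers this call
EasyMock.verify(mock);                                            // passes only because the expectation was met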
Use of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in project ksql by confluentinc.
From the class KsqlStructuredDataOutputNodeTest, method shouldCreateSinkWithCorrectCleanupPolicyWindowedTable:
@Test
public void shouldCreateSinkWithCorrectCleanupPolicyWindowedTable() {
    KafkaTopicClient topicClientForWindowTable = EasyMock.mock(KafkaTopicClient.class);
    KsqlStructuredDataOutputNode outputNode = getKsqlStructuredDataOutputNode(true);
    StreamsBuilder streamsBuilder = new StreamsBuilder();
    // Record the expectation: a windowed table sink is created with no extra topic config.
    topicClientForWindowTable.createTopic("output", 4, (short) 3, Collections.emptyMap());
    EasyMock.replay(topicClientForWindowTable);
    SchemaKStream schemaKStream = outputNode.buildStream(streamsBuilder, ksqlConfig,
        topicClientForWindowTable, new FunctionRegistry(), new HashMap<>(), new MockSchemaRegistryClient());
    assertThat(schemaKStream, instanceOf(SchemaKTable.class));
    // Pass the mock to verify(); otherwise the createTopic expectation is never checked.
    EasyMock.verify(topicClientForWindowTable);
}
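The only substantive difference between the two sink tests is the topic configuration the mock expects for the "output" topic; side by side, with values copied from the expectations above:

Map<String, String> nonWindowedSinkConfig =
    ImmutableMap.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT); // compacted table sink
Map<String, String> windowedSinkConfig = Collections.emptyMap(); // windowed sink relies on broker defaults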