Usage of io.confluent.ksql.metastore.KsqlTopic in project ksql by confluentinc: class QueryEngine, method maybeAddFieldsFromSchemaRegistry.
/**
 * Rewrites a CREATE STREAM/TABLE statement using registry metadata.
 *
 * <p>If the statement names a KSQL-registered topic, its Kafka topic name and
 * value format are copied into the statement's WITH properties. The statement
 * is then run through {@link AvroUtil} so that, for Avro sources, the field
 * list can be inferred from the schema registry.
 *
 * @param streamCreateStatement the original CREATE statement
 * @return the rewritten DDL statement paired with its string form, or a
 *     {@code (null, null)} pair when no Avro-based rewrite was produced
 * @throws KsqlException if the referenced topic is not in the metastore
 */
private Pair<DdlStatement, String> maybeAddFieldsFromSchemaRegistry(AbstractStreamCreateStatement streamCreateStatement) {
  if (streamCreateStatement.getProperties().containsKey(DdlConfig.TOPIC_NAME_PROPERTY)) {
    // Registered topic names are stored upper-cased and unquoted in the metastore.
    final String registeredTopicName = StringUtil.cleanQuotes(
        streamCreateStatement.getProperties().get(DdlConfig.TOPIC_NAME_PROPERTY).toString().toUpperCase());
    final KsqlTopic registeredTopic = ksqlEngine.getMetaStore().getTopic(registeredTopicName);
    if (registeredTopic == null) {
      throw new KsqlException(String.format("Could not find %s topic in the metastore.", registeredTopicName));
    }
    // Replace the statement's properties with the registry-derived topic name and format.
    final Map<String, Expression> rewrittenProperties = new HashMap<>();
    rewrittenProperties.put(
        DdlConfig.KAFKA_TOPIC_NAME_PROPERTY,
        new StringLiteral(registeredTopic.getKafkaTopicName()));
    rewrittenProperties.put(
        DdlConfig.VALUE_FORMAT_PROPERTY,
        new StringLiteral(registeredTopic.getKsqlTopicSerDe().getSerDe().toString()));
    streamCreateStatement =
        streamCreateStatement.copyWith(streamCreateStatement.getElements(), rewrittenProperties);
  }
  final Pair<AbstractStreamCreateStatement, String> avroResult =
      new AvroUtil().checkAndSetAvroSchema(
          streamCreateStatement, new HashMap<>(), ksqlEngine.getSchemaRegistryClient());
  final AbstractStreamCreateStatement rewritten = avroResult.getLeft();
  if (avroResult.getRight() != null) {
    // Only stream/table creates can carry the inferred Avro fields.
    if (rewritten instanceof CreateStream) {
      return new Pair<>((CreateStream) rewritten, avroResult.getRight());
    }
    if (rewritten instanceof CreateTable) {
      return new Pair<>((CreateTable) rewritten, avroResult.getRight());
    }
  }
  return new Pair<>(null, null);
}
Usage of io.confluent.ksql.metastore.KsqlTopic in project ksql by confluentinc: class QueryEngine, method getResultDatasource.
/**
 * Builds a placeholder {@link StructuredDataSource} for a query's projection.
 *
 * <p>Each aliased single column becomes a field in the result schema. The
 * field type is fixed to {@code Schema.BOOLEAN_SCHEMA} here as a stand-in;
 * real types are resolved later in planning — TODO confirm against callers.
 *
 * @param select the SELECT clause whose items name the result fields
 * @param name the name used for both the schema and the backing topic
 * @return a {@link KsqlStream} wrapping the synthesized schema and topic
 */
StructuredDataSource getResultDatasource(final Select select, final String name) {
  SchemaBuilder schemaBuilder = SchemaBuilder.struct().name(name);
  for (final SelectItem item : select.getSelectItems()) {
    if (item instanceof SingleColumn) {
      // Field names come from the column alias; non-column items are skipped.
      final String fieldName = ((SingleColumn) item).getAlias().get();
      schemaBuilder = schemaBuilder.field(fieldName, Schema.BOOLEAN_SCHEMA);
    }
  }
  // Topic and Kafka-topic names mirror the datasource name; serde is unset here.
  final KsqlTopic resultTopic = new KsqlTopic(name, name, null);
  return new KsqlStream(
      "QueryEngine-DDLCommand-Not-Needed", name, schemaBuilder.schema(), null, null, resultTopic);
}
Usage of io.confluent.ksql.metastore.KsqlTopic in project ksql by confluentinc: class Analyzer, method analyzeNonStdOutSink.
/**
 * Resolves the sink (INTO) datasource for a query that does not write to stdout.
 *
 * <p>The sink's Kafka topic name defaults to the sink datasource name when no
 * explicit topic was given. Its serde is chosen from the INTO format if one was
 * specified; otherwise it is inherited from the first source datasource.
 * The resolved sink is stored back into {@code analysis}.
 *
 * @throws KsqlException if an explicit INTO format is not AVRO, JSON, or DELIMITED
 */
private void analyzeNonStdOutSink() {
  final List<Pair<StructuredDataSource, String>> sources = analysis.getFromDataSources();
  final StructuredDataSource sink = analysis.getInto();

  String sinkTopicName = analysis.getIntoKafkaTopicName();
  if (sinkTopicName == null) {
    // No explicit topic: reuse the sink datasource's name.
    sinkTopicName = sink.getName();
  }

  // Default serde: inherit from the first source.
  KsqlTopicSerDe sinkSerde = sources.get(0).getLeft().getKsqlTopic().getKsqlTopicSerDe();
  final String intoFormat = analysis.getIntoFormat();
  if (intoFormat != null) {
    switch (intoFormat.toUpperCase()) {
      case DataSource.AVRO_SERDE_NAME:
        sinkSerde = new KsqlAvroTopicSerDe();
        break;
      case DataSource.JSON_SERDE_NAME:
        sinkSerde = new KsqlJsonTopicSerDe();
        break;
      case DataSource.DELIMITED_SERDE_NAME:
        sinkSerde = new KsqlDelimitedTopicSerDe();
        break;
      default:
        throw new KsqlException(String.format("Unsupported format: %s", analysis.getIntoFormat()));
    }
  } else if (sinkSerde instanceof KsqlAvroTopicSerDe) {
    // Fresh Avro serde instance for the sink rather than sharing the source's.
    sinkSerde = new KsqlAvroTopicSerDe();
  }

  final KsqlTopic sinkTopic = new KsqlTopic(sinkTopicName, sinkTopicName, sinkSerde);
  final KsqlStream sinkStream =
      new KsqlStream(sqlExpression, sink.getName(), null, null, null, sinkTopic);
  analysis.setInto(sinkStream);
}
Usage of io.confluent.ksql.metastore.KsqlTopic in project ksql by confluentinc: class KsqlStructuredDataOutputNode, method addAvroSchemaToResultTopic.
/**
 * Re-registers this node's result topic on the builder with an Avro serde,
 * keeping the existing topic name and Kafka topic name unchanged.
 *
 * @param builder the output-node builder to update
 */
private void addAvroSchemaToResultTopic(final Builder builder) {
  final KsqlTopic current = getKsqlTopic();
  builder.withKsqlTopic(
      new KsqlTopic(current.getName(), current.getKafkaTopicName(), new KsqlAvroTopicSerDe()));
}
Usage of io.confluent.ksql.metastore.KsqlTopic in project ksql by confluentinc: class KafkaTopicsListTest, method shouldBuildValidTopicList.
/**
 * Verifies that KafkaTopicsList.build produces one entry per described topic,
 * with consumer-group, consumer, and replica counts derived from the mocked
 * consumer-group client and the topic description.
 */
@Test
public void shouldBuildValidTopicList() {
  final Collection<KsqlTopic> ksqlTopics = Collections.emptyList();

  // Full list of topics known to the cluster: one topic, one partition, one replica.
  final Map<String, TopicDescription> topicDescriptions = new HashMap<>();
  final TopicPartitionInfo partitionInfo =
      new TopicPartitionInfo(1, new Node(1, "", 8088), Collections.emptyList(), Collections.emptyList());
  topicDescriptions.put(
      "test-topic", new TopicDescription("test-topic", false, Collections.singletonList(partitionInfo)));

  // Canned consumer-group data returned by the mocked group client.
  final TopicPartition partition = new TopicPartition("test-topic", 1);
  final KafkaConsumerGroupClientImpl.ConsumerSummary summary =
      new KafkaConsumerGroupClientImpl.ConsumerSummary("consumer-id");
  summary.addPartition(partition);
  final KafkaConsumerGroupClientImpl.ConsumerGroupSummary groupSummary =
      new KafkaConsumerGroupClientImpl.ConsumerGroupSummary();
  groupSummary.addConsumerSummary(summary);
  final KafkaConsumerGroupClient groupClient = mock(KafkaConsumerGroupClient.class);
  expect(groupClient.listGroups()).andReturn(Collections.singletonList("test-topic"));
  expect(groupClient.describeConsumerGroup("test-topic")).andReturn(groupSummary);
  replay(groupClient);

  // Exercise and verify.
  final KafkaTopicsList topicsList = KafkaTopicsList.build(
      "statement test", ksqlTopics, topicDescriptions, new KsqlConfig(Collections.EMPTY_MAP), groupClient);
  assertThat(topicsList.getTopics().size(), equalTo(1));
  final KafkaTopicInfo first = topicsList.getTopics().iterator().next();
  assertThat(first.getConsumerGroupCount(), equalTo(1));
  assertThat(first.getConsumerCount(), equalTo(1));
  assertThat(first.getReplicaInfo().size(), equalTo(1));
}
Aggregations