Use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.
The class Analyzer, method analyzeNonStdOutSink.
private void analyzeNonStdOutSink() {
  List<Pair<StructuredDataSource, String>> fromDataSources = analysis.getFromDataSources();
  StructuredDataSource intoStructuredDataSource = analysis.getInto();
  String intoKafkaTopicName = analysis.getIntoKafkaTopicName();
  if (intoKafkaTopicName == null) {
    intoKafkaTopicName = intoStructuredDataSource.getName();
  }
  // Default the sink serde to the serde of the first source in the FROM clause.
  KsqlTopicSerDe intoTopicSerde =
      fromDataSources.get(0).getLeft().getKsqlTopic().getKsqlTopicSerDe();
  if (analysis.getIntoFormat() != null) {
    // An explicit format on the sink overrides the inherited serde.
    switch (analysis.getIntoFormat().toUpperCase()) {
      case DataSource.AVRO_SERDE_NAME:
        intoTopicSerde = new KsqlAvroTopicSerDe();
        break;
      case DataSource.JSON_SERDE_NAME:
        intoTopicSerde = new KsqlJsonTopicSerDe();
        break;
      case DataSource.DELIMITED_SERDE_NAME:
        intoTopicSerde = new KsqlDelimitedTopicSerDe();
        break;
      default:
        throw new KsqlException(
            String.format("Unsupported format: %s", analysis.getIntoFormat()));
    }
  } else {
    if (intoTopicSerde instanceof KsqlAvroTopicSerDe) {
      intoTopicSerde = new KsqlAvroTopicSerDe();
    }
  }
  KsqlTopic newIntoKsqlTopic = new KsqlTopic(intoKafkaTopicName, intoKafkaTopicName, intoTopicSerde);
  KsqlStream intoKsqlStream = new KsqlStream(
      sqlExpression, intoStructuredDataSource.getName(), null, null, null, newIntoKsqlTopic);
  analysis.setInto(intoKsqlStream);
}
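The format-selection logic above can be exercised in isolation. The following is a minimal, self-contained sketch of that selection, not KSQL code: the type Format and the method resolveSinkFormat are hypothetical names introduced here for illustration. It picks the sink format from an optional INTO format string and falls back to the source's format when none is given.

// Hypothetical sketch of the sink-format selection; Format and
// resolveSinkFormat are illustrative names, not part of the KSQL codebase.
final class SinkFormatExample {

  enum Format { AVRO, JSON, DELIMITED }

  // An explicit INTO format wins; otherwise inherit the format of the
  // first source in the FROM clause.
  static Format resolveSinkFormat(String intoFormat, Format sourceFormat) {
    if (intoFormat == null) {
      return sourceFormat;
    }
    switch (intoFormat.toUpperCase()) {
      case "AVRO":
        return Format.AVRO;
      case "JSON":
        return Format.JSON;
      case "DELIMITED":
        return Format.DELIMITED;
      default:
        throw new IllegalArgumentException("Unsupported format: " + intoFormat);
    }
  }

  public static void main(String[] args) {
    System.out.println(resolveSinkFormat(null, Format.JSON));   // JSON (inherited)
    System.out.println(resolveSinkFormat("avro", Format.JSON)); // AVRO (explicit)
  }
}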
Use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.
The class Analyzer, method visitTable.
@Override
protected Node visitTable(final Table node, final AnalysisContext context) {
  StructuredDataSource into;
  if (node.isStdOut) {
    into = new KsqlStdOut(
        KsqlStdOut.KSQL_STDOUT_NAME, null, null, null,
        StructuredDataSource.DataSourceType.KSTREAM);
  } else if (context.getParentType() == AnalysisContext.ParentType.INTO) {
    into = analyzeNonStdOutTable(node);
  } else {
    throw new KsqlException("INTO clause is not set correctly!");
  }
  analysis.setInto(into);
  return null;
}
Use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.
The class Analyzer, method analyzeNonStdOutTable.
private StructuredDataSource analyzeNonStdOutTable(final Table node) {
  StructuredDataSource into = new KsqlStream(
      sqlExpression, node.getName().getSuffix(), null, null, null, null);
  setIntoProperties(into, node);
  return into;
}
Use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.
The class Analyzer, method visitAliasedRelation.
@Override
protected Node visitAliasedRelation(AliasedRelation node, AnalysisContext context) {
  String structuredDataSourceName = ((Table) node.getRelation()).getName().getSuffix();
  if (metaStore.getSource(structuredDataSourceName) == null) {
    throw new KsqlException(structuredDataSourceName + " does not exist.");
  }
  StructuredDataSource structuredDataSource = metaStore.getSource(structuredDataSourceName);
  if (((Table) node.getRelation()).getProperties() != null) {
    if (((Table) node.getRelation()).getProperties().get(DdlConfig.TIMESTAMP_NAME_PROPERTY) != null) {
      String timestampFieldName = ((Table) node.getRelation()).getProperties()
          .get(DdlConfig.TIMESTAMP_NAME_PROPERTY).toString().toUpperCase();
      if (!timestampFieldName.startsWith("'") && !timestampFieldName.endsWith("'")) {
        throw new KsqlException("Property name should be String with single quote.");
      }
      timestampFieldName = timestampFieldName.substring(1, timestampFieldName.length() - 1);
      structuredDataSource = structuredDataSource.cloneWithTimeField(timestampFieldName);
    }
  }
  Pair<StructuredDataSource, String> fromDataSource =
      new Pair<>(structuredDataSource, node.getAlias());
  analysis.addDataSource(fromDataSource);
  return node;
}
Use of io.confluent.ksql.metastore.StructuredDataSource in project ksql by confluentinc.
The class DropSourceCommand, method run.
@Override
public DdlCommandResult run(MetaStore metaStore) {
  StructuredDataSource dataSource = metaStore.getSource(sourceName);
  if (dataSource == null) {
    throw new KsqlException("Source " + sourceName + " does not exist.");
  }
  if (dataSource.getDataSourceType() != dataSourceType) {
    throw new KsqlException(String.format(
        "Incompatible data source type is %s, but statement was DROP %s",
        dataSource.getDataSourceType() == DataSource.DataSourceType.KSTREAM ? "STREAM" : "TABLE",
        dataSourceType == DataSource.DataSourceType.KSTREAM ? "STREAM" : "TABLE"));
  }
  DropTopicCommand dropTopicCommand = new DropTopicCommand(dataSource.getKsqlTopic().getTopicName());
  dropTopicCommand.run(metaStore);
  metaStore.deleteSource(sourceName);
  queryTerminator.terminateQueryForEntity(sourceName);
  return new DdlCommandResult(true, "Source " + sourceName + " was dropped");
}
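The type check in run guards against dropping a stream with DROP TABLE and vice versa. The following is a minimal self-contained sketch of that guard only, not KSQL code; SourceType and checkDropMatchesType are hypothetical names introduced for illustration.

// Hypothetical illustration of the DROP type guard; not part of KSQL.
final class DropTypeCheckExample {

  enum SourceType { KSTREAM, KTABLE }

  // Fail if the statement (DROP STREAM / DROP TABLE) does not match the
  // registered type of the source being dropped.
  static void checkDropMatchesType(SourceType actual, SourceType requested) {
    if (actual != requested) {
      throw new IllegalStateException(String.format(
          "Incompatible data source type is %s, but statement was DROP %s",
          actual == SourceType.KSTREAM ? "STREAM" : "TABLE",
          requested == SourceType.KSTREAM ? "STREAM" : "TABLE"));
    }
  }

  public static void main(String[] args) {
    checkDropMatchesType(SourceType.KSTREAM, SourceType.KSTREAM); // ok
    checkDropMatchesType(SourceType.KTABLE, SourceType.KSTREAM);  // throws
  }
}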