Use of io.confluent.ksql.parser.tree.StringLiteral in project ksql by confluentinc.
From the class ExpressionFormatterTest, method shouldFormatFunctionCallWithCount:
@Test
public void shouldFormatFunctionCallWithCount() {
  final FunctionCall functionCall = new FunctionCall(
      QualifiedName.of("function", "COUNT"),
      Collections.singletonList(new StringLiteral("name")));
  assertThat(ExpressionFormatter.formatExpression(functionCall),
      equalTo("function.COUNT('name')"));
}
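For comparison, here is a minimal sketch (not taken from the project's tests, but assuming the same ExpressionFormatter and StringLiteral APIs and imports used in the example above) showing that a bare StringLiteral is rendered with single quotes, which is why the expected output above reads 'name':

// Minimal sketch, assuming the APIs used in the test above: a lone StringLiteral
// is formatted as its value wrapped in single quotes.
assertThat(ExpressionFormatter.formatExpression(new StringLiteral("name")),
    equalTo("'name'"));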
From the class ExpressionFormatterTest, method shouldFormatFunctionCallWithWindow:
@Test
public void shouldFormatFunctionCallWithWindow() {
  final FunctionCall functionCall = new FunctionCall(
      new NodeLocation(1, 1), QualifiedName.of("function"),
      Optional.of(new Window("window", new WindowExpression("blah",
          new TumblingWindowExpression(1L, TimeUnit.SECONDS)))),
      false, Collections.singletonList(new StringLiteral("name")));
  assertThat(ExpressionFormatter.formatExpression(functionCall),
      equalTo("function('name') OVER WINDOW WINDOW blah TUMBLING ( SIZE 1 SECONDS ) "));
}
From the class AstBuilder, method visitNormalize:
@Override
public Node visitNormalize(SqlBaseParser.NormalizeContext context) {
  Expression str = (Expression) visit(context.valueExpression());
  String normalForm = Optional.ofNullable(context.normalForm())
      .map(ParserRuleContext::getText)
      .orElse("NFC");
  return new FunctionCall(
      getLocation(context),
      QualifiedName.of("NORMALIZE"),
      ImmutableList.of(str, new StringLiteral(getLocation(context), normalForm)));
}
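To make the rewrite concrete, the following is a hypothetical illustration (the argument value and the constructed tree are invented for this example, using the constructors shown elsewhere on this page) of what the method produces for a call such as NORMALIZE('name') with no explicit normal form: the default 'NFC' is appended as a StringLiteral argument.

// Hypothetical illustration: NORMALIZE('name') becomes a two-argument FunctionCall,
// with the omitted normal form supplied as the StringLiteral 'NFC'.
FunctionCall normalize = new FunctionCall(
    QualifiedName.of("NORMALIZE"),
    ImmutableList.of(new StringLiteral("name"), new StringLiteral("NFC")));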
From the class QueryEngine, method maybeAddFieldsFromSchemaRegistry:
private Pair<DdlStatement, String> maybeAddFieldsFromSchemaRegistry(
    AbstractStreamCreateStatement streamCreateStatement) {
  if (streamCreateStatement.getProperties().containsKey(DdlConfig.TOPIC_NAME_PROPERTY)) {
    String ksqlRegisteredTopicName = StringUtil.cleanQuotes(
        streamCreateStatement.getProperties().get(DdlConfig.TOPIC_NAME_PROPERTY)
            .toString().toUpperCase());
    KsqlTopic ksqlTopic = ksqlEngine.getMetaStore().getTopic(ksqlRegisteredTopicName);
    if (ksqlTopic == null) {
      throw new KsqlException(
          String.format("Could not find %s topic in the metastore.", ksqlRegisteredTopicName));
    }
    Map<String, Expression> newProperties = new HashMap<>();
    newProperties.put(
        DdlConfig.KAFKA_TOPIC_NAME_PROPERTY,
        new StringLiteral(ksqlTopic.getKafkaTopicName()));
    newProperties.put(
        DdlConfig.VALUE_FORMAT_PROPERTY,
        new StringLiteral(ksqlTopic.getKsqlTopicSerDe().getSerDe().toString()));
    streamCreateStatement =
        streamCreateStatement.copyWith(streamCreateStatement.getElements(), newProperties);
  }
  Pair<AbstractStreamCreateStatement, String> avroCheckResult = new AvroUtil()
      .checkAndSetAvroSchema(
          streamCreateStatement, new HashMap<>(), ksqlEngine.getSchemaRegistryClient());
  if (avroCheckResult.getRight() != null) {
    if (avroCheckResult.getLeft() instanceof CreateStream) {
      return new Pair<>((CreateStream) avroCheckResult.getLeft(), avroCheckResult.getRight());
    } else if (avroCheckResult.getLeft() instanceof CreateTable) {
      return new Pair<>((CreateTable) avroCheckResult.getLeft(), avroCheckResult.getRight());
    }
  }
  return new Pair<>(null, null);
}
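As a rough sketch of the effect (the topic name and value format below are hypothetical, not taken from the project), a registered topic backed by the Kafka topic "pageviews" with a JSON serde would have its WITH properties rewritten to carry those values as StringLiterals:

// Sketch with hypothetical values, mirroring the two put() calls above:
// the injected properties are plain StringLiteral expressions.
Map<String, Expression> injected = new HashMap<>();
injected.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral("pageviews"));
injected.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("JSON"));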
From the class CommandFactoriesTest, method shouldFailCreateTableIfTimestampColumnNameIsIncorrect:
@Test
public void shouldFailCreateTableIfTimestampColumnNameIsIncorrect() {
  HashMap<String, Expression> tableProperties = new HashMap<>();
  tableProperties.putAll(properties);
  tableProperties.put(DdlConfig.TIMESTAMP_NAME_PROPERTY, new StringLiteral("COL3"));
  try {
    final DdlCommand result = commandFactories.create(
        sqlExpression,
        new CreateTable(
            QualifiedName.of("foo"),
            Arrays.asList(new TableElement("COL1", "BIGINT"), new TableElement("COL2", "VARCHAR")),
            true,
            tableProperties),
        Collections.emptyMap());
  } catch (KsqlException e) {
    assertThat(e.getMessage(), equalTo("No column with the provided timestamp column name in the WITH clause, COL3, exists in the defined schema."));
  }
}