Example usage of io.confluent.ksql.parser.tree.StringLiteral in the ksql project by confluentinc, taken from the class CommandFactoriesTest, method shouldFailCreateTableIfKeyNameIsIncorrect.
@Test
public void shouldFailCreateTableIfKeyNameIsIncorrect() {
  // Copy the shared WITH-clause properties and override the key column with
  // "COL3", which is deliberately absent from the declared schema below.
  final HashMap<String, Expression> tableProperties = new HashMap<>(properties);
  tableProperties.put(DdlConfig.KEY_NAME_PROPERTY, new StringLiteral("COL3"));
  try {
    // Schema only declares COL1 and COL2, so this create must be rejected.
    commandFactories.create(sqlExpression, new CreateTable(QualifiedName.of("foo"), Arrays.asList(new TableElement("COL1", "BIGINT"), new TableElement("COL2", "VARCHAR")), true, tableProperties), Collections.emptyMap());
    // Bug fix: the original test passed silently when no exception was thrown.
    // AssertionError is not a KsqlException, so it escapes the catch below.
    throw new AssertionError("Expected KsqlException for unknown key column COL3");
  } catch (KsqlException e) {
    assertThat(e.getMessage(), equalTo("No column with the provided key column name in the " + "WITH clause, COL3, exists in the defined schema."));
  }
}
Example usage of io.confluent.ksql.parser.tree.StringLiteral in the ksql project by confluentinc, taken from the class CommandFactoriesTest, method before.
@Before
public void before() {
// Per-test setup: populate the minimal WITH-clause properties (value format
// and source Kafka topic) shared by every test in this class.
properties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("JSON"));
properties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral("topic"));
// Record-then-replay EasyMock setup (order matters): any topic name is
// reported as existing so topic-existence validation never fails a test.
EasyMock.expect(topicClient.isTopicExists(anyString())).andReturn(true);
EasyMock.replay(topicClient);
}
Example usage of io.confluent.ksql.parser.tree.StringLiteral in the ksql project by confluentinc, taken from the class CommandFactoriesTest, method shouldCreateCommandForCreateTable.
@Test
public void shouldCreateCommandForCreateTable() {
  // Start from the shared WITH-clause defaults and name a key column that
  // really exists in the schema declared below.
  final HashMap<String, Expression> withClause = new HashMap<>(properties);
  withClause.put(DdlConfig.KEY_NAME_PROPERTY, new StringLiteral("COL1"));
  final CreateTable statement = new CreateTable(QualifiedName.of("foo"), Arrays.asList(new TableElement("COL1", "BIGINT"), new TableElement("COL2", "VARCHAR")), true, withClause);
  // A valid CREATE TABLE statement should map to a CreateTableCommand.
  final DdlCommand result = commandFactories.create(sqlExpression, statement, Collections.emptyMap());
  assertThat(result, instanceOf(CreateTableCommand.class));
}
Example usage of io.confluent.ksql.parser.tree.StringLiteral in the ksql project by confluentinc, taken from the class AvroUtil, method addAvroFields.
/**
 * Returns a copy of the given statement whose column list is derived from the
 * supplied Avro schema and whose properties carry the schema registry id.
 *
 * @param abstractStreamCreateStatement the statement to copy
 * @param schema the Avro-derived connect schema supplying the columns
 * @param schemaId the schema registry id to record in the properties
 * @return a new statement with the derived elements and properties
 */
private AbstractStreamCreateStatement addAvroFields(final AbstractStreamCreateStatement abstractStreamCreateStatement, final Schema schema, int schemaId) {
  // One table element per schema field; column names are upper-cased to
  // match KSQL's case-insensitive identifier handling.
  final List<TableElement> elements = new ArrayList<>(schema.fields().size());
  for (final Field field : schema.fields()) {
    elements.add(new TableElement(field.name().toUpperCase(), SchemaUtil.getSqlTypeName(field.schema())));
  }
  // String.valueOf is the idiomatic (and cheaper) way to render an int.
  final StringLiteral schemaIdLiteral = new StringLiteral(String.valueOf(schemaId));
  final Map<String, Expression> properties = new HashMap<>(abstractStreamCreateStatement.getProperties());
  // Only record the schema id if the statement did not already carry one.
  if (!abstractStreamCreateStatement.getProperties().containsKey(KsqlConstants.AVRO_SCHEMA_ID)) {
    properties.put(KsqlConstants.AVRO_SCHEMA_ID, schemaIdLiteral);
  }
  return abstractStreamCreateStatement.copyWith(elements, properties);
}
Example usage of io.confluent.ksql.parser.tree.StringLiteral in the ksql project by confluentinc, taken from the class ExpressionFormatterTest, method shouldFormatBetweenPredicate.
@Test
public void shouldFormatBetweenPredicate() {
  // 'blah' BETWEEN 5 AND 10 should render single-quoted and parenthesized.
  final StringLiteral value = new StringLiteral("blah");
  final LongLiteral lowerBound = new LongLiteral("5");
  final LongLiteral upperBound = new LongLiteral("10");
  final BetweenPredicate predicate = new BetweenPredicate(value, lowerBound, upperBound);
  final String formatted = ExpressionFormatter.formatExpression(predicate);
  assertThat(formatted, equalTo("('blah' BETWEEN 5 AND 10)"));
}
Aggregations