Use of io.confluent.ksql.parser.tree.TableElement in project ksql by confluentinc.
From the class CommandFactoriesTest, the method shouldFailCreateTableIfKeyNameIsIncorrect:
@Test
public void shouldFailCreateTableIfKeyNameIsIncorrect() {
  HashMap<String, Expression> tableProperties = new HashMap<>();
  tableProperties.putAll(properties);
  tableProperties.put(DdlConfig.KEY_NAME_PROPERTY, new StringLiteral("COL3"));
  try {
    commandFactories.create(
        sqlExpression,
        new CreateTable(
            QualifiedName.of("foo"),
            Arrays.asList(
                new TableElement("COL1", "BIGINT"),
                new TableElement("COL2", "VARCHAR")),
            true,
            tableProperties),
        Collections.emptyMap());
    // Without this fail(), the test would silently pass when no exception is thrown.
    fail("expected KsqlException for key column COL3");
  } catch (KsqlException e) {
    assertThat(e.getMessage(), equalTo(
        "No column with the provided key column name in the WITH clause, "
            + "COL3, exists in the defined schema."));
  }
}
Use of io.confluent.ksql.parser.tree.TableElement in project ksql by confluentinc.
From the class CommandFactoriesTest, the method shouldCreateCommandForCreateTable:
@Test
public void shouldCreateCommandForCreateTable() {
  HashMap<String, Expression> tableProperties = new HashMap<>();
  tableProperties.putAll(properties);
  tableProperties.put(DdlConfig.KEY_NAME_PROPERTY, new StringLiteral("COL1"));

  final DdlCommand result = commandFactories.create(
      sqlExpression,
      new CreateTable(
          QualifiedName.of("foo"),
          Arrays.asList(
              new TableElement("COL1", "BIGINT"),
              new TableElement("COL2", "VARCHAR")),
          true,
          tableProperties),
      Collections.emptyMap());

  assertThat(result, instanceOf(CreateTableCommand.class));
}
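Both tests copy a shared properties fixture before overriding the key name. That fixture is not shown on this page; a minimal sketch of what it presumably contains, using the DdlConfig keys that appear elsewhere in these examples (the literal values are assumptions):

// Hypothetical base fixture copied by tableProperties.putAll(properties) above.
HashMap<String, Expression> properties = new HashMap<>();
properties.put(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral("some_topic"));
properties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("JSON"));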
Use of io.confluent.ksql.parser.tree.TableElement in project ksql by confluentinc.
From the class AvroUtil, the method addAvroFields, which rebuilds a statement's column list from a registered Avro schema:
private AbstractStreamCreateStatement addAvroFields(
    final AbstractStreamCreateStatement abstractStreamCreateStatement,
    final Schema schema,
    int schemaId) {
  // One TableElement per Avro field: upper-case the name and map the Connect
  // schema type to its KSQL SQL type name.
  List<TableElement> elements = new ArrayList<>();
  for (Field field : schema.fields()) {
    elements.add(new TableElement(
        field.name().toUpperCase(),
        SchemaUtil.getSqlTypeName(field.schema())));
  }
  // Record the schema registry id in the WITH-clause properties, unless the
  // statement already carries one.
  StringLiteral schemaIdLiteral = new StringLiteral(String.format("%d", schemaId));
  Map<String, Expression> properties =
      new HashMap<>(abstractStreamCreateStatement.getProperties());
  if (!abstractStreamCreateStatement.getProperties().containsKey(KsqlConstants.AVRO_SCHEMA_ID)) {
    properties.put(KsqlConstants.AVRO_SCHEMA_ID, schemaIdLiteral);
  }
  return abstractStreamCreateStatement.copyWith(elements, properties);
}
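To make the field mapping concrete: given a Connect schema like the sketch below, addAvroFields would yield ORDERTIME BIGINT and ITEMID VARCHAR, consistent with the assertions in AvroUtilTest further down. SchemaBuilder is the standard Kafka Connect builder; the exact type-name translation lives in SchemaUtil.getSqlTypeName.

// Illustrative input only: addAvroFields upper-cases each field name and maps
// INT64 -> BIGINT, STRING -> VARCHAR via SchemaUtil.getSqlTypeName.
Schema schema = SchemaBuilder.struct()
    .field("ordertime", Schema.INT64_SCHEMA)
    .field("itemid", Schema.STRING_SCHEMA)
    .build();
// Expected elements: [TableElement("ORDERTIME", "BIGINT"),
//                     TableElement("ITEMID", "VARCHAR")]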
Use of io.confluent.ksql.parser.tree.TableElement in project ksql by confluentinc.
From the class AvroUtilTest, the method shouldPassAvroCheck:
@Test
public void shouldPassAvroCheck() throws Exception {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  SchemaMetadata schemaMetadata = new SchemaMetadata(1, 1, ordersAveroSchemaStr);
  expect(schemaRegistryClient.getLatestSchemaMetadata(anyString())).andReturn(schemaMetadata);
  replay(schemaRegistryClient);

  AbstractStreamCreateStatement abstractStreamCreateStatement = getAbstractStreamCreateStatement(
      "CREATE STREAM S1 WITH (kafka_topic='s1_topic', value_format='avro');");
  Pair<AbstractStreamCreateStatement, String> checkResult = avroUtil.checkAndSetAvroSchema(
      abstractStreamCreateStatement, new HashMap<>(), schemaRegistryClient);
  AbstractStreamCreateStatement newAbstractStreamCreateStatement = checkResult.getLeft();

  assertThat(newAbstractStreamCreateStatement.getElements(), equalTo(Arrays.asList(
      new TableElement("ORDERTIME", "BIGINT"),
      new TableElement("ORDERID", "BIGINT"),
      new TableElement("ITEMID", "VARCHAR"),
      new TableElement("ORDERUNITS", "DOUBLE"),
      new TableElement("ARRAYCOL", "ARRAY<DOUBLE>"),
      new TableElement("MAPCOL", "MAP<VARCHAR,DOUBLE>"))));
}
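The asserted element list implies that ordersAveroSchemaStr (the misspelling is in the original field name) holds an Avro record with six matching fields. A hedged reconstruction follows; the record name and field order are inferred from the assertion, not taken from the test file:

// Hypothetical reconstruction of the schema string, inferred from the assertion:
// long -> BIGINT, string -> VARCHAR, double -> DOUBLE,
// array<double> -> ARRAY<DOUBLE>, map<string,double> -> MAP<VARCHAR,DOUBLE>.
static final String ordersAveroSchemaStr = "{"
    + "\"type\": \"record\", \"name\": \"Orders\", \"fields\": ["
    + "{\"name\": \"ordertime\", \"type\": \"long\"},"
    + "{\"name\": \"orderid\", \"type\": \"long\"},"
    + "{\"name\": \"itemid\", \"type\": \"string\"},"
    + "{\"name\": \"orderunits\", \"type\": \"double\"},"
    + "{\"name\": \"arraycol\", \"type\": {\"type\": \"array\", \"items\": \"double\"}},"
    + "{\"name\": \"mapcol\", \"type\": {\"type\": \"map\", \"values\": \"double\"}}"
    + "]}";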
Use of io.confluent.ksql.parser.tree.TableElement in project ksql by confluentinc.
From the class KsqlRestApplication, the method buildApplication:
public static KsqlRestApplication buildApplication(
    KsqlRestConfig restConfig,
    boolean isUiEnabled,
    VersionCheckerAgent versionCheckerAgent) throws Exception {
  Map<String, Object> ksqlConfProperties = new HashMap<>();
  ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
  adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
  KafkaTopicClient topicClient = ksqlEngine.getTopicClient();

  final String kafkaClusterId;
  try {
    kafkaClusterId = adminClient.describeCluster().clusterId().get();
  } catch (final UnsupportedVersionException e) {
    throw new KsqlException(
        "The Kafka brokers are incompatible. KSQL requires broker versions >= 0.10.1.x");
  }

  // Make sure the command topic exists, then register it as a KSQL topic and
  // expose it as a stream of statements so prior commands can be replayed.
  String commandTopic =
      restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
  ensureCommandTopic(restConfig, topicClient, commandTopic);
  Map<String, Expression> commandTopicProperties = new HashMap<>();
  commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
  commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
  ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(new RegisterTopic(
      QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
  ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand(
      "statementText",
      new CreateStream(
          QualifiedName.of(COMMANDS_STREAM_NAME),
          Collections.singletonList(new TableElement("STATEMENT", "STRING")),
          false,
          Collections.singletonMap(
              DdlConfig.TOPIC_NAME_PROPERTY,
              new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))),
      Collections.emptyMap(),
      ksqlEngine.getTopicClient(),
      true));

  // Wire up the command log (a consumer/producer pair over the command topic)
  // and the runner that executes statements read from it.
  Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
  KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(
      commandConsumerProperties,
      getJsonDeserializer(CommandId.class, true),
      getJsonDeserializer(Command.class, false));
  KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(
      restConfig.getCommandProducerProperties(),
      getJsonSerializer(true),
      getJsonSerializer(false));
  CommandStore commandStore = new CommandStore(
      commandTopic, commandConsumer, commandProducer,
      new CommandIdAssigner(ksqlEngine.getMetaStore()));
  StatementParser statementParser = new StatementParser(ksqlEngine);
  StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
  CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);

  // REST resources served by the application.
  RootDocument rootDocument =
      new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
  StatusResource statusResource = new StatusResource(statementExecutor);
  StreamedQueryResource streamedQueryResource = new StreamedQueryResource(
      ksqlEngine, statementParser,
      restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
  KsqlResource ksqlResource = new KsqlResource(
      ksqlEngine, commandStore, statementExecutor,
      restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));

  commandRunner.processPriorCommands();

  return new KsqlRestApplication(
      ksqlEngine, restConfig, commandRunner, rootDocument, statusResource,
      streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent,
      new ServerInfo(Version.getVersion(), kafkaClusterId));
}
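For context, a server entry point would call buildApplication roughly as below. This is a hypothetical sketch, not the actual KsqlServerMain; the property values and the choice of KsqlVersionCheckerAgent are assumptions:

// Hypothetical wiring of buildApplication; property values are illustrative only.
public static void main(String[] args) throws Exception {
  Properties props = new Properties();
  props.put("bootstrap.servers", "localhost:9092");
  props.put("listeners", "http://localhost:8088");
  KsqlRestApplication app = KsqlRestApplication.buildApplication(
      new KsqlRestConfig(props),
      /* isUiEnabled */ false,
      new KsqlVersionCheckerAgent());
  app.start(); // rest-utils Application lifecycle
  app.join();
}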