Use of io.confluent.ksql.rest.server.computation.CommandId in project ksql by confluentinc.
The class KsqlResource, method distributeStatement.
private CommandStatusEntity distributeStatement(
    String statementText,
    Statement statement,
    Map<String, Object> streamsProperties) throws KsqlException {
  // Enqueue the statement on the command topic and get back its CommandId.
  CommandId commandId = commandStore.distributeStatement(statementText, statement, streamsProperties);
  CommandStatus commandStatus;
  try {
    // Wait up to the configured timeout for the executor to report a status for the queued command.
    commandStatus = statementExecutor.registerQueuedStatement(commandId)
        .get(distributedCommandResponseTimeout, TimeUnit.MILLISECONDS);
  } catch (TimeoutException exception) {
    log.warn("Timed out waiting {} ms for command status; statementText: " + statementText,
        distributedCommandResponseTimeout, exception);
    // Fall back to whatever status is currently recorded for the command.
    commandStatus = statementExecutor.getStatus(commandId).get();
  } catch (Exception e) {
    throw new KsqlException(
        String.format("Could not write the statement '%s' into the command topic.", statementText), e);
  }
  return new CommandStatusEntity(statementText, commandId, commandStatus);
}
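The interesting part of this method is the fallback on timeout: it waits up to distributedCommandResponseTimeout milliseconds for the executor to report a status, and if that window expires it returns whatever status is currently known instead of failing the request. Below is a minimal, self-contained sketch of the same wait-then-fall-back pattern using only java.util.concurrent types; the 250 ms timeout and the status strings are hypothetical stand-ins, not values from KSQL.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public final class TimeoutFallbackSketch {
  public static void main(String[] args) throws Exception {
    // Future that would be completed by the executor once the command reaches a terminal state.
    CompletableFuture<String> finalStatus = new CompletableFuture<>();
    // The status currently recorded for the command (already known, hence completed).
    CompletableFuture<String> currentStatus = CompletableFuture.completedFuture("QUEUED");

    String status;
    try {
      // Wait a bounded amount of time for the terminal status.
      status = finalStatus.get(250, TimeUnit.MILLISECONDS);
    } catch (TimeoutException e) {
      // Timed out: fall back to the last known status instead of failing.
      status = currentStatus.get();
    }
    System.out.println(status); // prints QUEUED, since finalStatus never completes here
  }
}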
Use of io.confluent.ksql.rest.server.computation.CommandId in project ksql by confluentinc.
The class KsqlRestApplication, method buildApplication.
public static KsqlRestApplication buildApplication(KsqlRestConfig restConfig, boolean isUiEnabled, VersionCheckerAgent versionCheckerAgent) throws Exception {
  Map<String, Object> ksqlConfProperties = new HashMap<>();
  ksqlConfProperties.putAll(restConfig.getKsqlConfigProperties());
  KsqlConfig ksqlConfig = new KsqlConfig(ksqlConfProperties);
  adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
  KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(adminClient));
  KafkaTopicClient topicClient = ksqlEngine.getTopicClient();
  final String kafkaClusterId;
  try {
    kafkaClusterId = adminClient.describeCluster().clusterId().get();
  } catch (final UnsupportedVersionException e) {
    throw new KsqlException("The Kafka brokers are incompatible with KSQL. KSQL requires broker versions >= 0.10.1.x");
  }
  // Ensure the command topic exists, then register it with the engine as a KSQL topic and stream.
  String commandTopic = restConfig.getCommandTopic(ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG));
  ensureCommandTopic(restConfig, topicClient, commandTopic);
  Map<String, Expression> commandTopicProperties = new HashMap<>();
  commandTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral("json"));
  commandTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(commandTopic));
  ksqlEngine.getDdlCommandExec().execute(new RegisterTopicCommand(
      new RegisterTopic(QualifiedName.of(COMMANDS_KSQL_TOPIC_NAME), false, commandTopicProperties)));
  ksqlEngine.getDdlCommandExec().execute(new CreateStreamCommand(
      "statementText",
      new CreateStream(
          QualifiedName.of(COMMANDS_STREAM_NAME),
          Collections.singletonList(new TableElement("STATEMENT", "STRING")),
          false,
          Collections.singletonMap(DdlConfig.TOPIC_NAME_PROPERTY, new StringLiteral(COMMANDS_KSQL_TOPIC_NAME))),
      Collections.emptyMap(),
      ksqlEngine.getTopicClient(),
      true));
  // The command topic is keyed by CommandId and carries Command values, both serialized as JSON.
  Map<String, Object> commandConsumerProperties = restConfig.getCommandConsumerProperties();
  KafkaConsumer<CommandId, Command> commandConsumer = new KafkaConsumer<>(commandConsumerProperties,
      getJsonDeserializer(CommandId.class, true), getJsonDeserializer(Command.class, false));
  KafkaProducer<CommandId, Command> commandProducer = new KafkaProducer<>(restConfig.getCommandProducerProperties(),
      getJsonSerializer(true), getJsonSerializer(false));
  CommandStore commandStore = new CommandStore(commandTopic, commandConsumer, commandProducer,
      new CommandIdAssigner(ksqlEngine.getMetaStore()));
  StatementParser statementParser = new StatementParser(ksqlEngine);
  StatementExecutor statementExecutor = new StatementExecutor(ksqlEngine, statementParser);
  CommandRunner commandRunner = new CommandRunner(statementExecutor, commandStore);
  RootDocument rootDocument = new RootDocument(isUiEnabled, restConfig.getList(RestConfig.LISTENERS_CONFIG).get(0));
  StatusResource statusResource = new StatusResource(statementExecutor);
  StreamedQueryResource streamedQueryResource = new StreamedQueryResource(ksqlEngine, statementParser,
      restConfig.getLong(KsqlRestConfig.STREAMED_QUERY_DISCONNECT_CHECK_MS_CONFIG));
  KsqlResource ksqlResource = new KsqlResource(ksqlEngine, commandStore, statementExecutor,
      restConfig.getLong(KsqlRestConfig.DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT_MS_CONFIG));
  // Replay any commands already on the command topic before the server starts accepting requests.
  commandRunner.processPriorCommands();
  return new KsqlRestApplication(ksqlEngine, restConfig, commandRunner, rootDocument, statusResource,
      streamedQueryResource, ksqlResource, isUiEnabled, versionCheckerAgent,
      new ServerInfo(Version.getVersion(), kafkaClusterId));
}
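getJsonSerializer and getJsonDeserializer are private helpers of KsqlRestApplication and are not shown in this snippet. As a rough sketch of what a Jackson-backed Kafka serializer/deserializer pair for the CommandId key and Command value might look like; the class names and the use of ObjectMapper here are assumptions for illustration, not the project's actual helpers:

import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;

// Hypothetical Jackson-backed (de)serializer pair; KSQL's real helpers may differ.
class JsonPojoSerializer<T> implements Serializer<T> {
  private final ObjectMapper mapper = new ObjectMapper();

  @Override
  public void configure(Map<String, ?> configs, boolean isKey) { }

  @Override
  public byte[] serialize(String topic, T data) {
    try {
      return data == null ? null : mapper.writeValueAsBytes(data);
    } catch (Exception e) {
      throw new SerializationException("Failed to serialize record for topic " + topic, e);
    }
  }

  @Override
  public void close() { }
}

class JsonPojoDeserializer<T> implements Deserializer<T> {
  private final ObjectMapper mapper = new ObjectMapper();
  private final Class<T> type;

  JsonPojoDeserializer(Class<T> type) {
    this.type = type;
  }

  @Override
  public void configure(Map<String, ?> configs, boolean isKey) { }

  @Override
  public T deserialize(String topic, byte[] bytes) {
    try {
      return bytes == null ? null : mapper.readValue(bytes, type);
    } catch (Exception e) {
      throw new SerializationException("Failed to deserialize record for topic " + topic, e);
    }
  }

  @Override
  public void close() { }
}

With such a pair, the CommandId-keyed consumer above would be built roughly as new KafkaConsumer<>(props, new JsonPojoDeserializer<>(CommandId.class), new JsonPojoDeserializer<>(Command.class)); the boolean passed to the real helpers presumably indicates whether the (de)serializer is for the record key.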
Use of io.confluent.ksql.rest.server.computation.CommandId in project ksql by confluentinc.
The class MockStatusResource, method getAllStatuses.
@GET
public Response getAllStatuses() {
  Map<CommandId, CommandStatus.Status> statuses = new HashMap<>();
  statuses.put(new CommandId(CommandId.Type.TOPIC, "c1", CommandId.Action.CREATE), CommandStatus.Status.SUCCESS);
  statuses.put(new CommandId(CommandId.Type.TOPIC, "c2", CommandId.Action.CREATE), CommandStatus.Status.ERROR);
  CommandStatuses commandStatuses = new CommandStatuses(statuses);
  return Response.ok(commandStatuses).build();
}
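A small JUnit-style sketch of exercising this mock directly, without an HTTP layer. The test class and method names are made up for illustration, the io.confluent.ksql.rest.entity package for CommandStatuses is assumed, and the sketch assumes MockStatusResource has a no-argument constructor and sits in the same package as the test.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import io.confluent.ksql.rest.entity.CommandStatuses; // package assumed for this sketch
import javax.ws.rs.core.Response;
import org.junit.Test;

// Hypothetical test class, placed next to MockStatusResource so the mock resolves without an import.
public class MockStatusResourceSketchTest {

  @Test
  public void shouldReturnTheTwoCannedStatuses() {
    Response response = new MockStatusResource().getAllStatuses();
    // The mock always answers 200 OK with a CommandStatuses entity built from the two CommandIds above.
    assertEquals(200, response.getStatus());
    assertTrue(response.getEntity() instanceof CommandStatuses);
  }
}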
Use of io.confluent.ksql.rest.server.computation.CommandId in project ksql by confluentinc.
The class StatusResourceTest, method testGetStatus.
@Test
public void testGetStatus() throws Exception {
  StatusResource testResource = getTestStatusResource();
  for (Map.Entry<CommandId, CommandStatus> commandEntry : mockCommandStatuses.entrySet()) {
    CommandId commandId = commandEntry.getKey();
    CommandStatus expectedCommandStatus = commandEntry.getValue();
    Object statusEntity = testResource
        .getStatus(commandId.getType().name(), commandId.getEntity(), commandId.getAction().name())
        .getEntity();
    assertThat(statusEntity, instanceOf(CommandStatus.class));
    CommandStatus testCommandStatus = (CommandStatus) statusEntity;
    assertEquals(expectedCommandStatus, testCommandStatus);
  }
}
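The getStatus call above rebuilds its three path parameters from a CommandId. A minimal sketch of that decomposition, using only the constructor and accessors already seen in these snippets (the "users" entity name is just an example value):

import io.confluent.ksql.rest.server.computation.CommandId;

public final class CommandIdPathSegmentsSketch {
  public static void main(String[] args) {
    // "users" is a hypothetical entity name for illustration.
    CommandId commandId = new CommandId(CommandId.Type.TOPIC, "users", CommandId.Action.CREATE);
    String type = commandId.getType().name();     // "TOPIC"
    String entity = commandId.getEntity();        // "users"
    String action = commandId.getAction().name(); // "CREATE"
    // These three strings are exactly what the test passes to testResource.getStatus(...).
    System.out.println(type + "/" + entity + "/" + action);
  }
}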
Use of io.confluent.ksql.rest.server.computation.CommandId in project ksql by confluentinc.
The class KsqlResourceTest, method testInstantRegisterTopic.
@Test
public void testInstantRegisterTopic() throws Exception {
  KsqlResource testResource = TestKsqlResourceUtil.get(ksqlEngine, ksqlRestConfig);

  final String ksqlTopic = "FOO";
  final String kafkaTopic = "bar";
  final String format = "json";
  final String ksqlString = String.format(
      "REGISTER TOPIC %s WITH (kafka_topic='%s', value_format='%s');", ksqlTopic, kafkaTopic, format);

  final Map<String, Expression> createTopicProperties = new HashMap<>();
  createTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(kafkaTopic));
  createTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral(format));
  final RegisterTopic ksqlStatement = new RegisterTopic(QualifiedName.of(ksqlTopic), false, createTopicProperties);

  final CommandId commandId = new CommandId(CommandId.Type.TOPIC, ksqlTopic, CommandId.Action.CREATE);
  final CommandStatus commandStatus = new CommandStatus(CommandStatus.Status.QUEUED, "Statement written to command topic");
  final CommandStatusEntity expectedCommandStatusEntity = new CommandStatusEntity(ksqlString, commandId, commandStatus);
  final Map<String, Object> streamsProperties = Collections.emptyMap();

  KsqlEntity testKsqlEntity = makeSingleRequest(testResource, ksqlString, ksqlStatement, streamsProperties, KsqlEntity.class);
  assertEquals(expectedCommandStatusEntity, testKsqlEntity);
}
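The final assertEquals relies on CommandStatusEntity comparing by value, since the entity returned by the resource is a different instance from the expected one. A small sketch of that equality, reusing only constructors that already appear in this test; the statement text is abbreviated and the io.confluent.ksql.rest.entity package names are assumed.

import static org.junit.Assert.assertEquals;

import io.confluent.ksql.rest.entity.CommandStatus;       // packages assumed for this sketch
import io.confluent.ksql.rest.entity.CommandStatusEntity;
import io.confluent.ksql.rest.server.computation.CommandId;
import org.junit.Test;

public class CommandStatusEntityEqualitySketchTest {

  @Test
  public void shouldCompareByValue() {
    CommandId id = new CommandId(CommandId.Type.TOPIC, "FOO", CommandId.Action.CREATE);
    CommandStatus queued = new CommandStatus(CommandStatus.Status.QUEUED, "Statement written to command topic");
    // Two separately constructed entities with identical fields must compare equal,
    // which is exactly what the assertEquals in testInstantRegisterTopic relies on.
    assertEquals(
        new CommandStatusEntity("REGISTER TOPIC FOO ...;", id, queued),
        new CommandStatusEntity("REGISTER TOPIC FOO ...;", id, queued));
  }
}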