Use of io.confluent.ksql.rest.entity.CommandStatusEntity in project ksql by confluentinc.
The class Cli, method printKsqlResponse.
private void printKsqlResponse(RestResponse<KsqlEntityList> response) throws IOException {
  if (response.isSuccessful()) {
    KsqlEntityList ksqlEntities = response.getResponse();
    boolean noErrorFromServer = true;
    for (KsqlEntity entity : ksqlEntities) {
      if (entity instanceof ErrorMessageEntity) {
        ErrorMessageEntity errorMsg = (ErrorMessageEntity) entity;
        terminal.printErrorMessage(errorMsg.getErrorMessage());
        LOGGER.error(errorMsg.getErrorMessage().getMessage());
        noErrorFromServer = false;
      } else if (entity instanceof CommandStatusEntity
          && ((CommandStatusEntity) entity).getCommandStatus().getStatus() == CommandStatus.Status.ERROR) {
        String fullMessage = ((CommandStatusEntity) entity).getCommandStatus().getMessage();
        terminal.printError(fullMessage.split("\n")[0], fullMessage);
        noErrorFromServer = false;
      }
    }
    if (noErrorFromServer) {
      terminal.printKsqlEntityList(response.getResponse());
    }
  } else {
    terminal.printErrorMessage(response.getErrorMessage());
  }
}
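The branch above treats a CommandStatusEntity whose status is ERROR as a server-side failure even though the REST call itself succeeded. A minimal sketch of that check in isolation, built only from the constructors and getters that appear elsewhere on this page; the exact signatures are assumptions that may differ across KSQL versions, and imports are omitted as in the other snippets:

// Sketch: an ERROR CommandStatusEntity would take the printError branch above.
CommandId commandId = new CommandId(CommandId.Type.TOPIC, "FOO", CommandId.Action.CREATE);
CommandStatus errorStatus =
    new CommandStatus(CommandStatus.Status.ERROR, "Failed to register topic\nCaused by: ...");
CommandStatusEntity entity =
    new CommandStatusEntity("REGISTER TOPIC FOO ...", commandId, errorStatus);

if (entity.getCommandStatus().getStatus() == CommandStatus.Status.ERROR) {
  String fullMessage = entity.getCommandStatus().getMessage();
  // First line as the short error, full text as the detail, mirroring printError above.
  System.err.println(fullMessage.split("\n")[0]);
}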
Use of io.confluent.ksql.rest.entity.CommandStatusEntity in project ksql by confluentinc.
The class Console, method printAsTable.
private void printAsTable(KsqlEntity ksqlEntity) {
  List<String> header = new ArrayList<>();
  List<String> footer = new ArrayList<>();
  List<String> columnHeaders = new ArrayList<>();
  List<List<String>> rowValues = new ArrayList<>();
  if (ksqlEntity instanceof CommandStatusEntity) {
    CommandStatusEntity commandStatusEntity = (CommandStatusEntity) ksqlEntity;
    columnHeaders = Arrays.asList("Message");
    CommandStatus commandStatus = commandStatusEntity.getCommandStatus();
    rowValues = Collections.singletonList(
        Arrays.asList(commandStatus.getMessage().split("\n", 2)[0]));
  } else if (ksqlEntity instanceof ErrorMessageEntity) {
    ErrorMessage errorMessage = ((ErrorMessageEntity) ksqlEntity).getErrorMessage();
    printErrorMessage(errorMessage);
    return;
  } else if (ksqlEntity instanceof PropertiesList) {
    PropertiesList propertiesList = CliUtils.propertiesListWithOverrides(
        (PropertiesList) ksqlEntity, restClient.getLocalProperties());
    Map<String, Object> properties = propertiesList.getProperties();
    columnHeaders = Arrays.asList("Property", "Value");
    rowValues = properties.entrySet().stream()
        .map(propertyEntry -> Arrays.asList(
            propertyEntry.getKey(), Objects.toString(propertyEntry.getValue())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof Queries) {
    List<Queries.RunningQuery> runningQueries = ((Queries) ksqlEntity).getQueries();
    columnHeaders = Arrays.asList("Query ID", "Kafka Topic", "Query String");
    rowValues = runningQueries.stream()
        .map(runningQuery -> Arrays.asList(
            runningQuery.getId().toString(),
            runningQuery.getKafkaTopic(),
            runningQuery.getQueryString()))
        .collect(Collectors.toList());
    footer.add("For detailed information on a Query run: EXPLAIN <Query ID>;");
  } else if (ksqlEntity instanceof SourceDescription) {
    SourceDescription sourceDescription = (SourceDescription) ksqlEntity;
    List<SourceDescription.FieldSchemaInfo> fields = sourceDescription.getSchema();
    if (!fields.isEmpty()) {
      columnHeaders = Arrays.asList("Field", "Type");
      rowValues = fields.stream()
          .map(field -> Arrays.asList(
              field.getName(), formatFieldType(field, sourceDescription.getKey())))
          .collect(Collectors.toList());
    }
    printExtendedInformation(header, footer, sourceDescription);
  } else if (ksqlEntity instanceof TopicDescription) {
    columnHeaders = new ArrayList<>();
    columnHeaders.add("Topic Name");
    columnHeaders.add("Kafka Topic");
    columnHeaders.add("Type");
    List<String> topicInfo = new ArrayList<>();
    TopicDescription topicDescription = (TopicDescription) ksqlEntity;
    topicInfo.add(topicDescription.getName());
    topicInfo.add(topicDescription.getKafkaTopic());
    topicInfo.add(topicDescription.getFormat());
    if (topicDescription.getFormat().equalsIgnoreCase("AVRO")) {
      columnHeaders.add("AvroSchema");
      topicInfo.add(topicDescription.getSchemaString());
    }
    rowValues = Arrays.asList(topicInfo);
  } else if (ksqlEntity instanceof StreamsList) {
    List<SourceInfo.Stream> streamInfos = ((StreamsList) ksqlEntity).getStreams();
    columnHeaders = Arrays.asList("Stream Name", "Kafka Topic", "Format");
    rowValues = streamInfos.stream()
        .map(streamInfo -> Arrays.asList(
            streamInfo.getName(), streamInfo.getTopic(), streamInfo.getFormat()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof TablesList) {
    List<SourceInfo.Table> tableInfos = ((TablesList) ksqlEntity).getTables();
    columnHeaders = Arrays.asList("Table Name", "Kafka Topic", "Format", "Windowed");
    rowValues = tableInfos.stream()
        .map(tableInfo -> Arrays.asList(
            tableInfo.getName(),
            tableInfo.getTopic(),
            tableInfo.getFormat(),
            Boolean.toString(tableInfo.getIsWindowed())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KsqlTopicsList) {
    List<KsqlTopicInfo> topicInfos = ((KsqlTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList("Ksql Topic", "Kafka Topic", "Format");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(), topicInfo.getKafkaTopic(), topicInfo.getFormat().name()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KafkaTopicsList) {
    List<KafkaTopicInfo> topicInfos = ((KafkaTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList(
        "Kafka Topic", "Registered", "Partitions", "Partition Replicas",
        "Consumers", "Consumer Groups");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(),
            Boolean.toString(topicInfo.getRegistered()),
            Integer.toString(topicInfo.getReplicaInfo().size()),
            getTopicReplicaInfo(topicInfo.getReplicaInfo()),
            Integer.toString(topicInfo.getConsumerCount()),
            Integer.toString(topicInfo.getConsumerGroupCount())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof ExecutionPlan) {
    ExecutionPlan executionPlan = (ExecutionPlan) ksqlEntity;
    columnHeaders = Arrays.asList("Execution Plan");
    rowValues = Collections.singletonList(Arrays.asList(executionPlan.getExecutionPlan()));
  } else {
    throw new RuntimeException(String.format(
        "Unexpected KsqlEntity class: '%s'", ksqlEntity.getClass().getCanonicalName()));
  }
  printTable(columnHeaders, rowValues, header, footer);
}
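In the CommandStatusEntity branch above only the first line of the status message is rendered under the single "Message" column; the limit argument 2 stops the split after the first newline. A standalone illustration in plain Java:

// Standalone illustration of the truncation used in the CommandStatusEntity branch.
String message = "Table created and running\ndetails: topology and execution plan follow";
String firstLine = message.split("\n", 2)[0];
// With limit 2 the remainder stays in a single second element instead of being split per line.
System.out.println(firstLine);   // prints: Table created and running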
Use of io.confluent.ksql.rest.entity.CommandStatusEntity in project ksql by confluentinc.
The class KsqlResource, method distributeStatement.
private CommandStatusEntity distributeStatement(
    String statementText,
    Statement statement,
    Map<String, Object> streamsProperties) throws KsqlException {
  CommandId commandId = commandStore.distributeStatement(statementText, statement, streamsProperties);
  CommandStatus commandStatus;
  try {
    commandStatus = statementExecutor.registerQueuedStatement(commandId)
        .get(distributedCommandResponseTimeout, TimeUnit.MILLISECONDS);
  } catch (TimeoutException exception) {
    log.warn("Timed out after {} milliseconds waiting for command status of statement: {}",
        distributedCommandResponseTimeout, statementText, exception);
    commandStatus = statementExecutor.getStatus(commandId).get();
  } catch (Exception e) {
    throw new KsqlException(
        String.format("Could not write the statement '%s' into the command topic.", statementText), e);
  }
  return new CommandStatusEntity(statementText, commandId, commandStatus);
}
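distributeStatement waits a bounded time for the queued command to finish and, on timeout, falls back to the last known status instead of failing. A self-contained sketch of that wait-with-fallback pattern using plain java.util.concurrent types; the names here are illustrative stand-ins, not the KSQL API:

import java.util.concurrent.*;

public final class BoundedWaitExample {
  // Illustrative stand-in for statementExecutor.registerQueuedStatement(commandId):
  // a future that completes once the distributed command has been processed.
  static String waitForStatus(Future<String> pending, long timeoutMs, String lastKnown) {
    try {
      return pending.get(timeoutMs, TimeUnit.MILLISECONDS);
    } catch (TimeoutException e) {
      // Same idea as the catch block above: do not fail, report the current status.
      return lastKnown;
    } catch (InterruptedException | ExecutionException e) {
      throw new RuntimeException("Could not obtain command status", e);
    }
  }

  public static void main(String[] args) {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    Future<String> pending = executor.submit(() -> {
      Thread.sleep(200);          // simulate slow command processing
      return "SUCCESS";
    });
    System.out.println(waitForStatus(pending, 50, "QUEUED"));   // prints QUEUED (timed out)
    executor.shutdown();
  }
}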
Use of io.confluent.ksql.rest.entity.CommandStatusEntity in project ksql by confluentinc.
The class ConsoleTest, method testPrintKSqlEntityList.
@Test
public void testPrintKSqlEntityList() throws IOException {
  Map<String, Object> properties = new HashMap<>();
  properties.put("k1", 1);
  properties.put("k2", "v2");
  properties.put("k3", true);
  List<Queries.RunningQuery> queries = new ArrayList<>();
  queries.add(new Queries.RunningQuery("select * from t1", "TestTopic", new QueryId("0")));
  for (int i = 0; i < 5; i++) {
    KsqlEntityList entityList = new KsqlEntityList(Arrays.asList(
        new CommandStatusEntity("e", "topic/1/create", "SUCCESS", "Success Message"),
        new ErrorMessageEntity("e", new FakeException()),
        new PropertiesList("e", properties),
        new Queries("e", queries),
        new SourceDescription(
            "e", "TestSource", Collections.EMPTY_LIST, Collections.EMPTY_LIST,
            buildTestSchema(i), DataSource.DataSourceType.KTABLE.getKqlType(), "key",
            "2000-01-01", "stats", "errors", false, "avro", "kadka-topic", "topology",
            "executionPlan", 1, 1),
        new TopicDescription("e", "TestTopic", "TestKafkaTopic", "AVRO", "schemaString"),
        new StreamsList("e", Arrays.asList(new SourceInfo.Stream("TestStream", "TestTopic", "AVRO"))),
        new TablesList("e", Arrays.asList(new SourceInfo.Table("TestTable", "TestTopic", "JSON", false))),
        new KsqlTopicsList("e", Arrays.asList(
            new KsqlTopicInfo("TestTopic", "TestKafkaTopic", DataSource.DataSourceSerDe.JSON))),
        new KafkaTopicsList("e", Arrays.asList(
            new KafkaTopicInfo("TestKafkaTopic", true, ImmutableList.of(1), 1, 1))),
        new ExecutionPlan("Test Execution Plan")));
    terminal.printKsqlEntityList(entityList);
  }
}
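The test above builds a CommandStatusEntity from raw strings rather than CommandId and CommandStatus objects. A small hedged sketch of what that constructor is expected to expose through the getters used elsewhere on this page; the mapping of "SUCCESS" onto CommandStatus.Status.SUCCESS is an assumption to verify against your KSQL version:

@Test
public void shouldExposeStatusAndMessage() {
  CommandStatusEntity entity =
      new CommandStatusEntity("e", "topic/1/create", "SUCCESS", "Success Message");
  // Getters as used in Cli.printKsqlResponse and Console.printAsTable above.
  assertEquals(CommandStatus.Status.SUCCESS, entity.getCommandStatus().getStatus());
  assertEquals("Success Message", entity.getCommandStatus().getMessage());
}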
Use of io.confluent.ksql.rest.entity.CommandStatusEntity in project ksql by confluentinc.
The class KsqlResourceTest, method testInstantRegisterTopic.
@Test
public void testInstantRegisterTopic() throws Exception {
  KsqlResource testResource = TestKsqlResourceUtil.get(ksqlEngine, ksqlRestConfig);
  final String ksqlTopic = "FOO";
  final String kafkaTopic = "bar";
  final String format = "json";
  final String ksqlString = String.format(
      "REGISTER TOPIC %s WITH (kafka_topic='%s', value_format='%s');",
      ksqlTopic, kafkaTopic, format);
  final Map<String, Expression> createTopicProperties = new HashMap<>();
  createTopicProperties.put(DdlConfig.KAFKA_TOPIC_NAME_PROPERTY, new StringLiteral(kafkaTopic));
  createTopicProperties.put(DdlConfig.VALUE_FORMAT_PROPERTY, new StringLiteral(format));
  final RegisterTopic ksqlStatement =
      new RegisterTopic(QualifiedName.of(ksqlTopic), false, createTopicProperties);
  final CommandId commandId =
      new CommandId(CommandId.Type.TOPIC, ksqlTopic, CommandId.Action.CREATE);
  final CommandStatus commandStatus =
      new CommandStatus(CommandStatus.Status.QUEUED, "Statement written to command topic");
  final CommandStatusEntity expectedCommandStatusEntity =
      new CommandStatusEntity(ksqlString, commandId, commandStatus);
  final Map<String, Object> streamsProperties = Collections.emptyMap();
  KsqlEntity testKsqlEntity = makeSingleRequest(
      testResource, ksqlString, ksqlStatement, streamsProperties, KsqlEntity.class);
  assertEquals(expectedCommandStatusEntity, testKsqlEntity);
}
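Because the assertion compares whole CommandStatusEntity values, it also pins the exact status message. A looser variant that only inspects the status through the getters shown on this page is sketched below; it is an alternative style, not the project's convention:

// Sketch: check only the queued status instead of whole-entity equality.
CommandStatusEntity commandStatusEntity = (CommandStatusEntity) testKsqlEntity;
assertEquals(CommandStatus.Status.QUEUED, commandStatusEntity.getCommandStatus().getStatus());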