Use of io.confluent.ksql.rest.entity.ErrorMessage in project ksql by confluentinc.
From the class Console, the printAsTable method:
private void printAsTable(KsqlEntity ksqlEntity) {
  List<String> header = new ArrayList<>();
  List<String> footer = new ArrayList<>();
  List<String> columnHeaders = new ArrayList<>();
  List<List<String>> rowValues = new ArrayList<>();
  if (ksqlEntity instanceof CommandStatusEntity) {
    CommandStatusEntity commandStatusEntity = (CommandStatusEntity) ksqlEntity;
    columnHeaders = Arrays.asList("Message");
    CommandStatus commandStatus = commandStatusEntity.getCommandStatus();
    rowValues = Collections.singletonList(
        Arrays.asList(commandStatus.getMessage().split("\n", 2)[0]));
  } else if (ksqlEntity instanceof ErrorMessageEntity) {
    ErrorMessage errorMessage = ((ErrorMessageEntity) ksqlEntity).getErrorMessage();
    printErrorMessage(errorMessage);
    return;
  } else if (ksqlEntity instanceof PropertiesList) {
    PropertiesList propertiesList = CliUtils.propertiesListWithOverrides(
        (PropertiesList) ksqlEntity, restClient.getLocalProperties());
    Map<String, Object> properties = propertiesList.getProperties();
    columnHeaders = Arrays.asList("Property", "Value");
    rowValues = properties.entrySet().stream()
        .map(propertyEntry -> Arrays.asList(
            propertyEntry.getKey(), Objects.toString(propertyEntry.getValue())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof Queries) {
    List<Queries.RunningQuery> runningQueries = ((Queries) ksqlEntity).getQueries();
    columnHeaders = Arrays.asList("Query ID", "Kafka Topic", "Query String");
    rowValues = runningQueries.stream()
        .map(runningQuery -> Arrays.asList(
            runningQuery.getId().toString(),
            runningQuery.getKafkaTopic(),
            runningQuery.getQueryString()))
        .collect(Collectors.toList());
    footer.add("For detailed information on a Query run: EXPLAIN <Query ID>;");
  } else if (ksqlEntity instanceof SourceDescription) {
    SourceDescription sourceDescription = (SourceDescription) ksqlEntity;
    List<SourceDescription.FieldSchemaInfo> fields = sourceDescription.getSchema();
    if (!fields.isEmpty()) {
      columnHeaders = Arrays.asList("Field", "Type");
      rowValues = fields.stream()
          .map(field -> Arrays.asList(
              field.getName(), formatFieldType(field, sourceDescription.getKey())))
          .collect(Collectors.toList());
    }
    printExtendedInformation(header, footer, sourceDescription);
  } else if (ksqlEntity instanceof TopicDescription) {
    columnHeaders = new ArrayList<>();
    columnHeaders.add("Topic Name");
    columnHeaders.add("Kafka Topic");
    columnHeaders.add("Type");
    List<String> topicInfo = new ArrayList<>();
    TopicDescription topicDescription = (TopicDescription) ksqlEntity;
    topicInfo.add(topicDescription.getName());
    topicInfo.add(topicDescription.getKafkaTopic());
    topicInfo.add(topicDescription.getFormat());
    if (topicDescription.getFormat().equalsIgnoreCase("AVRO")) {
      columnHeaders.add("AvroSchema");
      topicInfo.add(topicDescription.getSchemaString());
    }
    rowValues = Arrays.asList(topicInfo);
  } else if (ksqlEntity instanceof StreamsList) {
    List<SourceInfo.Stream> streamInfos = ((StreamsList) ksqlEntity).getStreams();
    columnHeaders = Arrays.asList("Stream Name", "Kafka Topic", "Format");
    rowValues = streamInfos.stream()
        .map(streamInfo -> Arrays.asList(
            streamInfo.getName(), streamInfo.getTopic(), streamInfo.getFormat()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof TablesList) {
    List<SourceInfo.Table> tableInfos = ((TablesList) ksqlEntity).getTables();
    columnHeaders = Arrays.asList("Table Name", "Kafka Topic", "Format", "Windowed");
    rowValues = tableInfos.stream()
        .map(tableInfo -> Arrays.asList(
            tableInfo.getName(),
            tableInfo.getTopic(),
            tableInfo.getFormat(),
            Boolean.toString(tableInfo.getIsWindowed())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KsqlTopicsList) {
    List<KsqlTopicInfo> topicInfos = ((KsqlTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList("Ksql Topic", "Kafka Topic", "Format");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(), topicInfo.getKafkaTopic(), topicInfo.getFormat().name()))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof KafkaTopicsList) {
    List<KafkaTopicInfo> topicInfos = ((KafkaTopicsList) ksqlEntity).getTopics();
    columnHeaders = Arrays.asList(
        "Kafka Topic", "Registered", "Partitions", "Partition Replicas",
        "Consumers", "Consumer Groups");
    rowValues = topicInfos.stream()
        .map(topicInfo -> Arrays.asList(
            topicInfo.getName(),
            Boolean.toString(topicInfo.getRegistered()),
            Integer.toString(topicInfo.getReplicaInfo().size()),
            getTopicReplicaInfo(topicInfo.getReplicaInfo()),
            Integer.toString(topicInfo.getConsumerCount()),
            Integer.toString(topicInfo.getConsumerGroupCount())))
        .collect(Collectors.toList());
  } else if (ksqlEntity instanceof ExecutionPlan) {
    ExecutionPlan executionPlan = (ExecutionPlan) ksqlEntity;
    columnHeaders = Arrays.asList("Execution Plan");
    rowValues = Collections.singletonList(Arrays.asList(executionPlan.getExecutionPlan()));
  } else {
    throw new RuntimeException(String.format(
        "Unexpected KsqlEntity class: '%s'", ksqlEntity.getClass().getCanonicalName()));
  }
  printTable(columnHeaders, rowValues, header, footer);
}
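
Note that printAsTable only assembles columnHeaders, rowValues, and the optional header and footer lines for each entity type; the rendering itself is delegated to printTable, whose body is not shown on this page. The following self-contained sketch illustrates what such a renderer has to do: pad every cell to the widest value in its column, then print header lines, a header row, a separator, the data rows, and the footer. The class and method names, the padding rules, and the " | " separator are assumptions for illustration only, not the Console implementation.

import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class TableSketch {

  // Hypothetical renderer, analogous in role to the printTable call above.
  static void renderTable(List<String> columnHeaders, List<List<String>> rowValues,
                          List<String> header, List<String> footer, PrintWriter out) {
    header.forEach(out::println);

    // Width of each column = widest cell in that column, header included.
    int columns = columnHeaders.size();
    int[] widths = new int[columns];
    for (int i = 0; i < columns; i++) {
      widths[i] = Math.max(1, columnHeaders.get(i).length());
    }
    for (List<String> row : rowValues) {
      for (int i = 0; i < columns; i++) {
        widths[i] = Math.max(widths[i], row.get(i).length());
      }
    }

    if (columns > 0) {
      printRow(columnHeaders, widths, out);
      int totalWidth = Arrays.stream(widths).sum() + 3 * (columns - 1);
      out.println(String.join("", Collections.nCopies(totalWidth, "-")));
      rowValues.forEach(row -> printRow(row, widths, out));
    }

    footer.forEach(out::println);
    out.flush();
  }

  // Pads every cell to its column width and separates columns with " | ".
  private static void printRow(List<String> cells, int[] widths, PrintWriter out) {
    StringBuilder line = new StringBuilder();
    for (int i = 0; i < cells.size(); i++) {
      if (i > 0) {
        line.append(" | ");
      }
      line.append(String.format("%-" + widths[i] + "s", cells.get(i)));
    }
    out.println(line);
  }
}

Fed the rowValues built by, say, the StreamsList branch above, a renderer along these lines would print one aligned row per stream under the "Stream Name", "Kafka Topic", and "Format" headers.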
From the class RemoteCliSpecificCommandTest, the shouldPrintErrorOnErrorResponseFromRestClient test method:
@Test
public void shouldPrintErrorOnErrorResponseFromRestClient() {
  final Cli.RemoteServerSpecificCommand command = new Cli.RemoteServerSpecificCommand(
      new KsqlRestClient("xxxx", Collections.emptyMap()) {
        @Override
        public RestResponse<ServerInfo> getServerInfo() {
          return RestResponse.erroneous(
              new ErrorMessage("it is broken", Collections.emptyList()));
        }
      },
      new PrintWriter(out));
  command.execute("http://localhost:8088");
  assertThat(out.toString(), containsString("it is broken"));
}
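
The test stubs the REST layer by anonymously subclassing KsqlRestClient and overriding getServerInfo() to return RestResponse.erroneous(...). The same stub can also be expressed with a mocking library; the sketch below is a hypothetical Mockito variant (Mockito is not shown anywhere on this page and is assumed to be on the test classpath, and KsqlRestClient is assumed to be non-final and therefore mockable). It reuses only the types, constructors, and calls that appear in the original test, including the out writer from the surrounding test class.

// Hypothetical Mockito-based variant of the anonymous-subclass stub above.
@Test
public void shouldPrintErrorOnErrorResponseFromRestClient_withMockito() {
  // Replace the hand-rolled stub with a mock that returns the same erroneous response.
  KsqlRestClient restClient = org.mockito.Mockito.mock(KsqlRestClient.class);
  org.mockito.Mockito.when(restClient.getServerInfo())
      .thenReturn(RestResponse.erroneous(
          new ErrorMessage("it is broken", Collections.emptyList())));

  Cli.RemoteServerSpecificCommand command =
      new Cli.RemoteServerSpecificCommand(restClient, new PrintWriter(out));
  command.execute("http://localhost:8088");

  assertThat(out.toString(), containsString("it is broken"));
}

Either way, executing the command should surface the stubbed error message in the writer's output, which is what the assertion checks.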