Use of io.confluent.ksql.rest.entity.SchemaInfo in project ksql by confluentinc.
From the class ConsoleTest, method shouldPrintExplainQueryWithError:
@Test
public void shouldPrintExplainQueryWithError() {
  final long timestamp = 1596644936314L;

  // Given:
  final QueryDescriptionEntity queryEntity = new QueryDescriptionEntity(
      "statement",
      new QueryDescription(
          new QueryId("id"), "statement", Optional.empty(),
          ImmutableList.of(new FieldInfo(
              "name", new SchemaInfo(SqlBaseType.STRING, ImmutableList.of(), null), Optional.empty())),
          ImmutableSet.of("source"), ImmutableSet.of("sink"),
          "topology", "executionPlan",
          ImmutableMap.of("overridden.prop", 42),
          ImmutableMap.of(new KsqlHostInfoEntity("foo", 123), KsqlQueryStatus.ERROR),
          KsqlQueryType.PERSISTENT,
          ImmutableList.of(new QueryError(timestamp, "error", Type.SYSTEM)),
          ImmutableSet.of(new StreamsTaskMetadata("test", Collections.emptySet(), Optional.empty())),
          "consumerGroupId"));
  final KsqlEntityList entityList = new KsqlEntityList(ImmutableList.of(queryEntity));

  // When:
  console.printKsqlEntityList(entityList);

  // Then:
  final String output = terminal.getOutputString();
  Approvals.verify(output, approvalOptions);
}
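Both ConsoleTest snippets on this page end with Approvals.verify(output, approvalOptions) rather than string assertions. A minimal, self-contained sketch of that approval-testing pattern, assuming the org.approvaltests library and JUnit 4; the class name and output string are made up, and ConsoleTest's approvalOptions field is not reproduced here:

import org.approvaltests.Approvals;
import org.junit.Test;

public class ApprovalPatternSketchTest {

  @Test
  public void shouldMatchApprovedOutput() {
    // Approvals compares the received string against a checked-in
    // *.approved.txt file next to the test and fails on any difference,
    // writing a *.received.txt file for review when they diverge.
    final String output = "example console output";
    Approvals.verify(output);
  }
}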
From the class ClientTest, method shouldDescribeSource:
@Test
public void shouldDescribeSource() throws Exception {
  // Given
  final io.confluent.ksql.rest.entity.SourceDescription sd =
      new io.confluent.ksql.rest.entity.SourceDescription(
          "name",
          Optional.of(WindowType.TUMBLING),
          Collections.singletonList(new RunningQuery(
              "query_sql", ImmutableSet.of("sink"), ImmutableSet.of("sink_topic"),
              new QueryId("a_persistent_query"),
              new QueryStatusCount(ImmutableMap.of(KsqlQueryStatus.RUNNING, 1)),
              KsqlQueryType.PERSISTENT)),
          Collections.emptyList(),
          ImmutableList.of(
              new FieldInfo("f1", new SchemaInfo(SqlBaseType.STRING, null, null), Optional.of(FieldType.KEY)),
              new FieldInfo("f2", new SchemaInfo(SqlBaseType.INTEGER, null, null), Optional.empty())),
          "TABLE", "", "", "", false, "KAFKA", "JSON", "topic", 4, 1, "sql",
          Collections.emptyList(),
          ImmutableList.of("s1", "s2"));
  final SourceDescriptionEntity entity =
      new SourceDescriptionEntity("describe source;", sd, Collections.emptyList());
  testEndpoints.setKsqlEndpointResponse(Collections.singletonList(entity));

  // When
  final SourceDescription description = javaClient.describeSource("source").get();

  // Then
  assertThat(description.name(), is("name"));
  assertThat(description.type(), is("TABLE"));
  assertThat(description.fields(), hasSize(2));
  assertThat(description.fields().get(0).name(), is("f1"));
  assertThat(description.fields().get(0).type().getType(), is(ColumnType.Type.STRING));
  assertThat(description.fields().get(0).isKey(), is(true));
  assertThat(description.fields().get(1).name(), is("f2"));
  assertThat(description.fields().get(1).type().getType(), is(ColumnType.Type.INTEGER));
  assertThat(description.fields().get(1).isKey(), is(false));
  assertThat(description.topic(), is("topic"));
  assertThat(description.keyFormat(), is("KAFKA"));
  assertThat(description.valueFormat(), is("JSON"));
  assertThat(description.readQueries(), hasSize(1));
  assertThat(description.readQueries().get(0).getQueryType(), is(QueryType.PERSISTENT));
  assertThat(description.readQueries().get(0).getId(), is("a_persistent_query"));
  assertThat(description.readQueries().get(0).getSql(), is("query_sql"));
  assertThat(description.readQueries().get(0).getSink(), is(Optional.of("sink")));
  assertThat(description.readQueries().get(0).getSinkTopic(), is(Optional.of("sink_topic")));
  assertThat(description.writeQueries(), hasSize(0));
  assertThat(description.timestampColumn(), is(Optional.empty()));
  assertThat(description.windowType(), is(Optional.of("TUMBLING")));
  assertThat(description.sqlStatement(), is("sql"));
  assertThat(description.getSourceConstraints(), hasItems("s1", "s2"));
}
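Outside the test harness, the same describeSource call is made against a running ksqlDB server through the Java client. A minimal sketch, assuming the io.confluent.ksql.api.client entry points Client and ClientOptions and a server on localhost:8088 (both assumptions, not part of the test above). It uses the client-side SourceDescription and FieldInfo types, not the io.confluent.ksql.rest.entity types the test uses to stub the server response:

import io.confluent.ksql.api.client.Client;
import io.confluent.ksql.api.client.ClientOptions;
import io.confluent.ksql.api.client.FieldInfo;
import io.confluent.ksql.api.client.SourceDescription;

public final class DescribeSourceSketch {

  public static void main(final String[] args) throws Exception {
    // Hypothetical connection details; adjust to the target ksqlDB server.
    final ClientOptions options = ClientOptions.create()
        .setHost("localhost")
        .setPort(8088);
    final Client client = Client.create(options);

    // Same call the test exercises; describeSource returns a CompletableFuture.
    final SourceDescription description = client.describeSource("source").get();

    System.out.println(description.name() + " (" + description.type() + ")");
    for (final FieldInfo field : description.fields()) {
      System.out.println("  " + field.name() + ": " + field.type().getType()
          + (field.isKey() ? " (key)" : ""));
    }

    client.close();
  }
}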
From the class ClientTest, method shouldDescribeSourceWithoutSourceConstraints:
@Test
public void shouldDescribeSourceWithoutSourceConstraints() throws Exception {
  // Given
  final LegacySourceDescription sd = new LegacySourceDescription(
      "name",
      Optional.of(WindowType.TUMBLING),
      Collections.singletonList(new RunningQuery(
          "query_sql", ImmutableSet.of("sink"), ImmutableSet.of("sink_topic"),
          new QueryId("a_persistent_query"),
          new QueryStatusCount(ImmutableMap.of(KsqlQueryStatus.RUNNING, 1)),
          KsqlQueryType.PERSISTENT)),
      Collections.emptyList(),
      ImmutableList.of(
          new FieldInfo("f1", new SchemaInfo(SqlBaseType.STRING, null, null), Optional.of(FieldType.KEY)),
          new FieldInfo("f2", new SchemaInfo(SqlBaseType.INTEGER, null, null), Optional.empty())),
      "TABLE", "", false, "KAFKA", "JSON", "topic", 4, 1, "sql",
      Collections.emptyList());
  final LegacySourceDescriptionEntity entity =
      new LegacySourceDescriptionEntity("describe source;", sd, Collections.emptyList());
  testEndpoints.setKsqlEndpointResponse(Collections.singletonList(entity));

  // When
  final SourceDescription description = javaClient.describeSource("source").get();

  // Then
  assertThat(description.name(), is("name"));
  assertThat(description.type(), is("TABLE"));
  assertThat(description.fields(), hasSize(2));
  assertThat(description.fields().get(0).name(), is("f1"));
  assertThat(description.fields().get(0).type().getType(), is(ColumnType.Type.STRING));
  assertThat(description.fields().get(0).isKey(), is(true));
  assertThat(description.fields().get(1).name(), is("f2"));
  assertThat(description.fields().get(1).type().getType(), is(ColumnType.Type.INTEGER));
  assertThat(description.fields().get(1).isKey(), is(false));
  assertThat(description.topic(), is("topic"));
  assertThat(description.keyFormat(), is("KAFKA"));
  assertThat(description.valueFormat(), is("JSON"));
  assertThat(description.readQueries(), hasSize(1));
  assertThat(description.readQueries().get(0).getQueryType(), is(QueryType.PERSISTENT));
  assertThat(description.readQueries().get(0).getId(), is("a_persistent_query"));
  assertThat(description.readQueries().get(0).getSql(), is("query_sql"));
  assertThat(description.readQueries().get(0).getSink(), is(Optional.of("sink")));
  assertThat(description.readQueries().get(0).getSinkTopic(), is(Optional.of("sink_topic")));
  assertThat(description.writeQueries(), hasSize(0));
  assertThat(description.timestampColumn(), is(Optional.empty()));
  assertThat(description.windowType(), is(Optional.of("TUMBLING")));
  assertThat(description.sqlStatement(), is("sql"));
  assertThat(description.getSourceConstraints().size(), is(0));
}
From the class ListTypesExecutor, method execute:
public static StatementExecutorResponse execute(
    final ConfiguredStatement<ListTypes> configuredStatement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext) {
  // Collect every custom type registered in the metastore as a name -> SchemaInfo entry.
  final ImmutableMap.Builder<String, SchemaInfo> types = ImmutableMap.builder();
  final Iterator<CustomType> customTypes = executionContext.getMetaStore().types();
  while (customTypes.hasNext()) {
    final CustomType customType = customTypes.next();
    types.put(customType.getName(), EntityUtil.schemaInfo(customType.getType()));
  }
  // Wrap the map in a TypeList entity, echoing back the original statement text.
  return StatementExecutorResponse.handled(
      Optional.of(new TypeList(configuredStatement.getStatementText(), types.build())));
}
From the class ConsoleTest, method shouldPrintTypesList:
@Test
public void shouldPrintTypesList() {
  // Given:
  final KsqlEntityList entities = new KsqlEntityList(ImmutableList.of(new TypeList(
      "statement",
      ImmutableMap.of(
          "typeB", new SchemaInfo(SqlBaseType.ARRAY, null, new SchemaInfo(SqlBaseType.STRING, null, null)),
          "typeA", new SchemaInfo(SqlBaseType.STRUCT,
              ImmutableList.of(new FieldInfo("f1", new SchemaInfo(SqlBaseType.STRING, null, null), Optional.empty())),
              null),
          "typeC", new SchemaInfo(SqlBaseType.DECIMAL, null, null,
              ImmutableMap.of("precision", 10, "scale", 9))))));

  // When:
  console.printKsqlEntityList(entities);

  // Then:
  final String output = terminal.getOutputString();
  Approvals.verify(output, approvalOptions);
}
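For reference, the snippets on this page exercise three shapes of SchemaInfo: a plain type, a nested type built from a member schema or struct fields, and a parameterized type. A minimal sketch collecting those constructor forms in one place, grounded only in the calls shown above; the class name is made up and the SqlBaseType import path is an assumption that may differ between ksql versions:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.confluent.ksql.rest.entity.FieldInfo;
import io.confluent.ksql.rest.entity.SchemaInfo;
import io.confluent.ksql.schema.ksql.types.SqlBaseType; // import path assumed; adjust to the ksql version in use
import java.util.Optional;

public final class SchemaInfoShapes {

  // Plain primitive type: no fields, no member schema.
  static final SchemaInfo STRING = new SchemaInfo(SqlBaseType.STRING, null, null);

  // Container type: ARRAY<STRING> carries its element type as the member schema.
  static final SchemaInfo ARRAY_OF_STRING = new SchemaInfo(SqlBaseType.ARRAY, null, STRING);

  // Struct type: the field list describes the members, the member schema is null.
  static final SchemaInfo STRUCT = new SchemaInfo(
      SqlBaseType.STRUCT,
      ImmutableList.of(new FieldInfo("f1", STRING, Optional.empty())),
      null);

  // Parameterized type: the four-argument form adds a parameter map, e.g. DECIMAL(10, 9).
  static final SchemaInfo DECIMAL_10_9 = new SchemaInfo(
      SqlBaseType.DECIMAL, null, null,
      ImmutableMap.of("precision", 10, "scale", 9));

  private SchemaInfoShapes() {
  }
}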