use of io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe in project ksql by confluentinc.
the class AvroUtilTest method shouldFailForInvalidResultAvroSchema.
@Test
public void shouldFailForInvalidResultAvroSchema() throws IOException, RestClientException {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  KsqlTopic resultTopic = new KsqlTopic("testTopic", "testTopic", new KsqlAvroTopicSerDe());
  Schema resultSchema = SerDeUtil.getSchemaFromAvro(ordersAveroSchemaStr);
  PersistentQueryMetadata persistentQueryMetadata = new PersistentQueryMetadata(
      "", null, null, "", null, DataSource.DataSourceType.KSTREAM, "",
      mock(KafkaTopicClient.class), resultSchema, resultTopic, null);
  // Stub the schema registry so the compatibility check reports failure.
  expect(schemaRegistryClient.testCompatibility(anyString(), anyObject())).andReturn(false);
  replay(schemaRegistryClient);
  try {
    avroUtil.validatePersistentQueryResults(persistentQueryMetadata, schemaRegistryClient);
    fail();
  } catch (Exception e) {
    assertThat("Incorrect exception message",
        "Cannot register avro schema for testTopic since it is not valid for schema registry.",
        equalTo(e.getMessage()));
  }
}
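These AvroUtilTest cases reference an ordersAveroSchemaStr fixture defined elsewhere in the test class. A minimal sketch of the kind of Avro record schema string assumed here (the field names are illustrative only, not the project's actual fixture):
// Illustrative only: an Avro record schema string of roughly this shape is assumed.
private static final String ordersAveroSchemaStr = "{"
    + "\"namespace\": \"ksql\", \"name\": \"orders\", \"type\": \"record\","
    + "\"fields\": ["
    + "  {\"name\": \"ordertime\", \"type\": \"long\"},"
    + "  {\"name\": \"orderid\", \"type\": \"long\"},"
    + "  {\"name\": \"itemid\", \"type\": \"string\"},"
    + "  {\"name\": \"orderunits\", \"type\": \"double\"}"
    + "]}";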
use of io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe in project ksql by confluentinc.
the class KsqlStructuredDataOutputNode method buildStream.
@Override
public SchemaKStream buildStream(
    final StreamsBuilder builder,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient,
    final FunctionRegistry functionRegistry,
    final Map<String, Object> props,
    final SchemaRegistryClient schemaRegistryClient) {
  final SchemaKStream schemaKStream = getSource().buildStream(
      builder, ksqlConfig, kafkaTopicClient, functionRegistry, props, schemaRegistryClient);
  final Set<Integer> rowkeyIndexes = SchemaUtil.getRowTimeRowKeyIndexes(getSchema());
  final Builder outputNodeBuilder = Builder.from(this);
  final Schema schema = SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(getSchema());
  outputNodeBuilder.withSchema(schema);
  // For Avro sinks, attach an Avro serde to the result topic.
  if (getTopicSerde() instanceof KsqlAvroTopicSerDe) {
    addAvroSchemaToResultTopic(outputNodeBuilder);
  }
  // Copy any sink partition/replica overrides from the output properties into the config.
  final Map<String, Object> outputProperties = getOutputProperties();
  if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY)) {
    ksqlConfig.put(
        KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY,
        outputProperties.get(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY));
  }
  if (outputProperties.containsKey(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY)) {
    ksqlConfig.put(
        KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY,
        outputProperties.get(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY));
  }
  final SchemaKStream result = createOutputStream(
      schemaKStream, outputNodeBuilder, functionRegistry, outputProperties, schemaRegistryClient);
  final KsqlStructuredDataOutputNode noRowKey = outputNodeBuilder.build();
  createSinkTopic(noRowKey.getKafkaTopicName(), ksqlConfig, kafkaTopicClient, shoulBeCompacted(result));
  result.into(
      noRowKey.getKafkaTopicName(),
      noRowKey.getKsqlTopic().getKsqlTopicSerDe()
          .getGenericRowSerde(noRowKey.getSchema(), ksqlConfig, false, schemaRegistryClient),
      rowkeyIndexes);
  result.setOutputNode(
      outputNodeBuilder
          .withSchema(SchemaUtil.addImplicitRowTimeRowKeyToSchema(noRowKey.getSchema()))
          .build());
  return result;
}
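The partition and replica overrides are taken from the sink's output properties and copied into the KsqlConfig before the sink topic is created. A minimal sketch of supplying those overrides (assumes java.util.HashMap and java.util.Map are imported; the concrete value types are an assumption):
// Sketch only: sink partition/replica overrides passed in via the output properties.
final Map<String, Object> outputProperties = new HashMap<>();
outputProperties.put(KsqlConfig.SINK_NUMBER_OF_PARTITIONS_PROPERTY, 4);
outputProperties.put(KsqlConfig.SINK_NUMBER_OF_REPLICAS_PROPERTY, (short) 2);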
use of io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe in project ksql by confluentinc.
the class AvroUtilTest method shouldValidatePersistentQueryResultCorrectly.
@Test
public void shouldValidatePersistentQueryResultCorrectly() throws IOException, RestClientException {
  SchemaRegistryClient schemaRegistryClient = mock(SchemaRegistryClient.class);
  KsqlTopic resultTopic = new KsqlTopic("testTopic", "testTopic", new KsqlAvroTopicSerDe());
  Schema resultSchema = SerDeUtil.getSchemaFromAvro(ordersAveroSchemaStr);
  PersistentQueryMetadata persistentQueryMetadata = new PersistentQueryMetadata(
      "", null, null, "", null, DataSource.DataSourceType.KSTREAM, "",
      mock(KafkaTopicClient.class), resultSchema, resultTopic, null);
  org.apache.avro.Schema.Parser parser = new org.apache.avro.Schema.Parser();
  org.apache.avro.Schema avroSchema = parser.parse(ordersAveroSchemaStr);
  // Stub the schema registry so the compatibility check reports success.
  expect(schemaRegistryClient.testCompatibility(anyString(), EasyMock.isA(avroSchema.getClass())))
      .andReturn(true);
  replay(schemaRegistryClient);
  avroUtil.validatePersistentQueryResults(persistentQueryMetadata, schemaRegistryClient);
}
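The passing case mirrors the failing one above, differing only in the stubbed compatibility result. As an optional follow-up, the mock could also be verified after the call so the test fails if the compatibility check is never invoked (assumes the standard org.easymock.EasyMock.verify import):
// Optional follow-up sketch: confirm the stubbed compatibility check was actually called.
verify(schemaRegistryClient);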
use of io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe in project ksql by confluentinc.
the class Analyzer method analyzeNonStdOutSink.
private void analyzeNonStdOutSink() {
  List<Pair<StructuredDataSource, String>> fromDataSources = analysis.getFromDataSources();
  StructuredDataSource intoStructuredDataSource = analysis.getInto();
  String intoKafkaTopicName = analysis.getIntoKafkaTopicName();
  if (intoKafkaTopicName == null) {
    intoKafkaTopicName = intoStructuredDataSource.getName();
  }
  // Default to the serde of the first source; an explicit INTO format overrides it below.
  KsqlTopicSerDe intoTopicSerde =
      fromDataSources.get(0).getLeft().getKsqlTopic().getKsqlTopicSerDe();
  if (analysis.getIntoFormat() != null) {
    switch (analysis.getIntoFormat().toUpperCase()) {
      case DataSource.AVRO_SERDE_NAME:
        intoTopicSerde = new KsqlAvroTopicSerDe();
        break;
      case DataSource.JSON_SERDE_NAME:
        intoTopicSerde = new KsqlJsonTopicSerDe();
        break;
      case DataSource.DELIMITED_SERDE_NAME:
        intoTopicSerde = new KsqlDelimitedTopicSerDe();
        break;
      default:
        throw new KsqlException(String.format("Unsupported format: %s", analysis.getIntoFormat()));
    }
  } else if (intoTopicSerde instanceof KsqlAvroTopicSerDe) {
    // No format given: if the source serde is Avro, use a fresh Avro serde for the sink.
    intoTopicSerde = new KsqlAvroTopicSerDe();
  }
  KsqlTopic newIntoKsqlTopic = new KsqlTopic(intoKafkaTopicName, intoKafkaTopicName, intoTopicSerde);
  KsqlStream intoKsqlStream = new KsqlStream(
      sqlExpression, intoStructuredDataSource.getName(), null, null, null, newIntoKsqlTopic);
  analysis.setInto(intoKsqlStream);
}
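The switch maps the INTO format name onto a concrete KsqlTopicSerDe. The same mapping, pulled out as a standalone sketch (the helper name serdeForFormat is hypothetical, not part of the project):
// Hypothetical helper illustrating the format-name-to-serde mapping used above.
private static KsqlTopicSerDe serdeForFormat(final String format) {
  switch (format.toUpperCase()) {
    case DataSource.AVRO_SERDE_NAME:
      return new KsqlAvroTopicSerDe();
    case DataSource.JSON_SERDE_NAME:
      return new KsqlJsonTopicSerDe();
    case DataSource.DELIMITED_SERDE_NAME:
      return new KsqlDelimitedTopicSerDe();
    default:
      throw new KsqlException(String.format("Unsupported format: %s", format));
  }
}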
use of io.confluent.ksql.serde.avro.KsqlAvroTopicSerDe in project ksql by confluentinc.
the class KsqlStructuredDataOutputNode method addAvroSchemaToResultTopic.
private void addAvroSchemaToResultTopic(final Builder builder) {
  final KsqlAvroTopicSerDe ksqlAvroTopicSerDe = new KsqlAvroTopicSerDe();
  builder.withKsqlTopic(new KsqlTopic(
      getKsqlTopic().getName(), getKsqlTopic().getKafkaTopicName(), ksqlAvroTopicSerDe));
}
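The builder keeps both the KSQL topic name and the underlying Kafka topic name and only swaps in a fresh KsqlAvroTopicSerDe, so the output node rebuilt in buildStream above treats its result topic as an Avro sink.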