Use of io.confluent.ksql.schema.ksql.PhysicalSchema in project ksql by confluentinc.
The class KsqlResourceFunctionalTest, method shouldInsertIntoValuesForAvroTopic.
@Test
public void shouldInsertIntoValuesForAvroTopic() throws Exception {
// Given:
final PhysicalSchema schema = PhysicalSchema.from(
    LogicalSchema.builder()
        .keyColumn(ColumnName.of("AUTHOR"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("TITLE"), SqlTypes.STRING)
        .build(),
    SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES),
    SerdeFeatures.of());
final SchemaTranslator translator = new AvroFormat()
    .getSchemaTranslator(ImmutableMap.of(ConnectProperties.FULL_SCHEMA_NAME, "books_value"));
final ParsedSchema keySchema = translator.toParsedSchema(
    PersistenceSchema.from(schema.logicalSchema().key(), schema.keySchema().features()));
TEST_HARNESS.getSchemaRegistryClient().register(KsqlConstants.getSRSubject("books", true), keySchema);
final ParsedSchema valueSchema = translator.toParsedSchema(
    PersistenceSchema.from(schema.logicalSchema().value(), schema.valueSchema().features()));
TEST_HARNESS.getSchemaRegistryClient().register(KsqlConstants.getSRSubject("books", false), valueSchema);
// When:
final List<KsqlEntity> results = makeKsqlRequest(""
    + "CREATE STREAM books (author VARCHAR KEY, title VARCHAR) "
    + "WITH (kafka_topic='books', format='avro', partitions=1);"
    + " "
    + "INSERT INTO BOOKS (ROWTIME, author, title) VALUES (123, 'Metamorphosis', 'Franz Kafka');");
// Then:
assertSuccessful(results);
TEST_HARNESS.verifyAvailableRows(
    "books",
    contains(matches(genericKey("Metamorphosis"), genericRow("Franz Kafka"), 0, 0L, 123L)),
    FormatFactory.AVRO,
    FormatFactory.AVRO,
    schema);
}
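The pattern worth noting here: the test never hand-writes Avro schemas. It splits the PhysicalSchema into its key and value PersistenceSchema halves and lets the format's SchemaTranslator produce the ParsedSchema for each Schema Registry subject. A minimal sketch of that split, using only calls that appear in the test above (the column names are illustrative):
// Build the physical schema: logical columns plus per-side serde features.
final PhysicalSchema schema = PhysicalSchema.from(
    LogicalSchema.builder()
        .keyColumn(ColumnName.of("ID"), SqlTypes.STRING)        // illustrative column
        .valueColumn(ColumnName.of("AMOUNT"), SqlTypes.BIGINT)  // illustrative column
        .build(),
    SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES),  // key-side features
    SerdeFeatures.of());                            // value-side features
// Each side is persisted independently, pairing its columns with its own features:
final PersistenceSchema keySide = PersistenceSchema.from(
    schema.logicalSchema().key(), schema.keySchema().features());
final PersistenceSchema valueSide = PersistenceSchema.from(
    schema.logicalSchema().value(), schema.valueSchema().features());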
Use of io.confluent.ksql.schema.ksql.PhysicalSchema in project ksql by confluentinc.
The class StandaloneExecutorFunctionalTest, method shouldHandleAvroWithSchemas.
@Test
public void shouldHandleAvroWithSchemas() {
// Given:
givenScript(""
    + "CREATE STREAM S (ROWKEY STRING KEY, ORDERTIME BIGINT)"
    + " WITH (kafka_topic='" + AVRO_TOPIC + "', value_format='avro');\n"
    + "\n"
    + "CREATE TABLE T (ROWKEY STRING PRIMARY KEY, ORDERTIME BIGINT) "
    + " WITH (kafka_topic='" + AVRO_TOPIC + "', value_format='avro');\n"
    + "\n"
    + "SET 'auto.offset.reset' = 'earliest';"
    + "\n"
    + "CREATE STREAM " + s1 + " AS SELECT * FROM S;\n"
    + "\n"
    + "INSERT INTO " + s1 + " SELECT * FROM S;\n"
    + "\n"
    + "CREATE TABLE " + t1 + " AS SELECT * FROM T;\n"
    + "\n"
    + "UNSET 'auto.offset.reset';"
    + "\n"
    + "CREATE STREAM " + s2 + " AS SELECT * FROM S;\n");
final PhysicalSchema dataSchema = PhysicalSchema.from(
    LogicalSchema.builder()
        .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
        .valueColumn(ColumnName.of("ORDERTIME"), SqlTypes.BIGINT)
        .build(),
    SerdeFeatures.of(),
    SerdeFeatures.of());
// When:
standalone.startAsync();
// Then:
// The CSAS and the INSERT INTO both write into S1:
TEST_HARNESS.verifyAvailableRows(s1, DATA_SIZE * 2, KAFKA, AVRO, dataSchema);
// Only the CTAS writes into T1:
TEST_HARNESS.verifyAvailableUniqueRows(t1, UNIQUE_DATA_SIZE, KAFKA, AVRO, dataSchema);
// S2 should be empty as 'auto.offset.reset' was unset:
TEST_HARNESS.verifyAvailableUniqueRows(s2, 0, KAFKA, AVRO, dataSchema);
}
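Unlike the previous test, this schema passes SerdeFeatures.of() for both sides, so the single key and value columns keep the format's default wrapping. My reading of the feature (an assumption, not stated in these snippets) is that SerdeFeature.UNWRAP_SINGLES serializes a lone column as a bare value instead of a field inside an enclosing record:
final LogicalSchema logicalSchema = LogicalSchema.builder()
    .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING)
    .valueColumn(ColumnName.of("ORDERTIME"), SqlTypes.BIGINT)
    .build();
// Key serialized as a bare STRING (assumed semantics of UNWRAP_SINGLES):
final PhysicalSchema unwrappedKey = PhysicalSchema.from(
    logicalSchema, SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES), SerdeFeatures.of());
// Key keeps the format's default representation, as in the test above:
final PhysicalSchema defaultKey = PhysicalSchema.from(
    logicalSchema, SerdeFeatures.of(), SerdeFeatures.of());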
Use of io.confluent.ksql.schema.ksql.PhysicalSchema in project ksql by confluentinc.
The class SourceBuilderBase, method buildTable.
KTableHolder<GenericKey> buildTable(
    final RuntimeBuildContext buildContext,
    final SourceStep<KTableHolder<GenericKey>> source,
    final ConsumedFactory consumedFactory,
    final MaterializedFactory materializedFactory,
    final PlanInfo planInfo) {
final PhysicalSchema physicalSchema = getPhysicalSchema(source);
final Serde<GenericRow> valueSerde = getValueSerde(buildContext, source, physicalSchema);
final Serde<GenericKey> keySerde = getKeySerde(source, physicalSchema, buildContext);
final Consumed<GenericKey, GenericRow> consumed = buildSourceConsumed(
    source, keySerde, valueSerde, AutoOffsetReset.EARLIEST, buildContext, consumedFactory);
final String stateStoreName = tableChangeLogOpName(source.getProperties());
final Materialized<GenericKey, GenericRow, KeyValueStore<Bytes, byte[]>> materialized =
    buildTableMaterialized(source, buildContext, materializedFactory, keySerde, valueSerde, stateStoreName);
final KTable<GenericKey, GenericRow> ktable = buildKTable(
    source, buildContext, consumed, GenericKey::values, materialized, valueSerde, stateStoreName, planInfo);
final LogicalSchema stateStoreSchema =
    source.getSourceSchema().withPseudoColumnsToMaterialize(source.getPseudoColumnVersion());
return KTableHolder.materialized(
    ktable,
    buildSchema(source, false),
    ExecutionKeyFactory.unwindowed(buildContext),
    MaterializationInfo.builder(stateStoreName, stateStoreSchema));
}
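Each builder method in this family starts the same way: resolve the step's PhysicalSchema, then derive concrete key and value serdes from it. A sketch of what a getValueSerde helper plausibly delegates to; the buildValueSerde overload and the getFormats()/getProperties() accessors are assumptions inferred from the surrounding calls, not confirmed by this snippet:
// Hypothetical helper, assuming RuntimeBuildContext exposes a buildValueSerde
// overload taking the value format, the physical schema, and a query context:
private static Serde<GenericRow> getValueSerde(
    final RuntimeBuildContext buildContext,
    final SourceStep<?> source,
    final PhysicalSchema physicalSchema) {
  return buildContext.buildValueSerde(
      source.getFormats().getValueFormat(),       // assumed accessor
      physicalSchema,
      source.getProperties().getQueryContext());  // assumed accessor
}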
Use of io.confluent.ksql.schema.ksql.PhysicalSchema in project ksql by confluentinc.
The class SourceBuilderV1, method buildStream.
public KStreamHolder<GenericKey> buildStream(
    final RuntimeBuildContext buildContext,
    final StreamSource source,
    final ConsumedFactory consumedFactory) {
final PhysicalSchema physicalSchema = getPhysicalSchema(source);
final Serde<GenericRow> valueSerde = getValueSerde(buildContext, source, physicalSchema);
final Serde<GenericKey> keySerde = getKeySerde(source, physicalSchema, buildContext);
final Consumed<GenericKey, GenericRow> consumed = buildSourceConsumed(
    source, keySerde, valueSerde, AutoOffsetReset.LATEST, buildContext, consumedFactory);
final KStream<GenericKey, GenericRow> kstream = buildKStream(
    source, buildContext, consumed, nonWindowedKeyGenerator(source.getSourceSchema()));
return new KStreamHolder<>(kstream, buildSchema(source, false), ExecutionKeyFactory.unwindowed(buildContext));
}
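Apart from the stream/table return types, the notable difference from buildTable above is the default offset reset: buildTable pins AutoOffsetReset.EARLIEST, presumably because a table must replay its topic from the beginning to materialize complete state, while buildStream defaults to AutoOffsetReset.LATEST.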
Use of io.confluent.ksql.schema.ksql.PhysicalSchema in project ksql by confluentinc.
The class SourceBuilderV1, method buildWindowedStream.
public KStreamHolder<Windowed<GenericKey>> buildWindowedStream(
    final RuntimeBuildContext buildContext,
    final WindowedStreamSource source,
    final ConsumedFactory consumedFactory) {
final PhysicalSchema physicalSchema = getPhysicalSchema(source);
final Serde<GenericRow> valueSerde = getValueSerde(buildContext, source, physicalSchema);
final WindowInfo windowInfo = source.getWindowInfo();
final Serde<Windowed<GenericKey>> keySerde = getWindowedKeySerde(source, physicalSchema, buildContext, windowInfo);
final Consumed<Windowed<GenericKey>, GenericRow> consumed = buildSourceConsumed(
    source, keySerde, valueSerde, AutoOffsetReset.LATEST, buildContext, consumedFactory);
final KStream<Windowed<GenericKey>, GenericRow> kstream = buildKStream(
    source, buildContext, consumed, windowedKeyGenerator(source.getSourceSchema()));
return new KStreamHolder<>(kstream, buildSchema(source, true), ExecutionKeyFactory.windowed(buildContext, windowInfo));
}
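The windowed variant reuses the same PhysicalSchema; only the key serde changes, wrapping the inner GenericKey serde according to the WindowInfo. As a rough analogue in plain Kafka Streams terms (this is not the ksql-internal path, and windowInfo.getSize() returning an Optional<Duration> is an assumption):
// Plain Kafka Streams equivalent of wrapping an existing inner key serde for
// time-windowed keys; ksql's getWindowedKeySerde encapsulates this plus the
// WindowInfo-driven configuration (session windows use SessionWindowedSerde).
final Serde<GenericKey> innerKeySerde = keySerdeFor(physicalSchema);  // hypothetical helper
final Serde<Windowed<GenericKey>> windowedKeySerde =
    new org.apache.kafka.streams.kstream.WindowedSerdes.TimeWindowedSerde<>(
        innerKeySerde,
        windowInfo.getSize().get().toMillis());  // assumed Optional<Duration>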