Example usage of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in the ksql project by confluentinc: class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullValues.
@Test
public void shouldSerializeRowWithNullValues() {
  // Serializing a row that contains nulls for nullable columns must complete without throwing.
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // Fix: parameterize the list instead of using a raw List; with the raw type gone,
  // the @SuppressWarnings("unchecked") annotation is no longer needed.
  List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, null, null);
  GenericRow genericRow = new GenericRow(columns);
  ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
}
Example usage of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in the ksql project by confluentinc: class KsqlGenericRowAvroSerializerTest, method shouldFailForIncompatibleType.
@Test
public void shouldFailForIncompatibleType() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // "10.0" is a String where the schema presumably expects a double, so serialization must fail.
  // Fix: parameterized List<Object> instead of a raw List.
  List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", "10.0", new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  try {
    // Fix: the return value was previously assigned to an unused, misspelled local
    // ("serilizedRow"); drop the assignment entirely.
    ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
    Assert.fail("Did not fail for incompatible types.");
  } catch (Exception e) {
    assertThat(e.getMessage(), equalTo(
        "org.apache.kafka.common.errors.SerializationException: Error serializing Avro message"));
  }
}
Example usage of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in the incubator-gobblin project by apache: class ConfluentKafkaSchemaRegistryTest, method doTestRegisterAndGetLatest.
private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {
  // Register two distinct schemas under the same topic and verify that only the
  // most recently registered one is reported as the latest.
  SchemaRegistryClient mockClient = new MockSchemaRegistryClient();
  KafkaSchemaRegistry<Integer, Schema> registry =
      new ConfluentKafkaSchemaRegistry(properties, mockClient);

  Schema firstSchema = SchemaBuilder.record(TEST_RECORD_NAME + "1")
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();
  Schema secondSchema = SchemaBuilder.record(TEST_RECORD_NAME + "2")
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  registry.register(firstSchema, TEST_TOPIC_NAME);
  registry.register(secondSchema, TEST_TOPIC_NAME);

  Assert.assertNotEquals(firstSchema, registry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
  Assert.assertEquals(secondSchema, registry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
}
Example usage of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in the ksql project by confluentinc: class SchemaKStreamTest, method testGroupByKey.
@Test
public void testGroupByKey() {
  // Grouping the stream by COL0 should make COL0 the key field of the grouped stream.
  String selectQuery = "SELECT col0, col1 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  initialSchemaKStream = new SchemaKStream(
      logicalPlan.getTheSourceNode().getSchema(), kStream, ksqlStream.getKeyField(),
      new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry,
      new MockSchemaRegistryClient());
  Expression keyExpression = new DereferenceExpression(
      new QualifiedNameReference(QualifiedName.of("TEST1")), "COL0");
  KsqlTopicSerDe ksqlTopicSerDe = new KsqlJsonTopicSerDe();
  Serde<GenericRow> rowSerde = ksqlTopicSerDe.getGenericRowSerde(
      initialSchemaKStream.getSchema(), null, false, null);
  List<Expression> groupByExpressions = Arrays.asList(keyExpression);
  SchemaKGroupedStream groupedSchemaKStream =
      initialSchemaKStream.groupBy(Serdes.String(), rowSerde, groupByExpressions);
  // Fix: JUnit's assertEquals contract is (expected, actual); the original arguments were
  // swapped, which yields a misleading "expected/actual" message on failure.
  Assert.assertEquals("COL0", groupedSchemaKStream.getKeyField().name());
}
Example usage of io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient in the ksql project by confluentinc: class SchemaKStreamTest, method testFilter.
@Test
public void testFilter() throws Exception {
  // Filtering must preserve the source schema (all six fields, same order, same types)
  // and record the original stream as the filtered stream's source.
  String selectQuery = "SELECT col0, col2, col3 FROM test1 WHERE col0 > 100;";
  PlanNode logicalPlan = planBuilder.buildLogicalPlan(selectQuery);
  FilterNode filterNode = (FilterNode) logicalPlan.getSources().get(0).getSources().get(0);
  initialSchemaKStream = new SchemaKStream(
      logicalPlan.getTheSourceNode().getSchema(), kStream, ksqlStream.getKeyField(),
      new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry,
      new MockSchemaRegistryClient());
  SchemaKStream filteredSchemaKStream = initialSchemaKStream.filter(filterNode.getPredicate());
  // Hoist the repeated getSchema() call into a local.
  Schema filteredSchema = filteredSchemaKStream.getSchema();
  // Fix: assertTrue(a == b) produces no diagnostic on failure; use assertEquals for value
  // comparisons and assertSame for the identity (==) comparisons the original intended.
  Assert.assertEquals(6, filteredSchema.fields().size());
  Assert.assertSame(filteredSchema.fields().get(0), filteredSchema.field("TEST1.COL0"));
  Assert.assertSame(filteredSchema.fields().get(1), filteredSchema.field("TEST1.COL1"));
  Assert.assertSame(filteredSchema.fields().get(2), filteredSchema.field("TEST1.COL2"));
  Assert.assertSame(filteredSchema.fields().get(3), filteredSchema.field("TEST1.COL3"));
  Assert.assertEquals(Schema.Type.INT64, filteredSchema.field("TEST1.COL0").schema().type());
  Assert.assertEquals(Schema.Type.STRING, filteredSchema.field("TEST1.COL1").schema().type());
  Assert.assertEquals(Schema.Type.STRING, filteredSchema.field("TEST1.COL2").schema().type());
  Assert.assertEquals(Schema.Type.FLOAT64, filteredSchema.field("TEST1.COL3").schema().type());
  Assert.assertSame(initialSchemaKStream, filteredSchemaKStream.getSourceSchemaKStreams().get(0));
}
Aggregations