
Example 61 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlDelimitedDeserializerTest, the method shouldDeserializeDelimitedCorrectly:

@Test
public void shouldDeserializeDelimitedCorrectly() {
    String rowString = "1511897796092,1,item_1,10.0\r\n";
    KsqlDelimitedDeserializer deserializer = new KsqlDelimitedDeserializer(orderSchema);
    GenericRow genericRow = deserializer.deserialize("", rowString.getBytes());
    assertThat(genericRow.getColumns().size(), equalTo(4));
    assertThat((Long) genericRow.getColumns().get(0), equalTo(1511897796092L));
    assertThat((Long) genericRow.getColumns().get(1), equalTo(1L));
    assertThat((String) genericRow.getColumns().get(2), equalTo("item_1"));
    assertThat((Double) genericRow.getColumns().get(3), equalTo(10.0));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) Test(org.junit.Test)
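
The orderSchema the test passes to the deserializer is not part of the snippet. A minimal sketch of a compatible Kafka Connect schema, assuming the four columns are ORDERTIME, ORDERID, ITEMID and ORDERUNITS (the field names and optional types are assumptions based on the other examples, not taken from this test), could look like this:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Hypothetical reconstruction of orderSchema; field names and optionality are assumptions.
Schema orderSchema = SchemaBuilder.struct()
    .field("ORDERTIME", Schema.OPTIONAL_INT64_SCHEMA)    // 1511897796092
    .field("ORDERID", Schema.OPTIONAL_INT64_SCHEMA)       // 1
    .field("ITEMID", Schema.OPTIONAL_STRING_SCHEMA)       // "item_1"
    .field("ORDERUNITS", Schema.OPTIONAL_FLOAT64_SCHEMA)  // 10.0
    .build();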

Example 62 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlJsonSerializerTest, the method shouldSerializeRowCorrectly:

@Test
public void shouldSerializeRowCorrectly() {
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
    GenericRow genericRow = new GenericRow(columns);
    KsqlJsonSerializer serializer = new KsqlJsonSerializer(orderSchema);
    byte[] bytes = serializer.serialize("t1", genericRow);
    String jsonString = new String(bytes);
    assertThat("Incorrect serialization.", jsonString, equalTo("{\"ORDERID\":1,\"ARRAYCOL\":[100.0],\"ORDERUNITS\":10.0,\"ORDERTIME\":1511897796092,\"MAPCOL\":{\"key1\":100.0},\"ITEMID\":\"item_1\"}"));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) List(java.util.List) Test(org.junit.Test)

Example 63 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlJsonSerializerTest, the method shouldSerializeRowWithNull:

@Test
public void shouldSerializeRowWithNull() {
    List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, null, null);
    GenericRow genericRow = new GenericRow(columns);
    KsqlJsonSerializer serializer = new KsqlJsonSerializer(orderSchema);
    byte[] bytes = serializer.serialize("t1", genericRow);
    String jsonString = new String(bytes);
    assertThat("Incorrect serialization.", jsonString, equalTo("{\"ORDERID\":1,\"ARRAYCOL\":null,\"ORDERUNITS\":10.0,\"ORDERTIME\":1511897796092,\"MAPCOL\":null,\"ITEMID\":\"item_1\"}"));
}
Also used : GenericRow(io.confluent.ksql.GenericRow) List(java.util.List) Test(org.junit.Test)
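
Examples 62 and 63 both serialize against an orderSchema that, besides the four order columns, must contain an array column (ARRAYCOL) and a map column (MAPCOL) to match the expected JSON keys. A sketch of such a schema, again assuming optional Kafka Connect types (this reconstruction is not taken from the test source), might be:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// Hypothetical orderSchema for the JSON serializer tests; field names match the expected JSON keys.
Schema orderSchema = SchemaBuilder.struct()
    .field("ORDERTIME", Schema.OPTIONAL_INT64_SCHEMA)
    .field("ORDERID", Schema.OPTIONAL_INT64_SCHEMA)
    .field("ITEMID", Schema.OPTIONAL_STRING_SCHEMA)
    .field("ORDERUNITS", Schema.OPTIONAL_FLOAT64_SCHEMA)
    .field("ARRAYCOL", SchemaBuilder.array(Schema.FLOAT64_SCHEMA).optional().build())
    .field("MAPCOL", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.FLOAT64_SCHEMA).optional().build())
    .build();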

Example 64 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlGenericRowAvroDeserializer, the method deserialize:

@SuppressWarnings("unchecked")
@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    GenericRow genericRow;
    try {
        GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize(topic, bytes);
        Map<String, String> caseInsensitiveFieldNameMap = getCaseInsensitiveFieldMap(genericRecord);
        List<Object> columns = new ArrayList<>();
        for (Field field : schema.fields()) {
            // Set the missing fields to null. We can make this configurable later.
            if (genericRecord.get(caseInsensitiveFieldNameMap.get(field.name().toUpperCase())) == null) {
                columns.add(null);
            } else {
                columns.add(enforceFieldType(field.schema(), genericRecord.get(caseInsensitiveFieldNameMap.get(field.name().toUpperCase()))));
            }
        }
        genericRow = new GenericRow(columns);
    } catch (Exception e) {
        throw new SerializationException(e);
    }
    return genericRow;
}
Also used : GenericRow(io.confluent.ksql.GenericRow) Field(org.apache.kafka.connect.data.Field) SerializationException(org.apache.kafka.common.errors.SerializationException) ArrayList(java.util.ArrayList) List(java.util.List) GenericRecord(org.apache.avro.generic.GenericRecord) KsqlException(io.confluent.ksql.util.KsqlException)
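
The getCaseInsensitiveFieldMap helper called above is not included in the snippet. A plausible sketch, assuming its only job is to map upper-cased Avro field names back to their actual spelling in the record's schema (the method body below is an assumption, not the project's implementation), would be:

// Hypothetical helper: builds a lookup from upper-cased Avro field names to
// the actual field names in the record's schema, so Connect field names can
// be resolved case-insensitively. Uses java.util.HashMap and org.apache.avro.Schema.
private Map<String, String> getCaseInsensitiveFieldMap(final GenericRecord genericRecord) {
    Map<String, String> fieldNames = new HashMap<>();
    for (org.apache.avro.Schema.Field avroField : genericRecord.getSchema().getFields()) {
        fieldNames.put(avroField.name().toUpperCase(), avroField.name());
    }
    return fieldNames;
}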

Example 65 with GenericRow

use of io.confluent.ksql.GenericRow in project ksql by confluentinc.

From the class KsqlDelimitedDeserializer, the method deserialize:

@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    String recordCsvString = new String(bytes, StandardCharsets.UTF_8);
    try {
        List<CSVRecord> csvRecords = CSVParser.parse(recordCsvString, CSVFormat.DEFAULT).getRecords();
        if (csvRecords == null || csvRecords.isEmpty()) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        CSVRecord csvRecord = csvRecords.get(0);
        if (csvRecord == null || csvRecord.size() == 0) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        List<Object> columns = new ArrayList<>();
        if (csvRecord.size() != schema.fields().size()) {
            throw new KsqlException(String.format("Unexpected field count, csvFields:%d schemaFields:%d line: %s", csvRecord.size(), schema.fields().size(), recordCsvString));
        }
        for (int i = 0; i < csvRecord.size(); i++) {
            if (csvRecord.get(i) == null) {
                columns.add(null);
            } else {
                columns.add(enforceFieldType(schema.fields().get(i).schema(), csvRecord.get(i)));
            }
        }
        return new GenericRow(columns);
    } catch (Exception e) {
        throw new SerializationException("Exception in deserializing the delimited row: " + recordCsvString, e);
    }
}
Also used : GenericRow(io.confluent.ksql.GenericRow) SerializationException(org.apache.kafka.common.errors.SerializationException) ArrayList(java.util.ArrayList) CSVRecord(org.apache.commons.csv.CSVRecord) KsqlException(io.confluent.ksql.util.KsqlException)
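
The enforceFieldType call in this deserializer is likewise not shown. For the delimited format it presumably just parses the CSV string into the Java type implied by the Connect field schema; a sketch under that assumption (handling only the primitive types used in these examples, and not the project's actual implementation) could be:

// Hypothetical coercion of one CSV field into the Java type implied by the
// Connect field schema; only primitive types are handled in this sketch.
private Object enforceFieldType(final Schema fieldSchema, final String value) {
    switch (fieldSchema.type()) {
        case BOOLEAN:
            return Boolean.parseBoolean(value);
        case INT32:
            return Integer.parseInt(value);
        case INT64:
            return Long.parseLong(value);
        case FLOAT64:
            return Double.parseDouble(value);
        case STRING:
            return value;
        default:
            throw new KsqlException("Type is not supported for the delimited format: " + fieldSchema.type());
    }
}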

Aggregations

GenericRow (io.confluent.ksql.GenericRow) 65
Test (org.junit.Test) 38
HashMap (java.util.HashMap) 27
Schema (org.apache.kafka.connect.data.Schema) 19
List (java.util.List) 15
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) 15
ArrayList (java.util.ArrayList) 11
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient) 9
IntegrationTest (io.confluent.common.utils.IntegrationTest) 8
SchemaRegistryClient (io.confluent.kafka.schemaregistry.client.SchemaRegistryClient) 8
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 5
KsqlTopicSerDe (io.confluent.ksql.serde.KsqlTopicSerDe) 5
KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient) 5
KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl) 5
KsqlConfig (io.confluent.ksql.util.KsqlConfig) 5
Map (java.util.Map) 5
GenericRecord (org.apache.avro.generic.GenericRecord) 4
Windowed (org.apache.kafka.streams.kstream.Windowed) 4
KafkaAvroDeserializer (io.confluent.kafka.serializers.KafkaAvroDeserializer) 3
DereferenceExpression (io.confluent.ksql.parser.tree.DereferenceExpression) 3