Example 36 with Field

use of org.apache.kafka.connect.data.Field in project beam by apache.

the class SourceRecordJson method loadBefore.

/**
 * Extracts the before field from the SourceRecord value.
 *
 * @return the extracted Event.Before, or null if the field is absent
 */
private Event.Before loadBefore() {
    @Nullable Struct before;
    try {
        before = (Struct) this.value.get("before");
    } catch (DataException e) {
        // The value schema has no "before" field at all.
        return null;
    }
    if (before == null) {
        return null;
    }
    // Copy each field of the struct into a plain map keyed by field name.
    Map<String, Object> fields = new HashMap<>();
    for (Field field : before.schema().fields()) {
        fields.put(field.name(), before.get(field));
    }
    return new Event.Before(fields);
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Field(org.apache.kafka.connect.data.Field) HashMap(java.util.HashMap) Nullable(org.checkerframework.checker.nullness.qual.Nullable) Struct(org.apache.kafka.connect.data.Struct)

Example 37 with Field

use of org.apache.kafka.connect.data.Field in project beam by apache.

the class SourceRecordJson method loadAfter.

/**
 * Extracts the after field from the SourceRecord value.
 *
 * @return the extracted Event.After, or null if the field is absent
 */
private Event.After loadAfter() {
    @Nullable Struct after;
    try {
        after = (Struct) this.value.get("after");
    } catch (DataException e) {
        // The value schema has no "after" field at all.
        return null;
    }
    if (after == null) {
        return null;
    }
    // Copy each field of the struct into a plain map keyed by field name.
    Map<String, Object> fields = new HashMap<>();
    for (Field field : after.schema().fields()) {
        fields.put(field.name(), after.get(field));
    }
    return new Event.After(fields);
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Field(org.apache.kafka.connect.data.Field) HashMap(java.util.HashMap) Nullable(org.checkerframework.checker.nullness.qual.Nullable) Struct(org.apache.kafka.connect.data.Struct)
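
The before and after extractors are symmetric, so one sketch covers both. The snippet below is a minimal sketch assuming a Debezium-style envelope whose before/after fields are optional structs; the rowSchema, envelope, and field names are illustrative, not the Beam originals.

// Illustrative schemas; in Beam the envelope arrives from the Debezium connector.
Schema rowSchema = SchemaBuilder.struct()
        .field("id", Schema.INT64_SCHEMA)
        .field("name", Schema.STRING_SCHEMA)
        .optional()
        .build();
Schema envelopeSchema = SchemaBuilder.struct()
        .field("before", rowSchema)
        .field("after", rowSchema)
        .build();
Struct after = new Struct(rowSchema).put("id", 1L).put("name", "alice");
Struct envelope = new Struct(envelopeSchema).put("after", after);
// With this.value = envelope: "before" was never set, so loadBefore() returns
// null, while loadAfter() copies {id=1, name=alice} into Event.After.

Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Struct(org.apache.kafka.connect.data.Struct)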

Example 38 with Field

use of org.apache.kafka.connect.data.Field in project dhis2-core by dhis2.

the class DbChangeEventHandler method getDataValueId.

private Serializable getDataValueId(SourceRecord sourceRecord) {
    Schema schema = sourceRecord.keySchema();
    // The key schema lists the columns of the data value's composite primary
    // key; the lookups below rely on their positional order.
    List<Field> allIdFields = schema.fields();
    Struct keyStruct = (Struct) sourceRecord.key();
    Long dataElementId = (Long) getIdFromField(keyStruct, allIdFields.get(0));
    Long periodId = (Long) getIdFromField(keyStruct, allIdFields.get(1));
    Long organisationUnitId = (Long) getIdFromField(keyStruct, allIdFields.get(2));
    Long categoryOptionComboId = (Long) getIdFromField(keyStruct, allIdFields.get(3));
    Long attributeOptionComboId = (Long) getIdFromField(keyStruct, allIdFields.get(4));
    DataElement dataElement = idObjectManager.get(DataElement.class, dataElementId);
    OrganisationUnit organisationUnit = idObjectManager.get(OrganisationUnit.class, organisationUnitId);
    CategoryOptionCombo categoryOptionCombo = idObjectManager.get(CategoryOptionCombo.class, categoryOptionComboId);
    CategoryOptionCombo attributeOptionCombo = idObjectManager.get(CategoryOptionCombo.class, attributeOptionComboId);
    Period period = periodService.getPeriod(periodId);
    // DataValue has a composite key, so the populated object itself serves as the ID.
    return new DataValue(dataElement, period, organisationUnit, categoryOptionCombo, attributeOptionCombo);
}
Also used : Field(org.apache.kafka.connect.data.Field) DataElement(org.hisp.dhis.dataelement.DataElement) OrganisationUnit(org.hisp.dhis.organisationunit.OrganisationUnit) DataValue(org.hisp.dhis.datavalue.DataValue) Schema(org.apache.kafka.connect.data.Schema) Period(org.hisp.dhis.period.Period) CategoryOptionCombo(org.hisp.dhis.category.CategoryOptionCombo) Struct(org.apache.kafka.connect.data.Struct)
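
The helper getIdFromField is referenced above but not included in this listing. A plausible reconstruction, assuming it simply reads the raw field value out of the key struct; the body below is a guess, and the actual dhis2-core implementation may differ.

// Hypothetical sketch of the helper referenced above: key columns map to
// serializable primitives such as Long, so the struct value is returned as-is.
private Serializable getIdFromField(Struct keyStruct, Field field) {
    return (Serializable) keyStruct.get(field);
}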

Example 39 with Field

use of org.apache.kafka.connect.data.Field in project dhis2-core by dhis2.

the class DbChangeEventHandler method getEntityIdFromFirstField.

/**
 * Tries to extract the entity ID from the source record.
 *
 * @param sourceRecord the SourceRecord containing information about the event.
 *
 * @return A Serializable object representing an entity ID.
 */
private Serializable getEntityIdFromFirstField(SourceRecord sourceRecord) {
    Schema schema = sourceRecord.keySchema();
    List<Field> allIdFields = schema.fields();
    Struct keyStruct = (Struct) sourceRecord.key();
    Field firstIdField = allIdFields.get(0);
    return getIdFromField(keyStruct, firstIdField);
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) Struct(org.apache.kafka.connect.data.Struct)
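
A quick way to exercise this extraction is to hand-build a key and wrap it in a SourceRecord. The key schema, field name, and topic below are illustrative stand-ins for what Debezium would attach.

// Illustrative key; a real record's key schema comes from the connector.
Schema keySchema = SchemaBuilder.struct()
        .field("datavalueid", Schema.INT64_SCHEMA)
        .build();
Struct key = new Struct(keySchema).put("datavalueid", 42L);
SourceRecord record = new SourceRecord(
        Collections.emptyMap(), Collections.emptyMap(), "dhis2.public.datavalue",
        keySchema, key, null, null);
// getEntityIdFromFirstField(record) reads field 0 ("datavalueid") and returns 42L.

Also used : Collections(java.util.Collections) SourceRecord(org.apache.kafka.connect.source.SourceRecord) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder)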

Example 40 with Field

use of org.apache.kafka.connect.data.Field in project connect-utils by jcustenborder.

the class Parser method parseJsonNode.

public Object parseJsonNode(Schema schema, JsonNode input) {
    checkSchemaAndInput(schema, input);
    if (null == input || input.isNull()) {
        return null;
    }
    log.trace("parseJsonNode() - schema.type() = {}", schema.type());
    Object result;
    if (Schema.Type.STRUCT == schema.type()) {
        Struct struct = new Struct(schema);
        Preconditions.checkState(input.isObject(), "struct schemas require an ObjectNode to be supplied for input.");
        log.trace("parseJsonNode() - Processing as struct.");
        final Set<String> processedFields = Sets.newHashSetWithExpectedSize(schema.fields().size());
        for (Field field : schema.fields()) {
            log.trace("parseJsonNode() - Processing field '{}:{}'", schema.name(), field.name());
            JsonNode fieldInput = input.findValue(field.name());
            try {
                Object convertedValue = parseJsonNode(field.schema(), fieldInput);
                struct.put(field, convertedValue);
            } catch (Exception ex) {
                throw new DataException(String.format("Exception thrown while processing %s:%s", schema.name(), field.name()), ex);
            }
            processedFields.add(field.name());
        }
        if (log.isTraceEnabled()) {
            final Set<String> jsonFieldNames = Sets.newLinkedHashSet(ImmutableList.copyOf(input.fieldNames()));
            Sets.SetView<String> difference = Sets.difference(jsonFieldNames, processedFields);
            if (!difference.isEmpty()) {
                log.trace("parseJsonNode() - Unprocessed fields for {}:\n{}", schema.name(), Joiner.on('\n').join(difference));
            }
        }
        result = struct;
    } else if (Schema.Type.ARRAY == schema.type()) {
        Preconditions.checkState(input.isArray(), "array schemas require an ArrayNode to be supplied for input.");
        log.trace("parseJsonNode() - Processing as array.");
        List<Object> array = new ArrayList<>();
        Iterator<JsonNode> arrayIterator = input.iterator();
        int index = 0;
        while (arrayIterator.hasNext()) {
            log.trace("parseJsonNode() - Processing index {}", index);
            JsonNode arrayInput = arrayIterator.next();
            try {
                Object arrayResult = parseJsonNode(schema.valueSchema(), arrayInput);
                array.add(arrayResult);
            } catch (Exception ex) {
                throw new DataException(String.format("Exception thrown while processing index %s", index), ex);
            }
            index++;
        }
        result = array;
    } else if (Schema.Type.MAP == schema.type()) {
        Preconditions.checkState(input.isObject(), "map schemas require an ObjectNode to be supplied for input.");
        log.trace("parseJsonNode() - Processing as map.");
        Map<Object, Object> map = new LinkedHashMap<>();
        Iterator<String> fieldNameIterator = input.fieldNames();
        while (fieldNameIterator.hasNext()) {
            final String fieldName = fieldNameIterator.next();
            final JsonNode fieldInput = input.findValue(fieldName);
            log.trace("parseJsonNode() - Processing key. Key='{}'", fieldName);
            final Object mapKey;
            try {
                mapKey = parseString(schema.keySchema(), fieldName);
            } catch (Exception ex) {
                throw new DataException(String.format("Exception thrown while parsing key. Key='%s'", fieldName), ex);
            }
            log.trace("parseJsonNode() - Processing value. Key='{}'", fieldName);
            final Object mapValue;
            try {
                mapValue = parseJsonNode(schema.valueSchema(), fieldInput);
            } catch (Exception ex) {
                throw new DataException(String.format("Exception thrown while parsing value. Key='%s'", fieldName), ex);
            }
            map.put(mapKey, mapValue);
        }
        result = map;
    } else {
        TypeParser parser = findParser(schema);
        try {
            result = parser.parseJsonNode(input, schema);
        } catch (Exception ex) {
            String message = String.format("Could not parse '%s' to %s", input, parser.expectedClass().getSimpleName());
            throw new DataException(message, ex);
        }
    }
    return result;
}
Also used : Float32TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Float32TypeParser) Float64TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Float64TypeParser) Int64TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Int64TypeParser) DecimalTypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.DecimalTypeParser) Int32TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Int32TypeParser) Int16TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Int16TypeParser) Int8TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.Int8TypeParser) TimestampTypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.TimestampTypeParser) DateTypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.DateTypeParser) TypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.TypeParser) TimeTypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.TimeTypeParser) StringTypeParser(com.github.jcustenborder.kafka.connect.utils.data.type.StringTypeParser) JsonNode(com.fasterxml.jackson.databind.JsonNode) DataException(org.apache.kafka.connect.errors.DataException) Struct(org.apache.kafka.connect.data.Struct) LinkedHashMap(java.util.LinkedHashMap) Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) Sets(com.google.common.collect.Sets) Iterator(java.util.Iterator) ArrayList(java.util.ArrayList) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List)
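
A minimal sketch of driving parseJsonNode end to end, assuming Parser's no-arg constructor registers the default type parsers listed above. The JSON literal and schema are illustrative, and the snippet is meant to run inside a method declared throws Exception, since readTree throws a checked exception.

ObjectMapper mapper = new ObjectMapper();
JsonNode input = mapper.readTree("{\"id\": 7, \"tags\": [\"a\", \"b\"]}");
Schema schema = SchemaBuilder.struct()
        .field("id", Schema.INT32_SCHEMA)
        .field("tags", SchemaBuilder.array(Schema.STRING_SCHEMA).build())
        .build();
Parser parser = new Parser();
Struct struct = (Struct) parser.parseJsonNode(schema, input);
// struct.getInt32("id") == 7; struct.getArray("tags") yields ["a", "b"].

Also used : ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder)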

Aggregations

Field (org.apache.kafka.connect.data.Field): 82
Struct (org.apache.kafka.connect.data.Struct): 38
Schema (org.apache.kafka.connect.data.Schema): 33
SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 17
DataException (org.apache.kafka.connect.errors.DataException): 14
List (java.util.List): 12
ArrayList (java.util.ArrayList): 11
Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct): 11
HashMap (java.util.HashMap): 10
Map (java.util.Map): 8
Test (org.junit.Test): 8
Date (java.util.Date): 7
ConnectSchema (org.apache.kafka.connect.data.ConnectSchema): 6
KsqlException (io.confluent.ksql.util.KsqlException): 5
BigDecimal (java.math.BigDecimal): 5
ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode): 4
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 4
SchemaKStream (io.confluent.ksql.structured.SchemaKStream): 4
ByteBuffer (java.nio.ByteBuffer): 4
JsonNode (com.fasterxml.jackson.databind.JsonNode): 3