Use of org.apache.kafka.connect.data.Field in project beam (by apache),
from the class SourceRecordJson, method loadBefore:
/**
 * Extracts the "before" struct from the Debezium SourceRecord value and flattens
 * it into a field-name → value map.
 *
 * @return the populated {@link Event.Before}, or {@code null} when the value has
 *     no "before" field (schema mismatch raises {@link DataException}) or the
 *     field is present but unset
 */
private Event.Before loadBefore() {
  final Struct beforeStruct;
  try {
    beforeStruct = (Struct) this.value.get("before");
  } catch (DataException e) {
    // The value's schema has no "before" field at all — treat as absent.
    return null;
  }
  if (beforeStruct == null) {
    return null;
  }
  final Map<String, Object> fieldValues = new HashMap<>();
  beforeStruct.schema().fields().forEach(f -> fieldValues.put(f.name(), beforeStruct.get(f)));
  return new Event.Before(fieldValues);
}
Use of org.apache.kafka.connect.data.Field in project beam (by apache),
from the class SourceRecordJson, method loadAfter:
/**
 * Extracts the "after" struct from the Debezium SourceRecord value and flattens
 * it into a field-name → value map.
 *
 * @return the populated {@link Event.After}, or {@code null} when the value has
 *     no "after" field (schema mismatch raises {@link DataException}) or the
 *     field is present but unset
 */
private Event.After loadAfter() {
  final Struct afterStruct;
  try {
    afterStruct = (Struct) this.value.get("after");
  } catch (DataException e) {
    // The value's schema has no "after" field at all — treat as absent.
    return null;
  }
  if (afterStruct == null) {
    return null;
  }
  final Map<String, Object> fieldValues = new HashMap<>();
  afterStruct.schema().fields().forEach(f -> fieldValues.put(f.name(), afterStruct.get(f)));
  return new Event.After(fieldValues);
}
Use of org.apache.kafka.connect.data.Field in project dhis2-core (by dhis2),
from the class DbChangeEventHandler, method getDataValueId:
/**
 * Reconstructs the composite identifier of a data value from a change event's key.
 *
 * <p>The five components of the key struct are read positionally.
 * NOTE(review): this assumes the key schema's field order is always data element,
 * period, organisation unit, category option combo, attribute option combo —
 * confirm against the datavalue table's primary-key column order.
 *
 * @param sourceRecord the change event whose key identifies the data value
 * @return a {@link DataValue} assembled from the resolved key components
 */
private Serializable getDataValueId(SourceRecord sourceRecord) {
  final Struct key = (Struct) sourceRecord.key();
  final List<Field> idFields = sourceRecord.keySchema().fields();

  final Long dataElementId = (Long) getIdFromField(key, idFields.get(0));
  final Long periodId = (Long) getIdFromField(key, idFields.get(1));
  final Long orgUnitId = (Long) getIdFromField(key, idFields.get(2));
  final Long cocId = (Long) getIdFromField(key, idFields.get(3));
  final Long aocId = (Long) getIdFromField(key, idFields.get(4));

  // Resolve each raw id to its domain object before assembling the DataValue key.
  return new DataValue(
      idObjectManager.get(DataElement.class, dataElementId),
      periodService.getPeriod(periodId),
      idObjectManager.get(OrganisationUnit.class, orgUnitId),
      idObjectManager.get(CategoryOptionCombo.class, cocId),
      idObjectManager.get(CategoryOptionCombo.class, aocId));
}
Use of org.apache.kafka.connect.data.Field in project dhis2-core (by dhis2),
from the class DbChangeEventHandler, method getEntityIdFromFirstField:
/**
 * Extracts the entity ID from the first field of the source record's key.
 *
 * @param sourceRecord SourceRecord object containing info on the event
 * @return a Serializable object representing an entity ID
 */
private Serializable getEntityIdFromFirstField(SourceRecord sourceRecord) {
  // The leading key field is taken to be the entity's primary identifier.
  Field firstKeyField = sourceRecord.keySchema().fields().get(0);
  return getIdFromField((Struct) sourceRecord.key(), firstKeyField);
}
Use of org.apache.kafka.connect.data.Field in project connect-utils (by jcustenborder),
from the class Parser, method parseJsonNode:
/**
 * Recursively converts a Jackson {@link JsonNode} into the Java value demanded by
 * the given Connect {@link Schema}: STRUCT → {@link Struct}, ARRAY → {@link List},
 * MAP → {@link Map}, everything else via a registered {@code TypeParser}.
 *
 * @param schema the Connect schema describing the expected shape of {@code input}
 * @param input the JSON node to convert; {@code null} or a JSON null yields {@code null}
 * @return the converted value, or {@code null} for absent/null input
 * @throws DataException wrapping any failure while converting a struct field,
 *     array element, or map entry, with the offending field/index/key in the message
 */
public Object parseJsonNode(Schema schema, JsonNode input) {
  checkSchemaAndInput(schema, input);
  if (null == input || input.isNull()) {
    return null;
  }
  log.trace("parseJsonNode() - schema.type() = {}", schema.type());
  Object result;
  if (Schema.Type.STRUCT == schema.type()) {
    Struct struct = new Struct(schema);
    Preconditions.checkState(input.isObject(), "struct schemas require a ObjectNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as struct.");
    final Set<String> processedFields = Sets.newHashSetWithExpectedSize(schema.fields().size());
    for (Field field : schema.fields()) {
      log.trace("parseJsonNode() - Processing field '{}:{}'", schema.name(), field.name());
      // NOTE(review): findValue() searches descendants too, so a nested object's
      // field with the same name can shadow a missing top-level field — confirm
      // this deep lookup is intended (input.get(...) would restrict to direct children).
      JsonNode fieldInput = input.findValue(field.name());
      try {
        Object convertedValue = parseJsonNode(field.schema(), fieldInput);
        struct.put(field, convertedValue);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while processing %s:%s", schema.name(), field.name()), ex);
      }
      processedFields.add(field.name());
    }
    // Trace-only diagnostic: report JSON fields the schema did not consume.
    if (log.isTraceEnabled()) {
      final Set<String> jsonFieldNames = Sets.newLinkedHashSet(ImmutableList.copyOf(input.fieldNames()));
      Sets.SetView<String> difference = Sets.difference(jsonFieldNames, processedFields);
      if (!difference.isEmpty()) {
        log.trace("parseJsonNode() - Unprocessed fields for {}:\n{}", schema.name(), Joiner.on('\n').join(difference));
      }
    }
    result = struct;
  } else if (Schema.Type.ARRAY == schema.type()) {
    Preconditions.checkState(input.isArray(), "array schemas require a ArrayNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as array.");
    List<Object> array = new ArrayList<>();
    Iterator<JsonNode> arrayIterator = input.iterator();
    int index = 0;
    while (arrayIterator.hasNext()) {
      log.trace("parseJsonNode() - Processing index {}", index);
      JsonNode arrayInput = arrayIterator.next();
      try {
        Object arrayResult = parseJsonNode(schema.valueSchema(), arrayInput);
        array.add(arrayResult);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while processing index %s", index), ex);
      }
      index++;
    }
    result = array;
  } else if (Schema.Type.MAP == schema.type()) {
    Preconditions.checkState(input.isObject(), "map schemas require a ObjectNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as map.");
    Map<Object, Object> map = new LinkedHashMap<>();
    Iterator<String> fieldNameIterator = input.fieldNames();
    while (fieldNameIterator.hasNext()) {
      final String fieldName = fieldNameIterator.next();
      final JsonNode fieldInput = input.findValue(fieldName);
      log.trace("parseJsonNode() - Processing key. Key='{}'", fieldName);
      final Object mapKey;
      try {
        mapKey = parseString(schema.keySchema(), fieldName);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while parsing key. Key='%s'", fieldName), ex);
      }
      log.trace("parseJsonNode() - Processing value. Key='{}'", fieldName);
      final Object mapValue;
      try {
        // BUGFIX: map values must be parsed against valueSchema(), not keySchema().
        // The previous code used keySchema() here, silently mis-parsing any map
        // whose key and value schemas differ (e.g. Map<String, Int32>).
        mapValue = parseJsonNode(schema.valueSchema(), fieldInput);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while parsing value. Key='%s'", fieldName), ex);
      }
      map.put(mapKey, mapValue);
    }
    result = map;
  } else {
    // Primitive / logical types delegate to the TypeParser registered for this schema.
    TypeParser parser = findParser(schema);
    try {
      result = parser.parseJsonNode(input, schema);
    } catch (Exception ex) {
      String message = String.format("Could not parse '%s' to %s", input, parser.expectedClass().getSimpleName());
      throw new DataException(message, ex);
    }
  }
  return result;
}
Aggregations