Use of com.github.jcustenborder.kafka.connect.utils.data.type.TypeParser in project connect-utils by jcustenborder: the Parser class, method parseJsonNode.
/**
 * Converts the supplied {@link JsonNode} into the Java value described by the supplied
 * Connect {@link Schema}. STRUCT, ARRAY, and MAP schemas are handled recursively; every
 * other schema type is delegated to the {@link TypeParser} registered for it.
 *
 * @param schema Connect schema describing the expected output type.
 * @param input  JSON node to convert; a Java null or JSON null yields null
 *               (presumably only permitted for optional schemas — checkSchemaAndInput
 *               is expected to reject null input for required schemas; confirm there).
 * @return Java value matching the schema, or null when the input is null / JSON null.
 * @throws DataException                 if a field, element, key, or value cannot be converted.
 * @throws UnsupportedOperationException if no TypeParser is registered for the schema.
 */
public Object parseJsonNode(Schema schema, JsonNode input) {
  checkSchemaAndInput(schema, input);
  if (null == input || input.isNull()) {
    return null;
  }
  log.trace("parseJsonNode() - schema.type() = {}", schema.type());
  Object result;
  if (Schema.Type.STRUCT == schema.type()) {
    Struct struct = new Struct(schema);
    Preconditions.checkState(input.isObject(), "struct schemas require a ObjectNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as struct.");
    final Set<String> processedFields = Sets.newHashSetWithExpectedSize(schema.fields().size());
    for (Field field : schema.fields()) {
      log.trace("parseJsonNode() - Processing field '{}:{}'", schema.name(), field.name());
      // NOTE(review): findValue() searches this node AND its descendants, so a nested
      // field with the same name can shadow (or substitute for) a direct child.
      // input.get(field.name()) would restrict the lookup to direct children — confirm
      // whether the recursive behavior is intentional before changing it.
      JsonNode fieldInput = input.findValue(field.name());
      try {
        Object convertedValue = parseJsonNode(field.schema(), fieldInput);
        struct.put(field, convertedValue);
      } catch (Exception ex) {
        // Wrap with the struct/field name so the failing field is identifiable.
        throw new DataException(String.format("Exception thrown while processing %s:%s", schema.name(), field.name()), ex);
      }
      processedFields.add(field.name());
    }
    // Trace-only diagnostic: report JSON fields that had no matching schema field.
    if (log.isTraceEnabled()) {
      final Set<String> jsonFieldNames = Sets.newLinkedHashSet(ImmutableList.copyOf(input.fieldNames()));
      Sets.SetView<String> difference = Sets.difference(jsonFieldNames, processedFields);
      if (!difference.isEmpty()) {
        log.trace("parseJsonNode() - Unprocessed fields for {}:\n{}", schema.name(), Joiner.on('\n').join(difference));
      }
    }
    result = struct;
  } else if (Schema.Type.ARRAY == schema.type()) {
    Preconditions.checkState(input.isArray(), "array schemas require a ArrayNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as array.");
    List<Object> array = new ArrayList<>();
    Iterator<JsonNode> arrayIterator = input.iterator();
    int index = 0;
    while (arrayIterator.hasNext()) {
      log.trace("parseJsonNode() - Processing index {}", index);
      JsonNode arrayInput = arrayIterator.next();
      try {
        // Each element is converted against the array's element (value) schema.
        Object arrayResult = parseJsonNode(schema.valueSchema(), arrayInput);
        array.add(arrayResult);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while processing index %s", index), ex);
      }
      index++;
    }
    result = array;
  } else if (Schema.Type.MAP == schema.type()) {
    Preconditions.checkState(input.isObject(), "map schemas require a ObjectNode to be supplied for input.");
    log.trace("parseJsonNode() - Processing as map.");
    // LinkedHashMap preserves the JSON object's field order in the resulting map.
    Map<Object, Object> map = new LinkedHashMap<>();
    Iterator<String> fieldNameIterator = input.fieldNames();
    while (fieldNameIterator.hasNext()) {
      final String fieldName = fieldNameIterator.next();
      final JsonNode fieldInput = input.findValue(fieldName);
      log.trace("parseJsonNode() - Processing key. Key='{}'", fieldName);
      final Object mapKey;
      try {
        // JSON object keys are always strings; convert via the key schema's string parser.
        mapKey = parseString(schema.keySchema(), fieldName);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while parsing key. Key='%s'", fieldName), ex);
      }
      log.trace("parseJsonNode() - Processing value. Key='{}'", fieldName);
      final Object mapValue;
      try {
        // BUGFIX: map values must be converted against valueSchema(), not keySchema().
        // The previous code parsed values with the key schema, which misconverts any
        // map whose key and value schemas differ.
        mapValue = parseJsonNode(schema.valueSchema(), fieldInput);
      } catch (Exception ex) {
        throw new DataException(String.format("Exception thrown while parsing value. Key='%s'", fieldName), ex);
      }
      map.put(mapKey, mapValue);
    }
    result = map;
  } else {
    // Scalar / logical types: delegate to the registered TypeParser for this schema.
    TypeParser parser = findParser(schema);
    try {
      result = parser.parseJsonNode(input, schema);
    } catch (Exception ex) {
      String message = String.format("Could not parse '%s' to %s", input, parser.expectedClass().getSimpleName());
      throw new DataException(message, ex);
    }
  }
  return result;
}
Use of com.github.jcustenborder.kafka.connect.utils.data.type.TypeParser in project connect-utils by jcustenborder: the Parser class, method findParser.
/**
 * Looks up the {@link TypeParser} registered for the supplied schema.
 *
 * @param schema schema whose registered parser is required.
 * @return the registered TypeParser; never null.
 * @throws UnsupportedOperationException if no parser has been registered for the schema.
 */
TypeParser findParser(Schema schema) {
  final TypeParser typeParser = this.typeParsers.get(new ParserKey(schema));
  if (typeParser == null) {
    throw new UnsupportedOperationException(
        String.format("Schema %s(%s) is not supported", schema.type(), schema.name())
    );
  }
  return typeParser;
}
Use of com.github.jcustenborder.kafka.connect.utils.data.type.TypeParser in project connect-utils by jcustenborder: the Parser class, method parseString.
/**
 * Parses String data into the Java type matching the supplied schema by delegating
 * to the registered {@link TypeParser}.
 *
 * @param schema Input schema to parse the String data by.
 * @param input  Java type specific to the schema supplied.
 * @return Java type for the
 * @throws DataException                 Exception is thrown when there is an exception thrown while parsing the input string.
 * @throws UnsupportedOperationException Exception is thrown if there is no type parser registered for the schema.
 * @throws NullPointerException          Exception is thrown if the schema passed is not optional and a null input value is passed.
 */
public Object parseString(Schema schema, String input) {
  checkSchemaAndInput(schema, input);
  if (input == null) {
    return null;
  }
  final TypeParser typeParser = findParser(schema);
  try {
    return typeParser.parseString(input, schema);
  } catch (Exception ex) {
    final String message = String.format(
        "Could not parse '%s' to '%s'", input, typeParser.expectedClass().getSimpleName()
    );
    throw new DataException(message, ex);
  }
}
Aggregations