Use of org.codehaus.jackson.JsonParseException in project stanbol by apache.
The class AnalyzedTextParser, method parseAnnotation:
private void parseAnnotation(Span span, String key, ObjectNode jValue) throws IOException {
    JsonNode jClass = jValue.path("class");
    if (!jClass.isTextual()) {
        log.warn("unable to parse Annotation {} because the 'class' field "
                + "is not set or is not a string (ignored, json: {})", key, jValue);
        return;
    }
    Class<?> clazz;
    try {
        clazz = AnalyzedTextParser.class.getClassLoader().loadClass(jClass.getTextValue());
    } catch (ClassNotFoundException e) {
        log.warn("Unable to parse Annotation " + key + " because the 'class' "
                + jClass.getTextValue() + " of the value can not be resolved "
                + "(ignored, json: " + jValue + ")", e);
        return;
    }
    ValueTypeParser<?> parser = this.valueTypeParserRegistry.getParser(clazz);
    Object value;
    if (parser != null) {
        value = parser.parse(jValue, span.getContext());
    } else {
        JsonNode valueNode = jValue.path("value");
        if (valueNode.isMissingNode()) {
            log.warn("unable to parse value for annotation {} because the "
                    + "field 'value' is not present (ignored, json: {})", key, jValue);
            return;
        } else {
            try {
                value = mapper.treeToValue(valueNode, clazz);
            } catch (JsonParseException e) {
                log.warn("unable to parse value for annotation " + key
                        + " because the value cannot be converted to the class "
                        + clazz.getName() + " (ignored, json: " + jValue + ")", e);
                return;
            } catch (JsonMappingException e) {
                log.warn("unable to parse value for annotation " + key
                        + " because the value cannot be converted to the class "
                        + clazz.getName() + " (ignored, json: " + jValue + ")", e);
                return;
            }
        }
    }
    JsonNode jProb = jValue.path("prob");
    if (!jProb.isDouble()) {
        span.addValue(key, Value.value(value));
    } else {
        span.addValue(key, Value.value(value, jProb.getDoubleValue()));
    }
}
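The heart of this parser is the fallback path: probe the tree with path(), then let Jackson bind the "value" subtree via ObjectMapper.treeToValue, treating JsonParseException and JsonMappingException as a cue to skip the annotation. That pattern can be exercised in isolation; below is a minimal sketch against the Jackson 1.x (org.codehaus.jackson) API, where the demo class and the sample node are invented for illustration and are not part of Stanbol:

import java.io.IOException;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ObjectNode;

public class TreeToValueDemo {

    private static final ObjectMapper mapper = new ObjectMapper();

    public static void main(String[] args) throws IOException {
        // Build a node shaped like the annotations parsed above:
        // a 'class' hint, a 'value' subtree, and an optional 'prob'.
        ObjectNode jValue = mapper.createObjectNode();
        jValue.put("class", "java.lang.Integer");
        jValue.put("value", 42);
        jValue.put("prob", 0.9);

        JsonNode valueNode = jValue.path("value");
        if (!valueNode.isMissingNode()) {
            try {
                // treeToValue binds the subtree to the requested class
                Integer value = mapper.treeToValue(valueNode, Integer.class);
                double prob = jValue.path("prob").getDoubleValue();
                System.out.println("value=" + value + " prob=" + prob);
            } catch (JsonParseException e) {
                // malformed token stream: skip, as the parser above does
            } catch (JsonMappingException e) {
                // well-formed JSON that does not fit the target class: also skip
            }
        }
    }
}

The two exceptions are caught separately to mirror the original method; on Java 7+ they could share a single multi-catch clause.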
Use of org.codehaus.jackson.JsonParseException in project hive by apache.
The class Vertex, method extractOpTree:
/**
 * We assume that there is a single top-level Map Operator Tree or a
 * Reduce Operator Tree in a vertex.
 *
 * @throws JSONException
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws Exception
 */
public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, IOException, Exception {
    if (vertexObject.length() != 0) {
        for (String key : JSONObject.getNames(vertexObject)) {
            if (key.equals("Map Operator Tree:")) {
                extractOp(vertexObject.getJSONArray(key).getJSONObject(0), null);
            } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) {
                extractOp(vertexObject.getJSONObject(key), null);
            } else if (key.equals("Join:")) {
                // this is the case when we have a map-side SMB join;
                // one input of the join is treated as a dummy vertex
                JSONArray array = vertexObject.getJSONArray(key);
                for (int index = 0; index < array.length(); index++) {
                    JSONObject mpOpTree = array.getJSONObject(index);
                    Vertex v = new Vertex(null, mpOpTree, this.stage, parser);
                    v.extractOpTree();
                    v.dummy = true;
                    mergeJoinDummyVertexs.add(v);
                }
            } else if (key.equals("Merge File Operator")) {
                JSONObject opTree = vertexObject.getJSONObject(key);
                if (opTree.has("Map Operator Tree:")) {
                    extractOp(opTree.getJSONArray("Map Operator Tree:").getJSONObject(0), null);
                } else {
                    throw new Exception("Merge File Operator does not have a Map Operator Tree");
                }
            } else if (key.equals("Execution mode:")) {
                executionMode = " " + vertexObject.getString(key);
            } else if (key.equals("tagToInput:")) {
                JSONObject tagToInput = vertexObject.getJSONObject(key);
                for (String tag : JSONObject.getNames(tagToInput)) {
                    this.tagToInput.put(tag, (String) tagToInput.get(tag));
                }
            } else if (key.equals("tag:")) {
                this.tag = vertexObject.getString(key);
            } else if (key.equals("Local Work:")) {
                extractOp(vertexObject.getJSONObject(key), null);
            } else {
                LOG.warn("Skip unsupported " + key + " in vertex " + this.name);
            }
        }
    }
}
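extractOpTree is essentially a key dispatcher: it walks JSONObject.getNames(vertexObject) and branches on a fixed set of well-known plan keys, warning on anything unrecognized. Below is a self-contained sketch of that dispatch loop using the same org.json calls; the toy vertex JSON and the demo class are invented for illustration and are not from Hive:

import org.json.JSONException;
import org.json.JSONObject;

public class VertexKeyDispatchDemo {

    public static void main(String[] args) throws JSONException {
        // A toy vertex with two recognized keys and one unknown key
        JSONObject vertex = new JSONObject(
            "{\"Execution mode:\":\"vectorized\"," +
            " \"tag:\":\"0\"," +
            " \"Unknown Thing:\":\"?\"}");

        if (vertex.length() != 0) {
            for (String key : JSONObject.getNames(vertex)) {
                if (key.equals("Execution mode:")) {
                    System.out.println("execution mode = " + vertex.getString(key));
                } else if (key.equals("tag:")) {
                    System.out.println("tag = " + vertex.getString(key));
                } else {
                    // mirrors the LOG.warn fallback in Vertex.extractOpTree
                    System.out.println("Skip unsupported " + key);
                }
            }
        }
    }
}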
Use of org.codehaus.jackson.JsonParseException in project wonderdog by infochimps-labs.
The class ElasticSearchStorage, method putNext:
/**
 * Here we handle both the delimited record case and the JSON case.
 */
@SuppressWarnings("unchecked")
@Override
public void putNext(Tuple t) throws IOException {
    UDFContext context = UDFContext.getUDFContext();
    Properties property = context.getUDFProperties(ResourceSchema.class);
    MapWritable record = new MapWritable();
    String isJson = property.getProperty(ES_IS_JSON);
    // Handle delimited records (ie. isJson == false)
    if (isJson != null && isJson.equals("false")) {
        String[] fieldNames = property.getProperty(PIG_ES_FIELD_NAMES).split(COMMA);
        for (int i = 0; i < t.size(); i++) {
            if (i < fieldNames.length) {
                try {
                    record.put(new Text(fieldNames[i]), new Text(t.get(i).toString()));
                } catch (NullPointerException e) {
                    // LOG.info("Increment null field counter.");
                }
            }
        }
    } else {
        if (!t.isNull(0)) {
            String jsonData = t.get(0).toString();
            // parse json data and put into mapwritable record
            try {
                HashMap<String, Object> data = mapper.readValue(jsonData, HashMap.class);
                record = (MapWritable) toWritable(data);
            } catch (JsonParseException e) {
                e.printStackTrace();
            } catch (JsonMappingException e) {
                e.printStackTrace();
            }
        }
    }
    try {
        writer.write(NullWritable.get(), record);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
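In the JSON branch, the whole parse is a single Jackson 1.x call, ObjectMapper.readValue(jsonData, HashMap.class): JsonParseException signals malformed text, while JsonMappingException signals well-formed JSON that cannot be bound to the target type. A standalone sketch of that error-handling split follows; the demo class and input strings are invented for illustration:

import java.io.IOException;
import java.util.HashMap;

import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;

public class JsonRecordParseDemo {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        String good = "{\"user\":\"alice\",\"count\":3}";
        String bad  = "{\"user\":\"alice\",";  // truncated on purpose

        for (String jsonData : new String[] { good, bad }) {
            try {
                HashMap<String, Object> data = mapper.readValue(jsonData, HashMap.class);
                System.out.println("parsed: " + data);
            } catch (JsonParseException e) {
                // malformed JSON text, e.g. the truncated record above
                System.out.println("parse error: " + e.getMessage());
            } catch (JsonMappingException e) {
                // well-formed JSON that cannot be bound to a HashMap
                System.out.println("mapping error: " + e.getMessage());
            }
        }
    }
}

Note that putNext above only logs these failures and still writes the (possibly empty) record, a deliberate drop-bad-records design for a bulk loader.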