Use of org.codehaus.jackson.map.JsonMappingException in project hive by apache.
The class Vertex, method extractOpTree.
/**
 * We assume that there is a single top-level Map Operator Tree or a
 * Reduce Operator Tree in a vertex.
 *
 * @throws JSONException
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws Exception
 */
public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, IOException, Exception {
    if (vertexObject.length() != 0) {
        for (String key : JSONObject.getNames(vertexObject)) {
            if (key.equals("Map Operator Tree:")) {
                extractOp(vertexObject.getJSONArray(key).getJSONObject(0), null);
            } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) {
                extractOp(vertexObject.getJSONObject(key), null);
            } else if (key.equals("Join:")) {
                // This is the case when we have a map-side SMB join;
                // one input of the join is treated as a dummy vertex.
                JSONArray array = vertexObject.getJSONArray(key);
                for (int index = 0; index < array.length(); index++) {
                    JSONObject mpOpTree = array.getJSONObject(index);
                    Vertex v = new Vertex(null, mpOpTree, this.stage, parser);
                    v.extractOpTree();
                    v.dummy = true;
                    mergeJoinDummyVertexs.add(v);
                }
            } else if (key.equals("Merge File Operator")) {
                JSONObject opTree = vertexObject.getJSONObject(key);
                if (opTree.has("Map Operator Tree:")) {
                    extractOp(opTree.getJSONArray("Map Operator Tree:").getJSONObject(0), null);
                } else {
                    throw new Exception("Merge File Operator does not have a Map Operator Tree");
                }
            } else if (key.equals("Execution mode:")) {
                executionMode = " " + vertexObject.getString(key);
            } else if (key.equals("tagToInput:")) {
                JSONObject tagToInput = vertexObject.getJSONObject(key);
                for (String tag : JSONObject.getNames(tagToInput)) {
                    this.tagToInput.put(tag, (String) tagToInput.get(tag));
                }
            } else if (key.equals("tag:")) {
                this.tag = vertexObject.getString(key);
            } else if (key.equals("Local Work:")) {
                extractOp(vertexObject.getJSONObject(key), null);
            } else {
                LOG.warn("Skip unsupported " + key + " in vertex " + this.name);
            }
        }
    }
}
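The method above is essentially a dispatcher over the top-level keys of a vertex's EXPLAIN JSON. A minimal sketch of that shape, using an invented fragment (the JSON content and class name are hypothetical; real Hive EXPLAIN output is far richer):

// Hypothetical sketch: a minimal vertex fragment like the ones extractOpTree() walks.
// The JSON content here is invented for illustration only.
import org.json.JSONArray;
import org.json.JSONObject;

public class ExtractOpTreeSketch {
    public static void main(String[] args) throws Exception {
        JSONObject vertexObject = new JSONObject()
            .put("Execution mode:", "vectorized")
            .put("Map Operator Tree:", new JSONArray()
                .put(new JSONObject().put("TableScan", new JSONObject().put("alias:", "src"))));
        // extractOpTree() selects a handler per top-level key, as in the method above.
        for (String key : JSONObject.getNames(vertexObject)) {
            System.out.println("dispatch on key: " + key);
        }
    }
}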
Use of org.codehaus.jackson.map.JsonMappingException in project wonderdog by infochimps-labs.
The class ElasticSearchStorage, method putNext.
/**
* Here we handle both the delimited record case and the json case.
*/
@SuppressWarnings("unchecked")
@Override
public void putNext(Tuple t) throws IOException {
    UDFContext context = UDFContext.getUDFContext();
    Properties property = context.getUDFProperties(ResourceSchema.class);
    MapWritable record = new MapWritable();
    String isJson = property.getProperty(ES_IS_JSON);
    // Handle delimited records (i.e. isJson == false)
    if (isJson != null && isJson.equals("false")) {
        String[] fieldNames = property.getProperty(PIG_ES_FIELD_NAMES).split(COMMA);
        for (int i = 0; i < t.size(); i++) {
            if (i < fieldNames.length) {
                try {
                    record.put(new Text(fieldNames[i]), new Text(t.get(i).toString()));
                } catch (NullPointerException e) {
                    // Field value was null; skip it.
                    // LOG.info("Increment null field counter.");
                }
            }
        }
    } else {
        if (!t.isNull(0)) {
            String jsonData = t.get(0).toString();
            // Parse the JSON data and put it into the MapWritable record.
            try {
                HashMap<String, Object> data = mapper.readValue(jsonData, HashMap.class);
                record = (MapWritable) toWritable(data);
            } catch (JsonParseException e) {
                e.printStackTrace();
            } catch (JsonMappingException e) {
                e.printStackTrace();
            }
        }
    }
    try {
        writer.write(NullWritable.get(), record);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
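The toWritable call above converts the parsed HashMap into Hadoop Writable types; the actual wonderdog helper is not shown on this page. A plausible recursive sketch, under the assumption that it maps Java collections and primitives onto the corresponding Writables (class and method names here are illustrative, not the verbatim implementation):

// Sketch only: one plausible recursive Object -> Writable conversion,
// consistent with the cast to MapWritable above. Not the verbatim wonderdog code.
import java.util.List;
import java.util.Map;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public final class WritableConversionSketch {
    @SuppressWarnings("unchecked")
    public static Writable toWritable(Object value) {
        if (value == null) {
            return NullWritable.get();
        } else if (value instanceof Map) {
            // Recurse into nested objects; keys become Text, values convert recursively.
            MapWritable result = new MapWritable();
            for (Map.Entry<String, Object> entry : ((Map<String, Object>) value).entrySet()) {
                result.put(new Text(entry.getKey()), toWritable(entry.getValue()));
            }
            return result;
        } else if (value instanceof List) {
            // Recurse into nested arrays.
            List<Object> list = (List<Object>) value;
            Writable[] items = new Writable[list.size()];
            for (int i = 0; i < items.length; i++) {
                items[i] = toWritable(list.get(i));
            }
            return new ArrayWritable(Writable.class, items);
        } else if (value instanceof Integer) {
            return new IntWritable((Integer) value);
        } else if (value instanceof Long) {
            return new LongWritable((Long) value);
        } else if (value instanceof Double) {
            return new DoubleWritable((Double) value);
        } else if (value instanceof Boolean) {
            return new BooleanWritable((Boolean) value);
        }
        // Fall back to the string form for anything else (e.g. String itself).
        return new Text(value.toString());
    }
}

Falling back to Text for unrecognized types keeps the conversion total, at the cost of stringifying anything Jackson produced that has no direct Writable counterpart.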