Use of org.codehaus.jackson.JsonParseException in project meteo by pierre: class TopicListener, method onMessage.
/**
 * JMS callback: deserializes a message's JSON payload into a Map and forwards
 * it to Esper as an event under {@code esperTopicKey}.
 *
 * Supports TextMessage (JSON text body) and BytesMessage (raw JSON bytes);
 * any other message type is logged and skipped. Parse failures are logged at
 * info/warn level and never propagate out of the listener.
 *
 * @param message the incoming JMS message; never rethrows its errors
 */
@Override
public void onMessage(Message message) {
    if (message instanceof TextMessage) {
        final TextMessage txtMsg = (TextMessage) message;
        String txt = null;
        try {
            txt = txtMsg.getText();
            // Parameterized logging so the payload is only rendered when debug is on.
            log.debug("Received a message, yay!\n{}", txt);
            Map event = mapper.readValue(txt, Map.class);
            esperSink.getEPRuntime().sendEvent(event, esperTopicKey);
        } catch (JMSException ex) {
            log.warn("Got an error from the message queue", ex);
        } catch (ClassCastException ex) {
            log.info("Received message that I couldn't parse: " + txt, ex);
        } catch (JsonMappingException ex) {
            // Must precede IOException: Jackson exceptions extend IOException.
            log.info("Received message that I couldn't parse: " + txt, ex);
        } catch (JsonParseException ex) {
            log.info("Received message that I couldn't parse: " + txt, ex);
        } catch (IOException ex) {
            log.warn("Got an error from the message queue", ex);
        }
    } else if (message instanceof BytesMessage) {
        final BytesMessage byteMessage = (BytesMessage) message;
        long llen;
        try {
            llen = byteMessage.getBodyLength();
        } catch (JMSException e) {
            log.warn("Unable to get message length", e);
            return;
        }
        if (llen > Integer.MAX_VALUE) {
            // should never occur but...
            log.error("Ridiculously huge message payload, above 32-bit length");
        } else {
            final int len = (int) llen;
            final byte[] data = new byte[len];
            final int readLen;
            try {
                readLen = byteMessage.readBytes(data);
            } catch (JMSException e) {
                log.warn("Unable to get message bytes", e);
                return;
            }
            if (readLen < len) {
                log.error("Failed to read byte message contents; read {}, was trying to read {}", readLen, data.length);
            } else {
                final Map event;
                try {
                    event = mapper.readValue(data, Map.class);
                    esperSink.getEPRuntime().sendEvent(event, esperTopicKey);
                } catch (IOException e) {
                    // BUG FIX: original passed readLen/data.length to a format string
                    // with no placeholders and silently dropped the exception.
                    log.error("Failed to convert message to Esper Event; read {} of {} bytes", readLen, data.length, e);
                }
            }
        }
    } else {
        log.error("Unexpected message type '{}' from AMQ broker: must skip", message.getClass().getName());
    }
}
Use of org.codehaus.jackson.JsonParseException in project databus by linkedin: class DbusEventUtils, method getCheckpointFromEvent.
/**
 * Utility method to extract a checkpoint from a DbusEvent.
 * Note: Ensure that this is a Checkpoint event before calling this method.
 *
 * @param event a DbusEvent whose value payload is a serialized Checkpoint
 * @return the deserialized Checkpoint
 * @throws RuntimeException if the payload cannot be parsed as a Checkpoint
 */
public static Checkpoint getCheckpointFromEvent(DbusEvent event) {
    assert (event.isCheckpointMessage());
    ByteBuffer valueBuffer = event.value();
    byte[] valueBytes = new byte[valueBuffer.limit()];
    // NOTE: advances the buffer's position; callers must not reuse it unrewound.
    valueBuffer.get(valueBytes);
    try {
        // new String(byte[]) uses the platform default charset; presumably the
        // payload is ASCII/UTF-8 JSON — TODO confirm before pinning a charset.
        return new Checkpoint(new String(valueBytes));
    } catch (IOException e) {
        // JsonParseException and JsonMappingException both extend IOException and
        // were handled identically, so a single catch covers all three cases.
        throw new RuntimeException("Failed to parse checkpoint from event payload", e);
    }
}
Use of org.codehaus.jackson.JsonParseException in project gora by apache: class AbstractGoraMojo, method compile.
/**
 * Compiles every changed Gora schema file found under the sources directory
 * into Java classes in the output directory, then refreshes the build context.
 *
 * @throws IOException if the output directory cannot be created, or if schema
 *         compilation fails for a reason other than a JSON syntax error (those
 *         are attached to the file as build messages instead)
 */
protected void compile() throws IOException {
    File sourceDirectory = getSourcesDirectory();
    File outputDirectory = getOutputDirectory();
    // BUG FIX: mkdirs() result was ignored; fail fast if the directory cannot
    // be created (isDirectory() re-check tolerates a concurrent creation).
    if (!outputDirectory.exists() && !outputDirectory.mkdirs() && !outputDirectory.isDirectory()) {
        throw new IOException("Unable to create output directory " + outputDirectory);
    }
    Scanner fileScanner = context.newScanner(sourceDirectory, true);
    fileScanner.setIncludes(includes);
    fileScanner.setExcludes(excludes);
    fileScanner.scan();
    File basedir = fileScanner.getBasedir();
    List<File> changedFiles = new ArrayList<File>();
    for (String fileName : fileScanner.getIncludedFiles()) {
        File file = new File(basedir, fileName);
        changedFiles.add(file);
        // Clear any error markers attached to this file by a previous build.
        context.removeMessages(file);
    }
    if (!changedFiles.isEmpty()) {
        try {
            File[] schemaFiles = changedFiles.toArray(new File[changedFiles.size()]);
            GoraCompiler.compileSchema(schemaFiles, outputDirectory);
        } catch (SchemaParseException e) {
            // A JSON syntax error in a schema becomes an IDE/build message rather
            // than failing the whole build; anything else propagates.
            // (instanceof is already null-safe, so no separate null check needed.)
            if (e.getCause() instanceof JsonParseException) {
                attachErrorMessage((JsonParseException) e.getCause());
            } else {
                throw e;
            }
        }
    }
    context.refresh(outputDirectory);
}
Use of org.codehaus.jackson.JsonParseException in project hive by apache: class Vertex, method extractOpTree.
/**
 * Populates this vertex's operator tree from its JSON description.
 * We assume that there is a single top-level Map Operator Tree or a
 * Reduce Operator Tree in a vertex.
 *
 * @throws JSONException
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws Exception if the vertex contains an unsupported key
 */
public void extractOpTree() throws JSONException, JsonParseException, JsonMappingException, IOException, Exception {
    if (vertexObject.length() == 0) {
        return;
    }
    for (String key : JSONObject.getNames(vertexObject)) {
        switch (key) {
            case "Map Operator Tree:":
                extractOp(vertexObject.getJSONArray(key).getJSONObject(0));
                break;
            case "Reduce Operator Tree:":
            case "Processor Tree:":
                extractOp(vertexObject.getJSONObject(key));
                break;
            case "Join:": {
                // Map-side SMB join: each input of the join is treated as a dummy vertex.
                JSONArray joinInputs = vertexObject.getJSONArray(key);
                for (int i = 0; i < joinInputs.length(); i++) {
                    Vertex dummyVertex = new Vertex(null, joinInputs.getJSONObject(i), parser);
                    dummyVertex.extractOpTree();
                    dummyVertex.dummy = true;
                    mergeJoinDummyVertexs.add(dummyVertex);
                }
                break;
            }
            case "Merge File Operator": {
                JSONObject mergeTree = vertexObject.getJSONObject(key);
                if (!mergeTree.has("Map Operator Tree:")) {
                    throw new Exception("Merge File Operator does not have a Map Operator Tree");
                }
                extractOp(mergeTree.getJSONArray("Map Operator Tree:").getJSONObject(0));
                break;
            }
            case "Execution mode:":
                executionMode = " " + vertexObject.getString(key);
                break;
            case "tagToInput:": {
                JSONObject inputsByTag = vertexObject.getJSONObject(key);
                for (String tag : JSONObject.getNames(inputsByTag)) {
                    this.tagToInput.put(tag, (String) inputsByTag.get(tag));
                }
                break;
            }
            case "tag:":
                this.tag = vertexObject.getString(key);
                break;
            default:
                throw new Exception("Unsupported operator tree in vertex " + this.name);
        }
    }
}
Use of org.codehaus.jackson.JsonParseException in project hive by apache: class Vertex, method extractOp.
/**
 * Extracts a single operator (and, recursively, its children) from its JSON
 * description. Assumption: each operator only has one parent but may have
 * many children.
 *
 * @param operator JSON object holding exactly one key — the operator's name
 * @return the extracted Op, with its children wired to it
 * @throws JSONException
 * @throws JsonParseException
 * @throws JsonMappingException
 * @throws IOException
 * @throws Exception if the object does not hold exactly one operator, or a
 *         "children" entry is neither a JSONObject nor a JSONArray
 */
Op extractOp(JSONObject operator) throws JSONException, JsonParseException, JsonMappingException, IOException, Exception {
    String[] names = JSONObject.getNames(operator);
    if (names.length != 1) {
        throw new Exception("Expect only one operator in " + operator.toString());
    }
    String opName = names[0];
    JSONObject attributes = (JSONObject) operator.get(opName);
    Map<String, String> attrs = new TreeMap<>();
    List<Op> children = new ArrayList<>();
    String operatorId = null;
    String outputName = null;
    for (String attrName : JSONObject.getNames(attributes)) {
        Object attrValue = attributes.get(attrName);
        if (attrName.equals("children")) {
            // A single child arrives as a JSONObject; multiple children as a JSONArray.
            if (attrValue instanceof JSONObject) {
                if (((JSONObject) attrValue).length() != 0) {
                    children.add(extractOp((JSONObject) attrValue));
                }
            } else if (attrValue instanceof JSONArray) {
                JSONArray childArray = (JSONArray) attrValue;
                for (int i = 0; i < childArray.length(); i++) {
                    children.add(extractOp(childArray.getJSONObject(i)));
                }
            } else {
                throw new Exception("Unsupported operator " + this.name + "'s children operator is neither a jsonobject nor a jsonarray");
            }
        } else if (attrName.equals("OperatorId:")) {
            operatorId = attrValue.toString();
        } else if (attrName.equals("outputname:")) {
            outputName = attrValue.toString();
        } else if (!attrValue.toString().isEmpty()) {
            // All remaining non-empty attributes are kept as plain strings.
            attrs.put(attrName, attrValue.toString());
        }
    }
    Op op = new Op(opName, operatorId, outputName, children, attrs, operator, this, parser);
    if (children.isEmpty()) {
        // NOTE(review): childless ops are registered as this vertex's root ops —
        // presumably the JSON nests top-down and rootOps collects the deepest
        // operators; confirm against the parser's traversal direction.
        this.rootOps.add(op);
    } else {
        for (Op child : children) {
            child.parent = op;
        }
    }
    return op;
}
Aggregations