Use of org.json.JSONArray in project IceNet by anton46.
The class NetworkManager, method fromJsonArray:
private void fromJsonArray(final Map<String, String> headers, String requestTag, final RequestCallback requestCallback) {
  JsonArrayRequest request = new JsonArrayRequest(getUrlConnection(pathUrl), new Response.Listener<JSONArray>() {
    @Override
    public void onResponse(JSONArray jsonArray) {
      // Deserialize the raw JSON array into the caller-requested type.
      Object t = new Gson().fromJson(jsonArray.toString(), classTarget.getType());
      if (requestCallback != null) {
        requestCallback.onRequestSuccess(t);
      }
    }
  }, new Response.ErrorListener() {
    @Override
    public void onErrorResponse(VolleyError error) {
      if (requestCallback != null) {
        NetworkResponse response = error.networkResponse;
        if (response != null) {
          requestCallback.onRequestError(new RequestError(response));
        }
      }
    }
  }) {
    @Override
    public Map<String, String> getHeaders() throws AuthFailureError {
      // Fall back to Volley's default headers when none were supplied.
      return headers != null ? headers : super.getHeaders();
    }
  };
  networkHelper.addToRequestQueue(request, requestTag);
}
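The success listener works because Gson can turn the array's string form into whatever parameterized type classTarget describes. A minimal sketch of that mechanism, independent of IceNet; the User model and payload are invented for illustration:

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.lang.reflect.Type;
import java.util.List;

public class GsonArrayDemo {
  // Hypothetical model class standing in for whatever classTarget describes.
  static class User {
    String name;
  }

  public static void main(String[] args) {
    String json = "[{\"name\":\"alice\"},{\"name\":\"bob\"}]";
    // A TypeToken captures the generic List<User> type at runtime, playing
    // the role of classTarget.getType() in fromJsonArray above.
    Type listType = new TypeToken<List<User>>() {}.getType();
    List<User> users = new Gson().fromJson(json, listType);
    System.out.println(users.size() + " users parsed"); // prints: 2 users parsed
  }
}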
Use of org.json.JSONArray in project hive by apache.
The class ExplainTask, method outputPlan:
private JSONObject outputPlan(Object work, PrintStream out, boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
  // Check if work has an explain annotation
  Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
  String keyJSONObject = null;
  if (note instanceof Explain) {
    Explain xpl_note = (Explain) note;
    boolean invokeFlag = false;
    if (this.work != null && this.work.isUserLevelExplain()) {
      invokeFlag = Level.USER.in(xpl_note.explainLevels());
    } else if (extended) {
      invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
    } else {
      invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
    }
    if (invokeFlag) {
      Vectorization vectorization = xpl_note.vectorization();
      if (this.work != null && this.work.isVectorization()) {
        // The EXPLAIN VECTORIZATION option was specified.
        final boolean desireOnly = this.work.isVectorizationOnly();
        final VectorizationDetailLevel desiredVecDetailLevel = this.work.isVectorizationDetailLevel();
        switch (vectorization) {
          case NON_VECTORIZED:
            // Display all non-vectorized leaf objects unless ONLY.
            if (desireOnly) {
              invokeFlag = false;
            }
            break;
          case SUMMARY:
          case OPERATOR:
          case EXPRESSION:
          case DETAIL:
            if (vectorization.rank < desiredVecDetailLevel.rank) {
              // This detail not desired.
              invokeFlag = false;
            }
            break;
          case SUMMARY_PATH:
          case OPERATOR_PATH:
            if (desireOnly && vectorization.rank < desiredVecDetailLevel.rank) {
              // Suppress headers and all objects below.
              invokeFlag = false;
            }
            break;
          default:
            throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
        }
      } else {
        // Do not display vectorization objects.
        switch (vectorization) {
          case SUMMARY:
          case OPERATOR:
          case EXPRESSION:
          case DETAIL:
            invokeFlag = false;
            break;
          case NON_VECTORIZED:
            // No action.
            break;
          case SUMMARY_PATH:
          case OPERATOR_PATH:
            // Always include headers since they contain non-vectorized objects, too.
            break;
          default:
            throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
        }
      }
    }
    if (invokeFlag) {
      keyJSONObject = xpl_note.displayName();
      if (out != null) {
        out.print(indentString(indent));
        if (appendToHeader != null && !appendToHeader.isEmpty()) {
          out.println(xpl_note.displayName() + appendToHeader);
        } else {
          out.println(xpl_note.displayName());
        }
      }
    }
  }
  JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null;
  // conf and then the children
  if (work instanceof Operator) {
    Operator<? extends OperatorDesc> operator = (Operator<? extends OperatorDesc>) work;
    if (operator.getConf() != null) {
      String appender = isLogical ? " (" + operator.getOperatorId() + ")" : "";
      JSONObject jsonOut = outputPlan(operator.getConf(), out, extended, jsonOutput, jsonOutput ? 0 : indent, appender);
      if (this.work != null && (this.work.isUserLevelExplain() || this.work.isFormatted())) {
        if (jsonOut != null && jsonOut.length() > 0) {
          ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put("OperatorId:", operator.getOperatorId());
          if (!this.work.isUserLevelExplain() && this.work.isFormatted() && operator instanceof ReduceSinkOperator) {
            List<String> outputOperators = ((ReduceSinkOperator) operator).getConf().getOutputOperators();
            if (outputOperators != null) {
              ((JSONObject) jsonOut.get(JSONObject.getNames(jsonOut)[0])).put(OUTPUT_OPERATORS, Arrays.toString(outputOperators.toArray()));
            }
          }
        }
      }
      if (jsonOutput) {
        json = jsonOut;
      }
    }
    if (!visitedOps.contains(operator) || !isLogical) {
      visitedOps.add(operator);
      if (operator.getChildOperators() != null) {
        int cindent = jsonOutput ? 0 : indent + 2;
        for (Operator<? extends OperatorDesc> op : operator.getChildOperators()) {
          JSONObject jsonOut = outputPlan(op, out, extended, jsonOutput, cindent);
          if (jsonOutput) {
            ((JSONObject) json.get(JSONObject.getNames(json)[0])).accumulate("children", jsonOut);
          }
        }
      }
    }
    return jsonOutput ? json : null;
  }
  // We look at all methods that generate values for explain
  Method[] methods = work.getClass().getMethods();
  Arrays.sort(methods, new MethodComparator());
  for (Method m : methods) {
    int prop_indents = jsonOutput ? 0 : indent + 2;
    note = AnnotationUtils.getAnnotation(m, Explain.class);
    if (note instanceof Explain) {
      Explain xpl_note = (Explain) note;
      boolean invokeFlag = false;
      if (this.work != null && this.work.isUserLevelExplain()) {
        invokeFlag = Level.USER.in(xpl_note.explainLevels());
      } else if (extended) {
        invokeFlag = Level.EXTENDED.in(xpl_note.explainLevels());
      } else {
        invokeFlag = Level.DEFAULT.in(xpl_note.explainLevels());
      }
      if (invokeFlag) {
        Vectorization vectorization = xpl_note.vectorization();
        if (this.work != null && this.work.isVectorization()) {
          // The EXPLAIN VECTORIZATION option was specified.
          final boolean desireOnly = this.work.isVectorizationOnly();
          final VectorizationDetailLevel desiredVecDetailLevel = this.work.isVectorizationDetailLevel();
          switch (vectorization) {
            case NON_VECTORIZED:
              // Display all non-vectorized leaf objects unless ONLY.
              if (desireOnly) {
                invokeFlag = false;
              }
              break;
            case SUMMARY:
            case OPERATOR:
            case EXPRESSION:
            case DETAIL:
              if (vectorization.rank < desiredVecDetailLevel.rank) {
                // This detail not desired.
                invokeFlag = false;
              }
              break;
            case SUMMARY_PATH:
            case OPERATOR_PATH:
              if (desireOnly && vectorization.rank < desiredVecDetailLevel.rank) {
                // Suppress headers and all objects below.
                invokeFlag = false;
              }
              break;
            default:
              throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
          }
        } else {
          // Do not display vectorization objects.
          switch (vectorization) {
            case SUMMARY:
            case OPERATOR:
            case EXPRESSION:
            case DETAIL:
              invokeFlag = false;
              break;
            case NON_VECTORIZED:
              // No action.
              break;
            case SUMMARY_PATH:
            case OPERATOR_PATH:
              // Always include headers since they contain non-vectorized objects, too.
              break;
            default:
              throw new RuntimeException("Unknown EXPLAIN vectorization " + vectorization);
          }
        }
      }
      if (invokeFlag) {
        Object val = null;
        try {
          val = m.invoke(work);
        } catch (InvocationTargetException ex) {
          // Ignore the exception, this may be caused by external jars
          val = null;
        }
        if (val == null) {
          continue;
        }
        String header = null;
        boolean skipHeader = xpl_note.skipHeader();
        boolean emptyHeader = false;
        if (!xpl_note.displayName().equals("")) {
          header = indentString(prop_indents) + xpl_note.displayName() + ":";
        } else {
          emptyHeader = true;
          prop_indents = indent;
          header = indentString(prop_indents);
        }
        // Try the output as a primitive object
        if (isPrintable(val)) {
          if (out != null && shouldPrint(xpl_note, val)) {
            if (!skipHeader) {
              out.print(header);
              out.print(" ");
            }
            out.println(val);
          }
          if (jsonOutput && shouldPrint(xpl_note, val)) {
            json.put(header, val.toString());
          }
          continue;
        }
        int ind = 0;
        if (!jsonOutput) {
          ind = skipHeader ? indent : prop_indents + 2;
        }
        // Try this as a map
        if (val instanceof Map) {
          // Go through the map and print out the entries
          Map<?, ?> mp = (Map<?, ?>) val;
          if (out != null && !skipHeader && !mp.isEmpty()) {
            out.print(header);
          }
          JSONObject jsonOut = outputMap(mp, !skipHeader && !emptyHeader, out, extended, jsonOutput, ind);
          if (jsonOutput && !mp.isEmpty()) {
            json.put(header, jsonOut);
          }
          continue;
        }
        // Try this as a list
        if (val instanceof List || val instanceof Set) {
          List l = val instanceof List ? (List) val : new ArrayList((Set) val);
          if (out != null && !skipHeader && !l.isEmpty()) {
            out.print(header);
          }
          JSONArray jsonOut = outputList(l, out, !skipHeader && !emptyHeader, extended, jsonOutput, ind);
          if (jsonOutput && !l.isEmpty()) {
            json.put(header, jsonOut);
          }
          continue;
        }
        // Finally check if it is serializable
        try {
          if (!skipHeader && out != null) {
            out.println(header);
          }
          JSONObject jsonOut = outputPlan(val, out, extended, jsonOutput, ind);
          if (jsonOutput && jsonOut != null && jsonOut.length() != 0) {
            if (!skipHeader) {
              json.put(header, jsonOut);
            } else {
              for (String k : JSONObject.getNames(jsonOut)) {
                json.put(k, jsonOut.get(k));
              }
            }
          }
          continue;
        } catch (ClassCastException ce) {
          // Ignore
        }
      }
    }
  }
  if (jsonOutput) {
    if (keyJSONObject != null) {
      JSONObject ret = new JSONObject(new LinkedHashMap<>());
      ret.put(keyJSONObject, json);
      return ret;
    }
    return json;
  }
  return null;
}
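Two org.json details carry most of the weight above: constructing JSONObject over a LinkedHashMap (the org.json release bundled with Hive keeps the supplied map, so key insertion order survives into the output), and accumulate(), which turns repeated additions under one key into a growing JSONArray. A small self-contained sketch; class and key values are invented:

import java.util.LinkedHashMap;

import org.json.JSONObject;

public class ExplainJsonDemo {
  public static void main(String[] args) throws Exception {
    // Backing the JSONObject with a LinkedHashMap preserves key order,
    // which is why outputPlan constructs it this way.
    JSONObject op = new JSONObject(new LinkedHashMap<>());
    op.put("Map Operator Tree:", new JSONObject(new LinkedHashMap<>()));

    // accumulate() stores a single value on the first call and silently
    // promotes it to a JSONArray on later calls; this is how the
    // "children" list grows one recursive outputPlan call at a time.
    JSONObject root = (JSONObject) op.get(JSONObject.getNames(op)[0]);
    root.accumulate("children", new JSONObject().put("OperatorId:", "SEL_1"));
    root.accumulate("children", new JSONObject().put("OperatorId:", "FIL_2"));
    System.out.println(op.toString(2));
  }
}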
Use of org.json.JSONArray in project hive by apache.
The class ExplainTask, method outputList:
private JSONArray outputList(List<?> l, PrintStream out, boolean hasHeader, boolean extended, boolean jsonOutput, int indent) throws Exception {
  boolean first_el = true;
  boolean nl = false;
  JSONArray outputArray = new JSONArray();
  for (Object o : l) {
    if (isPrintable(o)) {
      String delim = first_el ? " " : ", ";
      if (out != null) {
        out.print(delim);
        out.print(o);
      }
      if (jsonOutput) {
        outputArray.put(o);
      }
      nl = true;
    } else {
      if (first_el && (out != null) && hasHeader) {
        out.println();
      }
      JSONObject jsonOut = outputPlan(o, out, extended, jsonOutput, jsonOutput ? 0 : (hasHeader ? indent + 2 : indent));
      if (jsonOutput) {
        outputArray.put(jsonOut);
      }
    }
    first_el = false;
  }
  if (nl && (out != null)) {
    out.println();
  }
  return jsonOutput ? outputArray : null;
}
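Both branches land in the same JSONArray, since JSONArray.put accepts scalars and nested JSONObjects alike. A toy illustration of that mixing; the values are invented:

import org.json.JSONArray;
import org.json.JSONObject;

public class OutputListDemo {
  public static void main(String[] args) throws Exception {
    JSONArray outputArray = new JSONArray();
    // Printable elements go in directly, as in the isPrintable branch.
    outputArray.put("_col0");
    outputArray.put(42);
    // Non-printable elements arrive as JSONObjects from recursive outputPlan calls.
    outputArray.put(new JSONObject().put("Select Operator", "expressions: _col0"));
    System.out.println(outputArray); // ["_col0",42,{"Select Operator":"expressions: _col0"}]
  }
}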
Use of org.json.JSONArray in project hive by apache.
The class EximUtil, method readMetaData:
public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) throws IOException, SemanticException {
  FSDataInputStream mdstream = null;
  try {
    mdstream = fs.open(metadataPath);
    byte[] buffer = new byte[1024];
    ByteArrayOutputStream sb = new ByteArrayOutputStream();
    int read = mdstream.read(buffer);
    while (read != -1) {
      sb.write(buffer, 0, read);
      read = mdstream.read(buffer);
    }
    String md = new String(sb.toByteArray(), "UTF-8");
    JSONObject jsonContainer = new JSONObject(md);
    String version = jsonContainer.getString("version");
    String fcversion = getJSONStringEntry(jsonContainer, "fcversion");
    checkCompatibility(version, fcversion);
    String dbDesc = getJSONStringEntry(jsonContainer, "db");
    String tableDesc = getJSONStringEntry(jsonContainer, "table");
    TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
    Database db = null;
    if (dbDesc != null) {
      db = new Database();
      deserializer.deserialize(db, dbDesc, "UTF-8");
    }
    Table table = null;
    List<Partition> partitionsList = null;
    if (tableDesc != null) {
      table = new Table();
      deserializer.deserialize(table, tableDesc, "UTF-8");
      // TODO : jackson-streaming-iterable-redo this
      JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
      partitionsList = new ArrayList<Partition>(jsonPartitions.length());
      for (int i = 0; i < jsonPartitions.length(); ++i) {
        String partDesc = jsonPartitions.getString(i);
        Partition partition = new Partition();
        deserializer.deserialize(partition, partDesc, "UTF-8");
        partitionsList.add(partition);
      }
    }
    return new ReadMetaData(db, table, partitionsList, readReplicationSpec(jsonContainer));
  } catch (JSONException e) {
    throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e);
  } catch (TException e) {
    throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e);
  } finally {
    if (mdstream != null) {
      mdstream.close();
    }
  }
}
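Note the two-step parse: in the export metadata file, the "partitions" entry is itself a string containing serialized JSON, so the code first reads it as a string and then feeds it to the JSONArray constructor. A standalone sketch of that envelope format; the payload below is fabricated:

import org.json.JSONArray;
import org.json.JSONObject;

public class MetadataEnvelopeDemo {
  public static void main(String[] args) throws Exception {
    // "partitions" holds a JSON array serialized into a string, so it is
    // decoded in two steps, just as readMetaData does.
    String md = "{\"version\":\"0.2\",\"partitions\":\"[\\\"{part1}\\\",\\\"{part2}\\\"]\"}";
    JSONObject jsonContainer = new JSONObject(md);
    JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
    for (int i = 0; i < jsonPartitions.length(); ++i) {
      // Each element would be handed to the Thrift JSON deserializer.
      System.out.println(jsonPartitions.getString(i)); // {part1} then {part2}
    }
  }
}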
Use of org.json.JSONArray in project hive by apache.
The class Op, method inlineJoinOp:
private void inlineJoinOp() throws Exception {
  // inline map join operator
  if (this.type == OpType.MAPJOIN) {
    JSONObject joinObj = opObject.getJSONObject(this.name);
    // get the map for posToVertex
    JSONObject verticeObj = joinObj.getJSONObject("input vertices:");
    Map<String, Vertex> posToVertex = new LinkedHashMap<>();
    for (String pos : JSONObject.getNames(verticeObj)) {
      String vertexName = verticeObj.getString(pos);
      // update the connection
      Connection c = null;
      for (Connection connection : vertex.parentConnections) {
        if (connection.from.name.equals(vertexName)) {
          posToVertex.put(pos, connection.from);
          c = connection;
          break;
        }
      }
      if (c != null) {
        parser.addInline(this, c);
      }
    }
    // update the attrs
    this.attrs.remove("input vertices:");
    // update the keys to use operator name
    JSONObject keys = joinObj.getJSONObject("keys:");
    // find out the vertex for the big table
    Set<Vertex> parentVertexes = new HashSet<>();
    for (Connection connection : vertex.parentConnections) {
      parentVertexes.add(connection.from);
    }
    parentVertexes.removeAll(posToVertex.values());
    Map<String, String> posToOpId = new LinkedHashMap<>();
    if (keys.length() != 0) {
      for (String key : JSONObject.getNames(keys)) {
        // first search from the posToVertex
        if (posToVertex.containsKey(key)) {
          Vertex vertex = posToVertex.get(key);
          if (vertex.rootOps.size() == 1) {
            posToOpId.put(key, vertex.rootOps.get(0).operatorId);
          } else if (vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION) {
            posToOpId.put(key, vertex.name);
          } else {
            Op singleRSOp = vertex.getSingleRSOp();
            if (singleRSOp != null) {
              posToOpId.put(key, singleRSOp.operatorId);
            } else {
              throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
            }
          }
        } else if (parent != null) {
          // then search from the parent
          posToOpId.put(key, parent.operatorId);
        } else if (parentVertexes.size() == 1) {
          // then assume it is from its own vertex
          Vertex vertex = parentVertexes.iterator().next();
          parentVertexes.clear();
          if (vertex.rootOps.size() == 1) {
            posToOpId.put(key, vertex.rootOps.get(0).operatorId);
          } else if (vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION) {
            posToOpId.put(key, vertex.name);
          } else {
            Op singleRSOp = vertex.getSingleRSOp();
            if (singleRSOp != null) {
              posToOpId.put(key, singleRSOp.operatorId);
            } else {
              throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
            }
          }
        } else {
          // finally throw an exception
          throw new Exception("Can not find the source operator on one of the branches of map join.");
        }
      }
    }
    this.attrs.remove("keys:");
    StringBuffer sb = new StringBuffer();
    JSONArray conditionMap = joinObj.getJSONArray("condition map:");
    for (int index = 0; index < conditionMap.length(); index++) {
      JSONObject cond = conditionMap.getJSONObject(index);
      String k = (String) cond.keys().next();
      JSONObject condObject = new JSONObject((String) cond.get(k));
      String type = condObject.getString("type");
      String left = condObject.getString("left");
      String right = condObject.getString("right");
      if (keys.length() != 0) {
        sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
      } else {
        // probably a cross product
        sb.append("(" + type + "),");
      }
    }
    this.attrs.remove("condition map:");
    this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
  } else {
    // should be a merge join
    Map<String, String> posToOpId = new LinkedHashMap<>();
    if (vertex.mergeJoinDummyVertexs.size() == 0) {
      if (vertex.tagToInput.size() != vertex.parentConnections.size()) {
        throw new Exception("tagToInput size " + vertex.tagToInput.size() + " is different from parentConnections size " + vertex.parentConnections.size());
      }
      for (Entry<String, String> entry : vertex.tagToInput.entrySet()) {
        Connection c = null;
        for (Connection connection : vertex.parentConnections) {
          if (connection.from.name.equals(entry.getValue())) {
            Vertex v = connection.from;
            if (v.rootOps.size() == 1) {
              posToOpId.put(entry.getKey(), v.rootOps.get(0).operatorId);
            } else if (v.rootOps.size() == 0 && v.vertexType == VertexType.UNION) {
              posToOpId.put(entry.getKey(), v.name);
            } else {
              Op singleRSOp = v.getSingleRSOp();
              if (singleRSOp != null) {
                posToOpId.put(entry.getKey(), singleRSOp.operatorId);
              } else {
                throw new Exception("There are none or more than one root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
              }
            }
            c = connection;
            break;
          }
        }
        if (c == null) {
          throw new Exception("Can not find " + entry.getValue() + " while parsing keys of merge join operator");
        }
      }
    } else {
      posToOpId.put(vertex.tag, this.parent.operatorId);
      for (Vertex v : vertex.mergeJoinDummyVertexs) {
        if (v.rootOps.size() != 1) {
          throw new Exception("Can not find a single root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
        }
        posToOpId.put(v.tag, v.rootOps.get(0).operatorId);
      }
    }
    JSONObject joinObj = opObject.getJSONObject(this.name);
    // update the keys to use operator name
    JSONObject keys = joinObj.getJSONObject("keys:");
    if (keys.length() != 0) {
      for (String key : JSONObject.getNames(keys)) {
        if (!posToOpId.containsKey(key)) {
          throw new Exception("Can not find the source operator on one of the branches of merge join.");
        }
      }
      // inline merge join operator in a self-join
      if (this.vertex != null) {
        for (Vertex v : this.vertex.mergeJoinDummyVertexs) {
          parser.addInline(this, new Connection(null, v));
        }
      }
    }
    // update the attrs
    this.attrs.remove("keys:");
    StringBuffer sb = new StringBuffer();
    JSONArray conditionMap = joinObj.getJSONArray("condition map:");
    for (int index = 0; index < conditionMap.length(); index++) {
      JSONObject cond = conditionMap.getJSONObject(index);
      String k = (String) cond.keys().next();
      JSONObject condObject = new JSONObject((String) cond.get(k));
      String type = condObject.getString("type");
      String left = condObject.getString("left");
      String right = condObject.getString("right");
      if (keys.length() != 0) {
        sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
      } else {
        // probably a cross product
        sb.append("(" + type + "),");
      }
    }
    this.attrs.remove("condition map:");
    this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
  }
}
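The "condition map:" attribute that both branches consume is a JSONArray of single-key objects whose values are again strings of JSON describing one join condition each. A standalone sketch of that two-level decoding; the payload is fabricated to match the shape the parser walks over:

import org.json.JSONArray;
import org.json.JSONObject;

public class ConditionMapDemo {
  public static void main(String[] args) throws Exception {
    // One condition: an inner join between input positions 0 and 1.
    JSONArray conditionMap = new JSONArray(
        "[{\"cond-0\":\"{\\\"type\\\":\\\"Inner\\\",\\\"left\\\":\\\"0\\\",\\\"right\\\":\\\"1\\\"}\"}]");
    for (int index = 0; index < conditionMap.length(); index++) {
      JSONObject cond = conditionMap.getJSONObject(index);
      // Each element has exactly one key; its value is parsed as JSON again.
      String k = (String) cond.keys().next();
      JSONObject condObject = new JSONObject((String) cond.get(k));
      System.out.println(condObject.getString("left") + " "
          + condObject.getString("type") + " join " + condObject.getString("right"));
      // prints: 0 Inner join 1
    }
  }
}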