Use of org.json.JSONObject in project Hive by Apache.
The class ExplainTask, method getJSONDependencies.
/*
 * The method below returns the dependencies for the query passed to EXPLAIN.
 * The dependencies are the set of input tables and partitions, and are
 * returned as JSON output for the EXPLAIN command.
 * Example output:
 * {"input_tables":[{"tablename": "default@test_sambavi_v1", "tabletype": "TABLE"}],
 * "input_partitions":[{"partitionName": "default@srcpart@ds=2008-04-08/hr=11"}]}
 */
private static JSONObject getJSONDependencies(ExplainWork work) throws Exception {
assert (work.getDependency());
JSONObject outJSONObject = new JSONObject(new LinkedHashMap<>());
List<Map<String, String>> inputTableInfo = new ArrayList<Map<String, String>>();
List<Map<String, String>> inputPartitionInfo = new ArrayList<Map<String, String>>();
for (ReadEntity input : work.getInputs()) {
switch(input.getType()) {
case TABLE:
Table table = input.getTable();
Map<String, String> tableInfo = new LinkedHashMap<String, String>();
tableInfo.put("tablename", table.getCompleteName());
tableInfo.put("tabletype", table.getTableType().toString());
if ((input.getParents() != null) && (!input.getParents().isEmpty())) {
tableInfo.put("tableParents", input.getParents().toString());
}
inputTableInfo.add(tableInfo);
break;
case PARTITION:
Map<String, String> partitionInfo = new HashMap<String, String>();
partitionInfo.put("partitionName", input.getPartition().getCompleteName());
if ((input.getParents() != null) && (!input.getParents().isEmpty())) {
partitionInfo.put("partitionParents", input.getParents().toString());
}
inputPartitionInfo.add(partitionInfo);
break;
default:
break;
}
}
outJSONObject.put("input_tables", inputTableInfo);
outJSONObject.put("input_partitions", inputPartitionInfo);
return outJSONObject;
}
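For reference, a minimal standalone sketch (not Hive code) of the same pattern: building the dependency JSON shown in the comment above with plain org.json. Hive passes a LinkedHashMap to the JSONObject constructor to keep keys in insertion order; whether that actually holds depends on the org.json version in use, since some releases copy the supplied map into an internal HashMap.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONObject;

public class DependencyJsonSketch {
    public static void main(String[] args) {
        Map<String, String> tableInfo = new LinkedHashMap<>();
        tableInfo.put("tablename", "default@test_sambavi_v1");
        tableInfo.put("tabletype", "TABLE");
        List<Map<String, String>> inputTableInfo = new ArrayList<>();
        inputTableInfo.add(tableInfo);

        Map<String, String> partitionInfo = new LinkedHashMap<>();
        partitionInfo.put("partitionName", "default@srcpart@ds=2008-04-08/hr=11");
        List<Map<String, String>> inputPartitionInfo = new ArrayList<>();
        inputPartitionInfo.add(partitionInfo);

        JSONObject out = new JSONObject(new LinkedHashMap<>());
        out.put("input_tables", inputTableInfo);
        out.put("input_partitions", inputPartitionInfo);
        // Prints (modulo key order) something like:
        // {"input_tables":[{"tablename":"default@test_sambavi_v1","tabletype":"TABLE"}],
        //  "input_partitions":[{"partitionName":"default@srcpart@ds=2008-04-08/hr=11"}]}
        System.out.println(out);
    }
}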
Use of org.json.JSONObject in project Hive by Apache.
The class ExplainTask, method collectAuthRelatedEntities.
private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception {
BaseSemanticAnalyzer analyzer = work.getAnalyzer();
HiveOperation operation = queryState.getHiveOperation();
JSONObject object = new JSONObject(new LinkedHashMap<>());
Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
if (work.isFormatted()) {
object.put("INPUTS", jsonInput);
}
Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
if (work.isFormatted()) {
object.put("OUTPUTS", jsonOutput);
}
String userName = SessionState.get().getAuthenticator().getUserName();
Object jsonUser = toJson("CURRENT_USER", userName, out, work);
if (work.isFormatted()) {
object.put("CURRENT_USER", jsonUser);
}
Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
if (work.isFormatted()) {
object.put("OPERATION", jsonOperation);
}
if (analyzer.skipAuthorization()) {
return object;
}
final List<String> exceptions = new ArrayList<String>();
Object delegate = SessionState.get().getActiveAuthorizer();
if (delegate != null) {
Class itface = SessionState.get().getAuthorizerInterface();
Object authorizer = AuthorizationFactory.create(delegate, itface, new AuthorizationFactory.AuthorizationExceptionHandler() {
public void exception(Exception exception) {
exceptions.add(exception.getMessage());
}
});
SessionState.get().setActiveAuthorizer(authorizer);
try {
Driver.doAuthorization(queryState.getHiveOperation(), analyzer, "");
} finally {
SessionState.get().setActiveAuthorizer(delegate);
}
}
if (!exceptions.isEmpty()) {
Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
if (work.isFormatted()) {
object.put("AUTHORIZATION_FAILURES", jsonFails);
}
}
return object;
}
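The central trick above is to temporarily swap in a wrapped authorizer that records authorization failures instead of throwing, run the normal Driver.doAuthorization path, restore the original authorizer, and report the collected messages under "AUTHORIZATION_FAILURES". A minimal sketch of that collect-instead-of-throw pattern follows; the Checker interface and check method are hypothetical stand-ins, whereas Hive builds the real wrapper via AuthorizationFactory.create and its AuthorizationExceptionHandler callback.

import java.util.ArrayList;
import java.util.List;

public class CollectFailuresSketch {
    // Hypothetical stand-in for the authorizer interface.
    interface Checker {
        void check(String resource) throws Exception;
    }

    // Wraps a delegate so exceptions are recorded instead of propagated,
    // mirroring the exception handler passed to AuthorizationFactory.create above.
    static Checker collecting(Checker delegate, List<String> failures) {
        return resource -> {
            try {
                delegate.check(resource);
            } catch (Exception e) {
                failures.add(e.getMessage());
            }
        };
    }

    public static void main(String[] args) throws Exception {
        List<String> failures = new ArrayList<>();
        Checker real = resource -> {
            throw new Exception("No privilege on " + resource);
        };
        Checker wrapped = collecting(real, failures);
        wrapped.check("default@src");
        // failures now holds the messages that EXPLAIN would report
        // under "AUTHORIZATION_FAILURES".
        System.out.println(failures);
    }
}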
Use of org.json.JSONObject in project Hive by Apache.
The class EximUtil, method readMetaData.
public static ReadMetaData readMetaData(FileSystem fs, Path metadataPath) throws IOException, SemanticException {
FSDataInputStream mdstream = null;
try {
mdstream = fs.open(metadataPath);
byte[] buffer = new byte[1024];
ByteArrayOutputStream sb = new ByteArrayOutputStream();
int read = mdstream.read(buffer);
while (read != -1) {
sb.write(buffer, 0, read);
read = mdstream.read(buffer);
}
String md = new String(sb.toByteArray(), "UTF-8");
JSONObject jsonContainer = new JSONObject(md);
String version = jsonContainer.getString("version");
String fcversion = getJSONStringEntry(jsonContainer, "fcversion");
checkCompatibility(version, fcversion);
String dbDesc = getJSONStringEntry(jsonContainer, "db");
String tableDesc = getJSONStringEntry(jsonContainer, "table");
TDeserializer deserializer = new TDeserializer(new TJSONProtocol.Factory());
Database db = null;
if (dbDesc != null) {
db = new Database();
deserializer.deserialize(db, dbDesc, "UTF-8");
}
Table table = null;
List<Partition> partitionsList = null;
if (tableDesc != null) {
table = new Table();
deserializer.deserialize(table, tableDesc, "UTF-8");
// TODO : jackson-streaming-iterable-redo this
JSONArray jsonPartitions = new JSONArray(jsonContainer.getString("partitions"));
partitionsList = new ArrayList<Partition>(jsonPartitions.length());
for (int i = 0; i < jsonPartitions.length(); ++i) {
String partDesc = jsonPartitions.getString(i);
Partition partition = new Partition();
deserializer.deserialize(partition, partDesc, "UTF-8");
partitionsList.add(partition);
}
}
return new ReadMetaData(db, table, partitionsList, readReplicationSpec(jsonContainer));
} catch (JSONException e) {
throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e);
} catch (TException e) {
throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e);
} finally {
if (mdstream != null) {
mdstream.close();
}
}
}
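A standalone sketch (not Hive code) of the JSON side of the method above: parsing an export metadata document with org.json and reading the optional fields the way a getJSONStringEntry-style helper would; the helper here (stringEntry) and the payload values are illustrative assumptions, and the Thrift deserialization of the db/table/partition descriptors is omitted.

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class MetadataJsonSketch {
    // Hypothetical equivalent of a getJSONStringEntry helper: return the
    // value if the key is present, null otherwise, instead of throwing.
    static String stringEntry(JSONObject container, String key) {
        return container.has(key) ? container.optString(key, null) : null;
    }

    public static void main(String[] args) throws JSONException {
        // Illustrative payload, not a real export file.
        String md = "{\"version\":\"x.y\",\"table\":\"{...thrift json...}\","
                + "\"partitions\":\"[]\"}";
        JSONObject container = new JSONObject(md);
        String version = container.getString("version");        // mandatory
        String fcversion = stringEntry(container, "fcversion"); // optional
        String tableDesc = stringEntry(container, "table");     // optional
        if (tableDesc != null) {
            // The partition descriptors are stored as a JSON array encoded in a
            // string, hence the extra parse step in readMetaData.
            JSONArray partitions = new JSONArray(container.getString("partitions"));
            System.out.println(version + " / " + fcversion + " / "
                + partitions.length() + " partitions");
        }
    }
}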
Use of org.json.JSONObject in project Hive by Apache.
The class JoinCondDesc, method getUserLevelJoinCondString.
@Explain(explainLevels = { Level.USER })
public String getUserLevelJoinCondString() {
JSONObject join = new JSONObject(new LinkedHashMap());
try {
switch(type) {
case JoinDesc.INNER_JOIN:
join.put("type", "Inner");
break;
case JoinDesc.FULL_OUTER_JOIN:
join.put("type", "Outer");
break;
case JoinDesc.LEFT_OUTER_JOIN:
join.put("type", "Left Outer");
break;
case JoinDesc.RIGHT_OUTER_JOIN:
join.put("type", "Right Outer");
break;
case JoinDesc.UNIQUE_JOIN:
join.put("type", "Unique");
break;
case JoinDesc.LEFT_SEMI_JOIN:
join.put("type", "Left Semi");
break;
default:
join.put("type", "Unknown Join");
break;
}
join.put("left", left);
join.put("right", right);
} catch (JSONException e) {
// impossible to throw any json exceptions.
LOG.trace(e.getMessage());
}
return join.toString();
}
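A small sketch of what this method returns: for an inner join, and assuming left and right are the integer position tags held by JoinCondDesc, the resulting string looks roughly like {"type":"Inner","left":0,"right":1} (key order depends on the org.json version in use).

import java.util.LinkedHashMap;
import org.json.JSONObject;

public class JoinCondJsonSketch {
    public static void main(String[] args) {
        JSONObject join = new JSONObject(new LinkedHashMap<>());
        join.put("type", "Inner");
        join.put("left", 0);
        join.put("right", 1);
        // Roughly: {"type":"Inner","left":0,"right":1}
        System.out.println(join.toString());
    }
}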
Use of org.json.JSONObject in project Hive by Apache.
The class Op, method inlineJoinOp.
private void inlineJoinOp() throws Exception {
// inline map join operator
if (this.type == OpType.MAPJOIN) {
JSONObject joinObj = opObject.getJSONObject(this.name);
// get the map for posToVertex
JSONObject verticeObj = joinObj.getJSONObject("input vertices:");
Map<String, Vertex> posToVertex = new LinkedHashMap<>();
for (String pos : JSONObject.getNames(verticeObj)) {
String vertexName = verticeObj.getString(pos);
// update the connection
Connection c = null;
for (Connection connection : vertex.parentConnections) {
if (connection.from.name.equals(vertexName)) {
posToVertex.put(pos, connection.from);
c = connection;
break;
}
}
if (c != null) {
parser.addInline(this, c);
}
}
// update the attrs
this.attrs.remove("input vertices:");
// update the keys to use operator name
JSONObject keys = joinObj.getJSONObject("keys:");
// find out the vertex for the big table
Set<Vertex> parentVertexes = new HashSet<>();
for (Connection connection : vertex.parentConnections) {
parentVertexes.add(connection.from);
}
parentVertexes.removeAll(posToVertex.values());
Map<String, String> posToOpId = new LinkedHashMap<>();
if (keys.length() != 0) {
for (String key : JSONObject.getNames(keys)) {
// first search from the posToVertex
if (posToVertex.containsKey(key)) {
Vertex vertex = posToVertex.get(key);
if (vertex.rootOps.size() == 1) {
posToOpId.put(key, vertex.rootOps.get(0).operatorId);
} else if ((vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION)) {
posToOpId.put(key, vertex.name);
} else {
Op singleRSOp = vertex.getSingleRSOp();
if (singleRSOp != null) {
posToOpId.put(key, singleRSOp.operatorId);
} else {
throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
}
}
} else if (parent != null) {
// then search from parent
posToOpId.put(key, parent.operatorId);
} else if (parentVertexes.size() == 1) {
// then assume it is from its own vertex
Vertex vertex = parentVertexes.iterator().next();
parentVertexes.clear();
if (vertex.rootOps.size() == 1) {
posToOpId.put(key, vertex.rootOps.get(0).operatorId);
} else if ((vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION)) {
posToOpId.put(key, vertex.name);
} else {
Op singleRSOp = vertex.getSingleRSOp();
if (singleRSOp != null) {
posToOpId.put(key, singleRSOp.operatorId);
} else {
throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
}
}
} else {
// finally throw an exception
throw new Exception("Can not find the source operator on one of the branches of map join.");
}
}
}
this.attrs.remove("keys:");
StringBuffer sb = new StringBuffer();
JSONArray conditionMap = joinObj.getJSONArray("condition map:");
for (int index = 0; index < conditionMap.length(); index++) {
JSONObject cond = conditionMap.getJSONObject(index);
String k = (String) cond.keys().next();
JSONObject condObject = new JSONObject((String) cond.get(k));
String type = condObject.getString("type");
String left = condObject.getString("left");
String right = condObject.getString("right");
if (keys.length() != 0) {
sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
} else {
// probably a cross product
sb.append("(" + type + "),");
}
}
this.attrs.remove("condition map:");
this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
} else {
// should be merge join
Map<String, String> posToOpId = new LinkedHashMap<>();
if (vertex.mergeJoinDummyVertexs.size() == 0) {
if (vertex.tagToInput.size() != vertex.parentConnections.size()) {
throw new Exception("tagToInput size " + vertex.tagToInput.size() + " is different from parentConnections size " + vertex.parentConnections.size());
}
for (Entry<String, String> entry : vertex.tagToInput.entrySet()) {
Connection c = null;
for (Connection connection : vertex.parentConnections) {
if (connection.from.name.equals(entry.getValue())) {
Vertex v = connection.from;
if (v.rootOps.size() == 1) {
posToOpId.put(entry.getKey(), v.rootOps.get(0).operatorId);
} else if ((v.rootOps.size() == 0 && v.vertexType == VertexType.UNION)) {
posToOpId.put(entry.getKey(), v.name);
} else {
Op singleRSOp = v.getSingleRSOp();
if (singleRSOp != null) {
posToOpId.put(entry.getKey(), singleRSOp.operatorId);
} else {
throw new Exception("There are none or more than one root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
}
}
c = connection;
break;
}
}
if (c == null) {
throw new Exception("Can not find " + entry.getValue() + " while parsing keys of merge join operator");
}
}
} else {
posToOpId.put(vertex.tag, this.parent.operatorId);
for (Vertex v : vertex.mergeJoinDummyVertexs) {
if (v.rootOps.size() != 1) {
throw new Exception("Can not find a single root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
}
posToOpId.put(v.tag, v.rootOps.get(0).operatorId);
}
}
JSONObject joinObj = opObject.getJSONObject(this.name);
// update the keys to use operator name
JSONObject keys = joinObj.getJSONObject("keys:");
if (keys.length() != 0) {
for (String key : JSONObject.getNames(keys)) {
if (!posToOpId.containsKey(key)) {
throw new Exception("Can not find the source operator on one of the branches of merge join.");
}
}
// inline merge join operator in a self-join
if (this.vertex != null) {
for (Vertex v : this.vertex.mergeJoinDummyVertexs) {
parser.addInline(this, new Connection(null, v));
}
}
}
// update the attrs
this.attrs.remove("keys:");
StringBuffer sb = new StringBuffer();
JSONArray conditionMap = joinObj.getJSONArray("condition map:");
for (int index = 0; index < conditionMap.length(); index++) {
JSONObject cond = conditionMap.getJSONObject(index);
String k = (String) cond.keys().next();
JSONObject condObject = new JSONObject((String) cond.get(k));
String type = condObject.getString("type");
String left = condObject.getString("left");
String right = condObject.getString("right");
if (keys.length() != 0) {
sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
} else {
// probably a cross product
sb.append("(" + type + "),");
}
}
this.attrs.remove("condition map:");
this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
}
}
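In both branches above, the "Conds:" attribute is assembled as one <opId>.<key>=<opId>.<key>(<joinType>) fragment per entry of the condition map, with the trailing comma trimmed at the end. A minimal sketch of that assembly step; the posToOpId contents, operator ids, and the shape of the "keys:" object are made up for illustration.

import java.util.LinkedHashMap;
import java.util.Map;
import org.json.JSONObject;

public class CondsStringSketch {
    public static void main(String[] args) {
        // Hypothetical mapping from join position to source operator id.
        Map<String, String> posToOpId = new LinkedHashMap<>();
        posToOpId.put("0", "RS_2");
        posToOpId.put("1", "RS_5");

        // Hypothetical "keys:" object as it might appear in the EXPLAIN JSON.
        JSONObject keys = new JSONObject();
        keys.put("0", "key (type: string)");
        keys.put("1", "key (type: string)");

        String type = "Inner";
        String left = "0";
        String right = "1";
        StringBuilder sb = new StringBuilder();
        sb.append(posToOpId.get(left)).append('.').append(keys.get(left))
          .append('=').append(posToOpId.get(right)).append('.').append(keys.get(right))
          .append('(').append(type).append("),");
        // Trailing comma removed, as in inlineJoinOp:
        System.out.println(sb.substring(0, sb.length() - 1));
        // -> RS_2.key (type: string)=RS_5.key (type: string)(Inner)
    }
}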