Use of org.json.JSONObject in the project MovieGuide by esoxjem — class MoviesListingParser, method parse:
/**
 * Parses a TMDb movie-listing JSON payload into a list of {@link Movie} objects.
 *
 * @param json raw JSON response body expected to contain a "results" array
 * @return the parsed movies; empty when the "results" key is absent or JSON null
 * @throws JSONException if {@code json} is not valid JSON or an entry is malformed
 */
@NonNull
public static List<Movie> parse(String json) throws JSONException {
    // Presized capacity; presumably sized for one page of API results — TODO confirm.
    List<Movie> movies = new ArrayList<>(24);
    JSONObject response = new JSONObject(json);
    // isNull() covers both a missing key and an explicit JSON null.
    if (!response.isNull(RESULTS)) {
        JSONArray results = response.getJSONArray(RESULTS);
        for (int i = 0; i < results.length(); i++) {
            movies.add(getMovie(results.getJSONObject(i)));
        }
    }
    // No "results" key: fall through and return the empty list (empty else removed).
    return movies;
}
Use of org.json.JSONObject in the project MovieGuide by esoxjem — class MovieDetailsParser, method parseReviews:
/**
 * Parses a TMDb reviews JSON payload into a list of {@link Review} objects.
 *
 * <p>Each field is copied only when present and non-null in the JSON, so a
 * {@link Review} may be partially populated.
 *
 * @param body raw JSON response body expected to contain a "results" array
 * @return the parsed reviews; empty when the "results" key is absent or JSON null
 * @throws JSONException if {@code body} is not valid JSON or an entry is malformed
 */
public static List<Review> parseReviews(String body) throws JSONException {
    // Declare as the List interface (program to interfaces), not ArrayList.
    List<Review> reviews = new ArrayList<>(4);
    JSONObject response = new JSONObject(body);
    if (!response.isNull(RESULTS)) {
        JSONArray results = response.getJSONArray(RESULTS);
        for (int i = 0; i < results.length(); i++) {
            Review review = new Review();
            JSONObject reviewJson = results.getJSONObject(i);
            // isNull() covers both a missing key and an explicit JSON null,
            // leaving the corresponding Review field untouched in that case.
            if (!reviewJson.isNull(ID)) {
                review.setId(reviewJson.getString(ID));
            }
            if (!reviewJson.isNull(AUTHOR)) {
                review.setAuthor(reviewJson.getString(AUTHOR));
            }
            if (!reviewJson.isNull(CONTENT)) {
                review.setContent(reviewJson.getString(CONTENT));
            }
            if (!reviewJson.isNull(URL)) {
                review.setUrl(reviewJson.getString(URL));
            }
            reviews.add(review);
        }
    }
    return reviews;
}
Use of org.json.JSONObject in the project Hive by Apache — class Op, method inlineJoinOp:
/**
 * Rewrites this join operator's attributes for user-friendly explain output:
 * replaces the raw "input vertices:", "keys:" and "condition map:" JSON attrs
 * with a single "Conds:" string expressed in terms of operator ids, and
 * registers inlined connections with the parser.
 *
 * <p>Two shapes are handled: a MAPJOIN (positions come from the operator's
 * "input vertices:" object) and, in the else branch, a merge join (positions
 * come from the vertex's tagToInput map or its merge-join dummy vertices).
 *
 * @throws Exception when the source operator for a join branch cannot be
 *         uniquely identified
 */
private void inlineJoinOp() throws Exception {
  // inline map join operator
  if (this.type == OpType.MAPJOIN) {
    JSONObject joinObj = opObject.getJSONObject(this.name);
    // get the map for posToVertex: join position -> small-table parent vertex
    JSONObject verticeObj = joinObj.getJSONObject("input vertices:");
    Map<String, Vertex> posToVertex = new LinkedHashMap<>();
    for (String pos : JSONObject.getNames(verticeObj)) {
      String vertexName = verticeObj.getString(pos);
      // update the connection: find the parent connection whose source vertex
      // carries this name, so it can be inlined into this operator's output
      Connection c = null;
      for (Connection connection : vertex.parentConnections) {
        if (connection.from.name.equals(vertexName)) {
          posToVertex.put(pos, connection.from);
          c = connection;
          break;
        }
      }
      if (c != null) {
        parser.addInline(this, c);
      }
    }
    // update the attrs: the raw vertex listing is replaced by "Conds:" below
    this.attrs.remove("input vertices:");
    // update the keys to use operator name
    JSONObject keys = joinObj.getJSONObject("keys:");
    // find out the vertex for the big table: parents that are NOT small-table
    // vertices remain after the removeAll below
    Set<Vertex> parentVertexes = new HashSet<>();
    for (Connection connection : vertex.parentConnections) {
      parentVertexes.add(connection.from);
    }
    parentVertexes.removeAll(posToVertex.values());
    Map<String, String> posToOpId = new LinkedHashMap<>();
    if (keys.length() != 0) {
      for (String key : JSONObject.getNames(keys)) {
        // first search from the posToVertex (small-table side)
        if (posToVertex.containsKey(key)) {
          // NOTE: this local 'vertex' intentionally shadows the field 'vertex'
          Vertex vertex = posToVertex.get(key);
          if (vertex.rootOps.size() == 1) {
            posToOpId.put(key, vertex.rootOps.get(0).operatorId);
          } else if ((vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION)) {
            // a UNION vertex has no single root op; use the vertex name itself
            posToOpId.put(key, vertex.name);
          } else {
            // otherwise require exactly one ReduceSink op in the vertex
            Op singleRSOp = vertex.getSingleRSOp();
            if (singleRSOp != null) {
              posToOpId.put(key, singleRSOp.operatorId);
            } else {
              throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
            }
          }
        } else // then search from parent
        if (parent != null) {
          posToOpId.put(key, parent.operatorId);
        } else // then assume it is from its own vertex (the single big-table parent)
        if (parentVertexes.size() == 1) {
          Vertex vertex = parentVertexes.iterator().next();
          // clear so a second unmatched key cannot silently reuse this vertex
          parentVertexes.clear();
          if (vertex.rootOps.size() == 1) {
            posToOpId.put(key, vertex.rootOps.get(0).operatorId);
          } else if ((vertex.rootOps.size() == 0 && vertex.vertexType == VertexType.UNION)) {
            posToOpId.put(key, vertex.name);
          } else {
            Op singleRSOp = vertex.getSingleRSOp();
            if (singleRSOp != null) {
              posToOpId.put(key, singleRSOp.operatorId);
            } else {
              throw new Exception("There are none or more than one root operators in a single vertex " + vertex.name + " when hive explain user is trying to identify the operator id.");
            }
          }
        } else // finally throw an exception
        {
          throw new Exception("Can not find the source operator on one of the branches of map join.");
        }
      }
    }
    this.attrs.remove("keys:");
    // NOTE(review): StringBuffer could be StringBuilder (no concurrent access
    // visible here); left unchanged to keep the block byte-identical.
    StringBuffer sb = new StringBuffer();
    JSONArray conditionMap = joinObj.getJSONArray("condition map:");
    for (int index = 0; index < conditionMap.length(); index++) {
      JSONObject cond = conditionMap.getJSONObject(index);
      // each entry is a single-key object; the value is itself a JSON string
      String k = (String) cond.keys().next();
      JSONObject condObject = new JSONObject((String) cond.get(k));
      String type = condObject.getString("type");
      String left = condObject.getString("left");
      String right = condObject.getString("right");
      if (keys.length() != 0) {
        // e.g. "OP_1.col=OP_2.col(Inner),"
        sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
      } else {
        // probably a cross product
        sb.append("(" + type + "),");
      }
    }
    this.attrs.remove("condition map:");
    // strip the trailing comma.
    // NOTE(review): if "condition map:" were an empty array, sb is empty and
    // substring(0, -1) would throw StringIndexOutOfBoundsException — presumably
    // a join always has at least one condition; confirm against the explain format.
    this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
  } else // should be merge join
  {
    Map<String, String> posToOpId = new LinkedHashMap<>();
    if (vertex.mergeJoinDummyVertexs.size() == 0) {
      // normal merge join: every tag maps to an input vertex by name
      if (vertex.tagToInput.size() != vertex.parentConnections.size()) {
        throw new Exception("tagToInput size " + vertex.tagToInput.size() + " is different from parentConnections size " + vertex.parentConnections.size());
      }
      for (Entry<String, String> entry : vertex.tagToInput.entrySet()) {
        Connection c = null;
        for (Connection connection : vertex.parentConnections) {
          if (connection.from.name.equals(entry.getValue())) {
            Vertex v = connection.from;
            if (v.rootOps.size() == 1) {
              posToOpId.put(entry.getKey(), v.rootOps.get(0).operatorId);
            } else if ((v.rootOps.size() == 0 && v.vertexType == VertexType.UNION)) {
              // a UNION vertex has no single root op; use the vertex name
              posToOpId.put(entry.getKey(), v.name);
            } else {
              Op singleRSOp = v.getSingleRSOp();
              if (singleRSOp != null) {
                posToOpId.put(entry.getKey(), singleRSOp.operatorId);
              } else {
                throw new Exception("There are none or more than one root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
              }
            }
            c = connection;
            break;
          }
        }
        if (c == null) {
          throw new Exception("Can not find " + entry.getValue() + " while parsing keys of merge join operator");
        }
      }
    } else {
      // merge join with dummy vertices (e.g. within the same vertex):
      // this operator's own tag maps to its parent operator
      posToOpId.put(vertex.tag, this.parent.operatorId);
      for (Vertex v : vertex.mergeJoinDummyVertexs) {
        if (v.rootOps.size() != 1) {
          throw new Exception("Can not find a single root operators in a single vertex " + v.name + " when hive explain user is trying to identify the operator id.");
        }
        posToOpId.put(v.tag, v.rootOps.get(0).operatorId);
      }
    }
    JSONObject joinObj = opObject.getJSONObject(this.name);
    // update the keys to use operator name
    JSONObject keys = joinObj.getJSONObject("keys:");
    if (keys.length() != 0) {
      // sanity check: every key position must have been resolved above
      for (String key : JSONObject.getNames(keys)) {
        if (!posToOpId.containsKey(key)) {
          throw new Exception("Can not find the source operator on one of the branches of merge join.");
        }
      }
      // inline merge join operator in a self-join
      if (this.vertex != null) {
        for (Vertex v : this.vertex.mergeJoinDummyVertexs) {
          parser.addInline(this, new Connection(null, v));
        }
      }
    }
    // update the attrs
    this.attrs.remove("keys:");
    StringBuffer sb = new StringBuffer();
    JSONArray conditionMap = joinObj.getJSONArray("condition map:");
    for (int index = 0; index < conditionMap.length(); index++) {
      JSONObject cond = conditionMap.getJSONObject(index);
      String k = (String) cond.keys().next();
      JSONObject condObject = new JSONObject((String) cond.get(k));
      String type = condObject.getString("type");
      String left = condObject.getString("left");
      String right = condObject.getString("right");
      if (keys.length() != 0) {
        sb.append(posToOpId.get(left) + "." + keys.get(left) + "=" + posToOpId.get(right) + "." + keys.get(right) + "(" + type + "),");
      } else {
        // probably a cross product
        sb.append("(" + type + "),");
      }
    }
    this.attrs.remove("condition map:");
    // strip the trailing comma (see NOTE(review) in the map-join branch above
    // about the empty-condition-map edge case)
    this.attrs.put("Conds:", sb.substring(0, sb.length() - 1));
  }
}
Use of org.json.JSONObject in the project Hive by Apache — class TezJsonParser, method extractStagesAndPlans:
/**
 * Extracts the stage dependency graph and per-stage plans from an EXPLAIN
 * JSON document.
 *
 * <p>The "STAGE DEPENDENCIES" object is walked twice: the first pass creates a
 * {@code Stage} for every name so that the second pass can resolve dependency
 * edges between already-registered stages. "STAGE PLANS" is then walked to
 * populate each stage's vertices.
 *
 * @param inputObject root JSON object of the explain output
 * @throws Exception propagated from dependency or vertex extraction
 */
public void extractStagesAndPlans(JSONObject inputObject) throws Exception {
  // extract stages.
  // Use optJSONObject: getJSONObject throws JSONException when the key is
  // absent, which made the original null guard dead code; optJSONObject
  // returns null instead, so a plan without this section is skipped gracefully.
  JSONObject dependency = inputObject.optJSONObject("STAGE DEPENDENCIES");
  if (dependency != null && dependency.length() > 0) {
    // iterate for the first time to get all the names of stages.
    for (String stageName : JSONObject.getNames(dependency)) {
      this.stages.put(stageName, new Stage(stageName, this));
    }
    // iterate for the second time to get all the dependency.
    for (String stageName : JSONObject.getNames(dependency)) {
      JSONObject dependentStageNames = dependency.getJSONObject(stageName);
      this.stages.get(stageName).addDependency(dependentStageNames, this.stages);
    }
  }
  // extract stage plans (same optJSONObject rationale as above)
  JSONObject stagePlans = inputObject.optJSONObject("STAGE PLANS");
  if (stagePlans != null && stagePlans.length() > 0) {
    for (String stageName : JSONObject.getNames(stagePlans)) {
      JSONObject stagePlan = stagePlans.getJSONObject(stageName);
      this.stages.get(stageName).extractVertex(stagePlan);
    }
  }
}
Use of org.json.JSONObject in the project spring-boot-admin by codecentric — class ApplicationTest, method test_1_5_json_format:
/**
 * Verifies that the 1.5.x registration JSON format deserializes into an
 * {@link Application} with all URLs, the name, and the metadata map intact.
 */
@Test
public void test_1_5_json_format() throws Exception {
    JSONObject metadata = new JSONObject().put("labels", "foo,bar");
    JSONObject registration = new JSONObject()
            .put("name", "test")
            .put("managementUrl", "http://test")
            .put("healthUrl", "http://health")
            .put("serviceUrl", "http://service")
            .put("metadata", metadata);
    Application value = objectMapper.readValue(registration.toString(), Application.class);
    assertThat(value.getName(), is("test"));
    assertThat(value.getManagementUrl(), is("http://test"));
    assertThat(value.getHealthUrl(), is("http://health"));
    assertThat(value.getServiceUrl(), is("http://service"));
    assertThat(value.getMetadata(), is(Collections.singletonMap("labels", "foo,bar")));
}
Aggregations