Use of org.json.simple.JSONArray in project hadoop by Apache.
The class FSOperations, method aclStatusToJSON.
/**
 * Converts an <code>AclStatus</code> object into a JSON object.
 *
 * @param aclStatus AclStatus object
 *
 * @return The JSON representation of the ACLs for the file
 */
@SuppressWarnings({"unchecked"})
private static Map<String, Object> aclStatusToJSON(AclStatus aclStatus) {
  Map<String, Object> json = new LinkedHashMap<String, Object>();
  Map<String, Object> inner = new LinkedHashMap<String, Object>();
  JSONArray entriesArray = new JSONArray();
  inner.put(HttpFSFileSystem.OWNER_JSON, aclStatus.getOwner());
  inner.put(HttpFSFileSystem.GROUP_JSON, aclStatus.getGroup());
  inner.put(HttpFSFileSystem.ACL_STICKY_BIT_JSON, aclStatus.isStickyBit());
  for (AclEntry e : aclStatus.getEntries()) {
    entriesArray.add(e.toString());
  }
  inner.put(HttpFSFileSystem.ACL_ENTRIES_JSON, entriesArray);
  json.put(HttpFSFileSystem.ACL_STATUS_JSON, inner);
  return json;
}
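For reference, serializing the returned map with json-simple yields the WebHDFS-style ACL document. A minimal, standalone sketch of that shape, using literal keys in place of the HttpFSFileSystem constants (the literal values are an assumption) and hard-coded data instead of a real AclStatus:

import java.util.LinkedHashMap;
import java.util.Map;
import org.json.simple.JSONArray;
import org.json.simple.JSONValue;

public class AclJsonSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    Map<String, Object> json = new LinkedHashMap<>();
    Map<String, Object> inner = new LinkedHashMap<>();
    JSONArray entries = new JSONArray();
    entries.add("user:joe:rwx");        // AclEntry.toString() produces strings of this form
    entries.add("group:developers:r-x");
    inner.put("owner", "hdfs");         // assumed value of HttpFSFileSystem.OWNER_JSON
    inner.put("group", "supergroup");   // assumed value of HttpFSFileSystem.GROUP_JSON
    inner.put("stickyBit", false);      // assumed value of HttpFSFileSystem.ACL_STICKY_BIT_JSON
    inner.put("entries", entries);      // assumed value of HttpFSFileSystem.ACL_ENTRIES_JSON
    json.put("AclStatus", inner);       // assumed value of HttpFSFileSystem.ACL_STATUS_JSON
    // {"AclStatus":{"owner":"hdfs","group":"supergroup","stickyBit":false,"entries":["user:joe:rwx","group:developers:r-x"]}}
    System.out.println(JSONValue.toJSONString(json));
  }
}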
Use of org.json.simple.JSONArray in project hadoop by Apache.
The class FSOperations, method toJson.
/**
 * Converts an array of <code>FileStatus</code> objects into a JSON object
 * suitable for wire transport.
 *
 * @param fileStatuses list of FileStatus objects
 * @return JSON map suitable for wire transport
 */
@SuppressWarnings({"unchecked"})
private static Map<String, Object> toJson(FileStatus[] fileStatuses) {
  Map<String, Object> json = new LinkedHashMap<>();
  Map<String, Object> inner = new LinkedHashMap<>();
  JSONArray statuses = new JSONArray();
  for (FileStatus f : fileStatuses) {
    statuses.add(toJsonInner(f, false));
  }
  inner.put(HttpFSFileSystem.FILE_STATUS_JSON, statuses);
  json.put(HttpFSFileSystem.FILE_STATUSES_JSON, inner);
  return json;
}
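This variant nests a JSONArray of per-file maps inside a FileStatuses/FileStatus wrapper. A minimal sketch of the same nesting, assuming the constants carry the WebHDFS names and with a hypothetical fileEntry helper standing in for toJsonInner:

import java.util.LinkedHashMap;
import java.util.Map;
import org.json.simple.JSONArray;
import org.json.simple.JSONValue;

public class FileStatusesJsonSketch {
  // hypothetical stand-in for FSOperations.toJsonInner(FileStatus, boolean)
  private static Map<String, Object> fileEntry(String name, long length) {
    Map<String, Object> m = new LinkedHashMap<>();
    m.put("pathSuffix", name);
    m.put("length", length);
    return m;
  }

  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    JSONArray statuses = new JSONArray();
    statuses.add(fileEntry("a.txt", 10L));
    statuses.add(fileEntry("b.txt", 20L));
    Map<String, Object> inner = new LinkedHashMap<>();
    inner.put("FileStatus", statuses);   // assumed value of HttpFSFileSystem.FILE_STATUS_JSON
    Map<String, Object> json = new LinkedHashMap<>();
    json.put("FileStatuses", inner);     // assumed value of HttpFSFileSystem.FILE_STATUSES_JSON
    // {"FileStatuses":{"FileStatus":[{"pathSuffix":"a.txt","length":10},{"pathSuffix":"b.txt","length":20}]}}
    System.out.println(JSONValue.toJSONString(json));
  }
}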
Use of org.json.simple.JSONArray in project zookeeper by Apache.
The class Fs, method handleRequest.
String handleRequest(JsonRequest request) throws Exception {
  JSONArray filelist = new JSONArray();
  File base = new File(request.getString("path", "/"));
  if (!base.exists() || !base.isDirectory()) {
    throw new FileNotFoundException("Couldn't find [" + request + "]");
  }
  File[] files = base.listFiles();
  // directories first, then case-insensitive by name
  Arrays.sort(files, new Comparator<File>() {
    public int compare(File o1, File o2) {
      if (o1.isDirectory() != o2.isDirectory()) {
        if (o1.isDirectory()) {
          return -1;
        } else {
          return 1;
        }
      }
      return o1.getName().compareToIgnoreCase(o2.getName());
    }
  });
  for (File f : files) {
    JSONObject o = new JSONObject();
    o.put("file", f.getName());
    o.put("type", f.isDirectory() ? "D" : "F");
    o.put("path", f.getCanonicalPath());
    filelist.add(o);
  }
  return JSONValue.toJSONString(filelist);
}
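On the consumer side, the string produced by JSONValue.toJSONString(filelist) parses back into a JSONArray of JSONObjects. A small sketch with a made-up payload:

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class FsResponseSketch {
  public static void main(String[] args) {
    // made-up example of the kind of listing Fs.handleRequest returns
    String payload = "[{\"file\":\"conf\",\"type\":\"D\",\"path\":\"/opt/zookeeper/conf\"},"
        + "{\"file\":\"zoo.cfg\",\"type\":\"F\",\"path\":\"/opt/zookeeper/conf/zoo.cfg\"}]";
    JSONArray filelist = (JSONArray) JSONValue.parse(payload);
    for (Object item : filelist) {
      JSONObject o = (JSONObject) item;
      System.out.println(o.get("type") + " " + o.get("file") + " -> " + o.get("path"));
    }
  }
}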
Use of org.json.simple.JSONArray in project zookeeper by Apache.
The class Throughput, method handleRequest.
public String handleRequest(JsonRequest request) throws Exception {
  long starttime = 0;
  long endtime = 0;
  long period = 0;
  long scale = 0;
  starttime = request.getNumber("start", 0);
  endtime = request.getNumber("end", 0);
  period = request.getNumber("period", 0);
  if (starttime == 0) {
    starttime = source.getStartTime();
  }
  if (endtime == 0) {
    if (period > 0) {
      endtime = starttime + period;
    } else {
      endtime = source.getEndTime();
    }
  }
  String scalestr = request.getString("scale", "minutes");
  if (scalestr.equals("seconds")) {
    scale = MS_PER_SEC;
  } else if (scalestr.equals("hours")) {
    scale = MS_PER_HOUR;
  } else {
    scale = MS_PER_MIN;
  }
  LogIterator iter = source.iterator(starttime, endtime);
  long current = 0;
  long currentms = 0;
  HashSet<Long> zxids_ms = new HashSet<Long>();
  long zxidcount = 0;
  JSONArray events = new JSONArray();
  // count distinct zxids per millisecond and emit one {time, count} point per scale bucket
  while (iter.hasNext()) {
    LogEntry e = iter.next();
    if (e.getType() != LogEntry.Type.TXN) {
      continue;
    }
    TransactionEntry cxn = (TransactionEntry) e;
    long ms = cxn.getTimestamp();
    long inscale = ms / scale;
    if (currentms != ms && currentms != 0) {
      zxidcount += zxids_ms.size();
      zxids_ms.clear();
    }
    if (inscale != current && current != 0) {
      JSONObject o = new JSONObject();
      o.put("time", current * scale);
      o.put("count", zxidcount);
      events.add(o);
      zxidcount = 0;
    }
    current = inscale;
    currentms = ms;
    zxids_ms.add(cxn.getZxid());
  }
  // emit the final bucket
  JSONObject o = new JSONObject();
  o.put("time", current * scale);
  o.put("count", zxidcount);
  events.add(o);
  iter.close();
  return JSONValue.toJSONString(events);
}
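Stripped of the log-iterator plumbing, the pattern is: map each event timestamp to a time bucket, count events per bucket, and emit one {time, count} JSONObject per bucket into a JSONArray. A minimal sketch with made-up timestamps (it counts raw events rather than distinct zxids):

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class ThroughputSketch {
  private static final long MS_PER_SEC = 1000;

  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    long[] timestamps = {1000, 1200, 1900, 2100, 2500, 3050};  // made-up event times in ms
    long scale = MS_PER_SEC;
    JSONArray events = new JSONArray();
    long current = 0;
    long count = 0;
    for (long ms : timestamps) {
      long bucket = ms / scale;
      if (bucket != current && current != 0) {
        JSONObject o = new JSONObject();   // close the previous bucket
        o.put("time", current * scale);
        o.put("count", count);
        events.add(o);
        count = 0;
      }
      current = bucket;
      count++;
    }
    JSONObject o = new JSONObject();       // flush the final bucket
    o.put("time", current * scale);
    o.put("count", count);
    events.add(o);
    // [{"time":1000,"count":3},{"time":2000,"count":2},{"time":3000,"count":1}]
    System.out.println(JSONValue.toJSONString(events));
  }
}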
Use of org.json.simple.JSONArray in project OpenGrok by OpenGrok.
The class Results, method prettyPrint.
/**
 * Prints out results in html form. The following search helper fields are
 * required to be properly initialized: <ul>
 * <li>{@link SearchHelper#dataRoot}</li>
 * <li>{@link SearchHelper#contextPath}</li>
 * <li>{@link SearchHelper#searcher}</li>
 * <li>{@link SearchHelper#hits}</li>
 * <li>{@link SearchHelper#historyContext} (ignored if {@code null})</li>
 * <li>{@link SearchHelper#sourceContext} (ignored if {@code null})</li>
 * <li>{@link SearchHelper#summarizer} (if sourceContext is not {@code null})</li>
 * <li>{@link SearchHelper#compressed} (if sourceContext is not {@code null})</li>
 * <li>{@link SearchHelper#sourceRoot} (if sourceContext or historyContext is not {@code null})</li>
 * </ul>
 *
 * @param out write destination
 * @param sh search helper which has all required fields set
 * @param start index of the first hit to print
 * @param end index of the last hit to print
 * @throws HistoryException
 * @throws IOException
 * @throws ClassNotFoundException
 */
public static void prettyPrint(Writer out, SearchHelper sh, int start, int end)
    throws HistoryException, IOException, ClassNotFoundException {
  Project p;
  String ctxE = Util.URIEncodePath(sh.contextPath);
  String xrefPrefix = sh.contextPath + Prefix.XREF_P;
  String morePrefix = sh.contextPath + Prefix.MORE_P;
  String xrefPrefixE = ctxE + Prefix.XREF_P;
  File xrefDataDir = new File(sh.dataRoot, Prefix.XREF_P.toString());
  for (Map.Entry<String, ArrayList<Document>> entry :
      createMap(sh.searcher, sh.hits, start, end).entrySet()) {
    String parent = entry.getKey();
    out.write("<tr class=\"dir\"><td colspan=\"3\"><a href=\"");
    out.write(xrefPrefixE);
    out.write(Util.URIEncodePath(parent));
    out.write("/\">");
    // htmlize ???
    out.write(parent);
    out.write("/</a>");
    if (sh.desc != null) {
      out.write(" - <i>");
      // htmlize ???
      out.write(sh.desc.get(parent));
      out.write("</i>");
    }
    JSONArray messages;
    if ((p = Project.getProject(parent)) != null
        && (messages = Util.messagesToJson(p,
            RuntimeEnvironment.MESSAGES_MAIN_PAGE_TAG)).size() > 0) {
      out.write(" <a ");
      out.write("href=\"" + xrefPrefix + "/" + p.getName() + "\">");
      out.write("<span class=\"important-note important-note-rounded\" data-messages='" + messages + "'>!</span>");
      out.write("</a>");
    }
    out.write("</td></tr>");
    for (Document doc : entry.getValue()) {
      String rpath = doc.get(QueryBuilder.PATH);
      String rpathE = Util.URIEncodePath(rpath);
      DateFormat df;
      out.write("<tr>");
      Util.writeHAD(out, sh.contextPath, rpathE, false);
      out.write("<td class=\"f\"><a href=\"");
      out.write(xrefPrefixE);
      out.write(rpathE);
      out.write("\"");
      if (RuntimeEnvironment.getInstance().isLastEditedDisplayMode()) {
        try {
          // insert last edited date if possible
          df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
          String dd = df.format(DateTools.stringToDate(doc.get("date")));
          out.write(" class=\"result-annotate\" title=\"");
          out.write("Last modified: ");
          out.write(dd);
          out.write("\"");
        } catch (ParseException ex) {
          LOGGER.log(Level.WARNING, "An error parsing date information", ex);
        }
      }
      out.write(">");
      // htmlize ???
      out.write(rpath.substring(rpath.lastIndexOf('/') + 1));
      out.write("</a>");
      out.write("</td><td><tt class=\"con\">");
      if (sh.sourceContext != null) {
        Genre genre = Genre.get(doc.get("t"));
        Definitions tags = null;
        IndexableField tagsField = doc.getField(QueryBuilder.TAGS);
        if (tagsField != null) {
          tags = Definitions.deserialize(tagsField.binaryValue().bytes);
        }
        Scopes scopes;
        IndexableField scopesField = doc.getField(QueryBuilder.SCOPES);
        if (scopesField != null) {
          scopes = Scopes.deserialize(scopesField.binaryValue().bytes);
        } else {
          scopes = new Scopes();
        }
        if (Genre.XREFABLE == genre && sh.summarizer != null) {
          String xtags = getTags(xrefDataDir, rpath, sh.compressed);
          // FIXME use Highlighter from lucene contrib here,
          // instead of summarizer, we'd also get rid of
          // apache lucene in whole source ...
          out.write(sh.summarizer.getSummary(xtags).toString());
        } else if (Genre.HTML == genre && sh.summarizer != null) {
          String htags = getTags(sh.sourceRoot, rpath, false);
          out.write(sh.summarizer.getSummary(htags).toString());
        } else {
          FileReader r = genre == Genre.PLAIN
              ? new FileReader(new File(sh.sourceRoot, rpath)) : null;
          sh.sourceContext.getContext(r, out, xrefPrefix, morePrefix, rpath,
              tags, true, sh.builder.isDefSearch(), null, scopes);
        }
      }
      if (sh.historyContext != null) {
        sh.historyContext.getContext(new File(sh.sourceRoot, rpath), rpath,
            out, sh.contextPath);
      }
      out.write("</tt></td></tr>\n");
    }
  }
}
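The JSONArray here comes from Util.messagesToJson and is written straight into a data-messages attribute; that works because json-simple's JSONArray.toString() returns JSON text. A small sketch with a hypothetical messages payload standing in for OpenGrok's helper:

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;

public class MessagesAttributeSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    // hypothetical stand-in for Util.messagesToJson(project, tag); the keys are made up
    JSONArray messages = new JSONArray();
    JSONObject msg = new JSONObject();
    msg.put("messageLevel", "info");
    msg.put("text", "project is being reindexed");
    messages.add(msg);
    // concatenation calls toString(), which yields JSON, mirroring the markup prettyPrint writes
    String span = "<span class=\"important-note important-note-rounded\" data-messages='"
        + messages + "'>!</span>";
    System.out.println(span);
  }
}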