Use of com.fasterxml.jackson.core.JsonFactory in project hadoop by apache.
The class QueueManager, method dumpConfiguration.
/**
 * Dumps the configuration of the queue hierarchy described by the
 * given XML file. To be used directly ONLY FOR TESTING.
 * @param out the writer object to which the dump is written.
 * @param configFile the path of the queue configuration XML file.
 * @param conf the configuration from which ACL and deprecated
 *             queue-name settings are read; may be null.
 * @throws IOException if the dump cannot be written.
 */
static void dumpConfiguration(Writer out, String configFile, Configuration conf) throws IOException {
  if (conf != null && conf.get(DeprecatedQueueConfigurationParser.MAPRED_QUEUE_NAMES_KEY) != null) {
    return;
  }
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createGenerator(out);
  QueueConfigurationParser parser;
  boolean aclsEnabled = false;
  if (conf != null) {
    aclsEnabled = conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false);
  }
  if (configFile != null && !"".equals(configFile)) {
    parser = new QueueConfigurationParser(configFile, aclsEnabled);
  } else {
    parser = getQueueConfigurationParser(null, false, aclsEnabled);
  }
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("queues");
  dumpGenerator.writeStartArray();
  dumpConfiguration(dumpGenerator, parser.getRoot().getChildren());
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  dumpGenerator.flush();
}
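A minimal sketch of how a test might drive this dump; since dumpConfiguration is package-private, the caller would live in the same package as QueueManager, and the file name "mapred-queues.xml" is illustrative:

// Hypothetical same-package test usage.
StringWriter writer = new StringWriter();
QueueManager.dumpConfiguration(writer, "mapred-queues.xml", new Configuration());
// The dump is a single JSON object, e.g. {"queues":[...]}
System.out.println(writer.toString());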
Use of com.fasterxml.jackson.core.JsonFactory in project hadoop by apache.
The class SLSUtils, method parseNodesFromNodeFile.
/**
 * Parses the input node file and returns the rack-qualified name of
 * each host, in the form "/rack/node".
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile) throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  Reader input = new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List nodes = (List) jsonE.get("nodes");
      for (Object o : nodes) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
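For context, the node file this method consumes is a sequence of JSON objects, one per rack; the field names come straight from the parsing code above, and the values are illustrative:

{"rack": "rack1", "nodes": [{"node": "host1"}, {"node": "host2"}]}
{"rack": "rack2", "nodes": [{"node": "host3"}]}

Given that input, a caller gets back the rack-qualified names:

Set<String> nodes = SLSUtils.parseNodesFromNodeFile("nodes.json");
// nodes: "/rack1/host1", "/rack1/host2", "/rack2/host3"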
Use of com.fasterxml.jackson.core.JsonFactory in project hadoop by apache.
The class StatePool, method write.
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // define a module
  SimpleModule module = new SimpleModule("State Serializer", new Version(0, 1, 1, "FINAL", "", ""));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());
  // register the module with the object-mapper
  outMapper.registerModule(module);
  JsonFactory outFactory = outMapper.getFactory();
  JsonGenerator jGen = outFactory.createGenerator((DataOutputStream) out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();
  jGen.writeObject(this);
  jGen.close();
}
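The commented-out StateSerializer would be a custom Jackson serializer added to the module before registration; a minimal sketch, where the getId() accessor on State is an assumption for illustration only:

class StateSerializer extends JsonSerializer<State> {
  @Override
  public void serialize(State state, JsonGenerator gen, SerializerProvider provider) throws IOException {
    // write one JSON object per State
    gen.writeStartObject();
    gen.writeStringField("id", state.getId()); // hypothetical accessor
    gen.writeEndObject();
  }
}

// wired up before outMapper.registerModule(module):
module.addSerializer(State.class, new StateSerializer());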
Use of com.fasterxml.jackson.core.JsonFactory in project hadoop by apache.
The class TestHistograms, method main.
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);
  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (fileName.startsWith("input")) {
      LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
      String testName = fileName.substring("input".length());
      Path goldFilePath = new Path(filePath.getParent(), "gold" + testName);
      ObjectMapper mapper = new ObjectMapper();
      JsonFactory factory = mapper.getFactory();
      FSDataOutputStream ostream = lfs.create(goldFilePath, true);
      JsonGenerator gen = factory.createGenerator(ostream, JsonEncoding.UTF8);
      gen.useDefaultPrettyPrinter();
      gen.writeObject(newResult);
      gen.close();
    } else {
      System.err.println("Input file name does not start with \"input\"; file " + fileName + " skipped.");
    }
  }
}
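Reading a generated gold file back is the mirror image; a minimal sketch reusing lfs and goldFilePath from above, assuming LoggedDiscreteCDF round-trips through default Jackson databinding:

ObjectMapper mapper = new ObjectMapper();
// FSDataInputStream is an InputStream, so readValue can consume it directly
LoggedDiscreteCDF golden = mapper.readValue(lfs.open(goldFilePath), LoggedDiscreteCDF.class);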
Use of com.fasterxml.jackson.core.JsonFactory in project hadoop by apache.
The class SLSRunner, method startAMFromSLSTraces.
/**
 * Parses workload information from SLS trace files and schedules an
 * AM simulator for each job found.
 */
@SuppressWarnings("unchecked")
private void startAMFromSLSTraces(Resource containerResource, int heartbeatInterval) throws IOException {
  // parse from sls traces
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  for (String inputTrace : inputTraces) {
    Reader input = new InputStreamReader(new FileInputStream(inputTrace), "UTF-8");
    try {
      Iterator<Map> i = mapper.readValues(jsonF.createParser(input), Map.class);
      while (i.hasNext()) {
        Map jsonJob = i.next();
        // load job information
        long jobStartTime = Long.parseLong(jsonJob.get("job.start.ms").toString());
        long jobFinishTime = Long.parseLong(jsonJob.get("job.end.ms").toString());
        String user = (String) jsonJob.get("job.user");
        if (user == null) {
          user = "default";
        }
        String queue = jsonJob.get("job.queue.name").toString();
        String oldAppId = jsonJob.get("job.id").toString();
        boolean isTracked = trackedApps.contains(oldAppId);
        int queueSize = queueAppNumMap.containsKey(queue) ? queueAppNumMap.get(queue) : 0;
        queueSize++;
        queueAppNumMap.put(queue, queueSize);
        // tasks
        List tasks = (List) jsonJob.get("job.tasks");
        if (tasks == null || tasks.size() == 0) {
          continue;
        }
        List<ContainerSimulator> containerList = new ArrayList<ContainerSimulator>();
        for (Object o : tasks) {
          Map jsonTask = (Map) o;
          String hostname = jsonTask.get("container.host").toString();
          long taskStart = Long.parseLong(jsonTask.get("container.start.ms").toString());
          long taskFinish = Long.parseLong(jsonTask.get("container.end.ms").toString());
          long lifeTime = taskFinish - taskStart;
          // Set memory and vcores from job trace file
          Resource res = Resources.clone(containerResource);
          if (jsonTask.containsKey("container.memory")) {
            int containerMemory = Integer.parseInt(jsonTask.get("container.memory").toString());
            res.setMemorySize(containerMemory);
          }
          if (jsonTask.containsKey("container.vcores")) {
            int containerVCores = Integer.parseInt(jsonTask.get("container.vcores").toString());
            res.setVirtualCores(containerVCores);
          }
          int priority = Integer.parseInt(jsonTask.get("container.priority").toString());
          String type = jsonTask.get("container.type").toString();
          containerList.add(new ContainerSimulator(res, lifeTime, hostname, priority, type));
        }
        // create a new AM
        String amType = jsonJob.get("am.type").toString();
        AMSimulator amSim = (AMSimulator) ReflectionUtils.newInstance(amClassMap.get(amType), new Configuration());
        if (amSim != null) {
          amSim.init(AM_ID++, heartbeatInterval, containerList, rm, this, jobStartTime, jobFinishTime, user, queue, isTracked, oldAppId);
          runner.schedule(amSim);
          maxRuntime = Math.max(maxRuntime, jobFinishTime);
          numTasks += containerList.size();
          amMap.put(oldAppId, amSim);
        }
      }
    } finally {
      input.close();
    }
  }
}
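For context, a single job record in an SLS trace file has roughly the shape below; every field name appears in the parsing code above, and all values are illustrative:

{
  "am.type" : "mapreduce",
  "job.start.ms" : 0,
  "job.end.ms" : 95375,
  "job.queue.name" : "sls_queue_1",
  "job.id" : "job_1",
  "job.user" : "default",
  "job.tasks" : [ {
    "container.host" : "/default-rack/node1",
    "container.start.ms" : 6664,
    "container.end.ms" : 23707,
    "container.priority" : 20,
    "container.type" : "map"
  } ]
}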