Use of javax.xml.parsers.DocumentBuilderFactory in the Apache Hadoop project.
The class HostsFileReader, method readXmlFileToMapWithFileInputStream:
public static void readXmlFileToMapWithFileInputStream(String type,
    String filename, InputStream fileInputStream, Map<String, Integer> map)
    throws IOException {
  Document dom;
  DocumentBuilderFactory builder = DocumentBuilderFactory.newInstance();
  try {
    DocumentBuilder db = builder.newDocumentBuilder();
    dom = db.parse(fileInputStream);
    // Examples:
    // <host><name>host1</name></host>
    // <host><name>host2</name><timeout>123</timeout></host>
    // <host><name>host3</name><timeout>-1</timeout></host>
    // <host><name>host4, host5,host6</name><timeout>1800</timeout></host>
    Element doc = dom.getDocumentElement();
    NodeList nodes = doc.getElementsByTagName("host");
    for (int i = 0; i < nodes.getLength(); i++) {
      Node node = nodes.item(i);
      if (node.getNodeType() == Node.ELEMENT_NODE) {
        Element e = (Element) node;
        // Support both single host and comma-separated list of hosts.
        String v = readFirstTagValue(e, "name");
        String[] hosts = StringUtils.getTrimmedStrings(v);
        String str = readFirstTagValue(e, "timeout");
        Integer timeout = (str == null) ? null : Integer.parseInt(str);
        for (String host : hosts) {
          map.put(host, timeout);
          LOG.info("Adding a node \"" + host + "\" to the list of " + type
              + " hosts from " + filename);
        }
      }
    }
  } catch (IOException | SAXException | ParserConfigurationException e) {
    LOG.fatal("error parsing " + filename, e);
    throw new RuntimeException(e);
  } finally {
    fileInputStream.close();
  }
}
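For readers outside the Hadoop tree, here is a minimal, self-contained sketch of the same parsing pattern: build a DOM from an InputStream, walk the host elements, and collect an optional per-host timeout. The XML payload mirrors the examples in the comments above; HostsXmlDemo is a hypothetical class, and getTextContent() stands in for Hadoop's readFirstTagValue helper.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

public class HostsXmlDemo {
  public static void main(String[] args) throws Exception {
    String xml =
        "<hosts>"
      + "<host><name>host1</name></host>"
      + "<host><name>host4, host5,host6</name><timeout>1800</timeout></host>"
      + "</hosts>";
    InputStream in =
        new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8));

    DocumentBuilder db =
        DocumentBuilderFactory.newInstance().newDocumentBuilder();
    Document dom = db.parse(in);

    Map<String, Integer> map = new HashMap<>();
    NodeList nodes = dom.getDocumentElement().getElementsByTagName("host");
    for (int i = 0; i < nodes.getLength(); i++) {
      Element e = (Element) nodes.item(i);
      // getTextContent() replaces Hadoop's readFirstTagValue helper here.
      String names = e.getElementsByTagName("name").item(0).getTextContent();
      NodeList t = e.getElementsByTagName("timeout");
      // A missing <timeout> element maps to a null timeout, as above.
      Integer timeout = (t.getLength() == 0)
          ? null : Integer.parseInt(t.item(0).getTextContent().trim());
      // Support both a single host and a comma-separated list of hosts.
      for (String host : names.split(",")) {
        map.put(host.trim(), timeout);
      }
    }
    // e.g. {host1=null, host4=1800, host5=1800, host6=1800}
    // (HashMap iteration order is not guaranteed)
    System.out.println(map);
  }
}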
Use of javax.xml.parsers.DocumentBuilderFactory in the Apache Hadoop project.
The class TestAMWebServices, method verifyBlacklistedNodesInfoXML:
public void verifyBlacklistedNodesInfoXML(String xml, AppContext ctx)
    throws JSONException, Exception {
  DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
  DocumentBuilder db = dbf.newDocumentBuilder();
  InputSource is = new InputSource();
  is.setCharacterStream(new StringReader(xml));
  Document dom = db.parse(is);
  NodeList infonodes = dom.getElementsByTagName("blacklistednodesinfo");
  assertEquals("incorrect number of elements", 1, infonodes.getLength());
  NodeList nodes = dom.getElementsByTagName("blacklistedNodes");
  Set<String> blacklistedNodes = ctx.getBlacklistedNodes();
  assertEquals("incorrect number of elements", blacklistedNodes.size(),
      nodes.getLength());
  for (int i = 0; i < nodes.getLength(); i++) {
    Element element = (Element) nodes.item(i);
    assertTrue(blacklistedNodes.contains(
        element.getFirstChild().getNodeValue()));
  }
}
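Every test method in this listing converts a response string to a DOM with the same InputSource plus StringReader idiom. Here is a standalone sketch of just that idiom with an illustrative payload; XmlStringParseDemo and the node names in main are hypothetical, not taken from Hadoop.

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class XmlStringParseDemo {
  // Parse an in-memory XML string via InputSource + StringReader,
  // the JAXP idiom repeated in each of the tests above.
  static Document parse(String xml) throws Exception {
    DocumentBuilder db =
        DocumentBuilderFactory.newInstance().newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    return db.parse(is);
  }

  public static void main(String[] args) throws Exception {
    Document dom = parse("<blacklistednodesinfo>"
        + "<blacklistedNodes>node1</blacklistedNodes>"
        + "<blacklistedNodes>node2</blacklistedNodes>"
        + "</blacklistednodesinfo>");
    NodeList nodes = dom.getElementsByTagName("blacklistedNodes");
    for (int i = 0; i < nodes.getLength(); i++) {
      // getFirstChild() is the text node inside each <blacklistedNodes>.
      System.out.println(nodes.item(i).getFirstChild().getNodeValue());
    }
  }
}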
Use of javax.xml.parsers.DocumentBuilderFactory in the Apache Hadoop project.
The class TestAMWebServicesJobs, method testJobCountersXML:
@Test
public void testJobCountersXML() throws Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").path(jobId).path("counters")
        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList info = dom.getElementsByTagName("jobCounters");
    verifyAMJobCountersXML(info, jobsMap.get(id));
  }
}
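The tests here accept DocumentBuilderFactory's default configuration, which is reasonable for trusted, locally generated responses. When the XML comes from an untrusted source, the factory is typically hardened against XXE before newDocumentBuilder() is called. A sketch of that hardening, not part of the Hadoop code above: FEATURE_SECURE_PROCESSING is standard JAXP, and the URL-named features are the usual Xerces switches.

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilderFactory;

public class HardenedFactoryDemo {
  public static void main(String[] args) throws Exception {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    // Standard JAXP secure-processing switch.
    dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
    // Xerces features that reject DOCTYPEs and external entities,
    // guarding against XXE when the input is untrusted.
    dbf.setFeature(
        "http://apache.org/xml/features/disallow-doctype-decl", true);
    dbf.setFeature(
        "http://xml.org/sax/features/external-general-entities", false);
    dbf.setFeature(
        "http://xml.org/sax/features/external-parameter-entities", false);
    System.out.println("factory configured: " + dbf.getClass().getName());
  }
}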
Use of javax.xml.parsers.DocumentBuilderFactory in the Apache Hadoop project.
The class TestAMWebServicesAttempts, method testTaskAttemptIdXML:
@Test
public void testTaskAttemptIdXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      for (TaskAttempt att : task.getAttempts().values()) {
        TaskAttemptId attemptid = att.getID();
        String attid = MRApps.toString(attemptid);
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .path("attempts").path(attid)
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
            response.getType().toString());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList nodes = dom.getElementsByTagName("taskAttempt");
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);
          verifyAMTaskAttemptXML(element, att, task.getType());
        }
      }
    }
  }
}
Use of javax.xml.parsers.DocumentBuilderFactory in the Apache Hadoop project.
The class TestAMWebServicesTasks, method testTaskIdXML:
@Test
public void testTaskIdXML() throws JSONException, Exception {
  WebResource r = resource();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    for (Task task : jobsMap.get(id).getTasks().values()) {
      String tid = MRApps.toString(task.getID());
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").path(tid)
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE + "; " + JettyUtils.UTF_8,
          response.getType().toString());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList nodes = dom.getElementsByTagName("task");
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);
        verifyAMSingleTaskXML(element, task);
      }
    }
  }
}