Use of org.w3c.dom.ProcessingInstruction in the robovm project (by robovm):
class NodeIsSupported, method testIsSupported5.
public void testIsSupported5() throws Throwable {
    // A freshly created processing instruction must report no support for a
    // nonsensical feature name and version string.
    Document document = (Document) load("staffNS", builder);
    ProcessingInstruction instruction = document.createProcessingInstruction("PITarget", "PIData");
    boolean supported = instruction.isSupported("-", "+");
    assertFalse("nodeissupported05", supported);
}
Use of org.w3c.dom.ProcessingInstruction in the robovm project (by robovm):
class NodeNormalize, method testNormalize.
/**
 * Runs the test case: verifies that Document.normalize() merges every run of
 * adjacent Text children into a single Text node while leaving non-text
 * siblings (entity reference, CDATA section, comment, processing
 * instruction) in place.
 *
 * @throws Throwable
 *           Any uncaught exception causes test to fail
 */
public void testNormalize() throws Throwable {
    Document doc;
    Document newDoc;
    DOMImplementation domImpl;
    DocumentType docTypeNull = null;
    Element documentElement;
    Element element1;
    Element element2;
    Element element3;
    Element element4;
    Element element5;
    Element element6;
    Element element7;
    Text text1;
    Text text2;
    Text text3;
    ProcessingInstruction pi;
    CDATASection cData;
    Comment comment;
    EntityReference entRef;
    NodeList elementList;
    doc = (Document) load("staffNS", builder);
    domImpl = doc.getImplementation();
    // Build the tree in a fresh document so the loaded fixture stays intact.
    newDoc = domImpl.createDocument("http://www.w3.org/DOM/Test", "dom:root", docTypeNull);
    element1 = newDoc.createElement("element1");
    element2 = newDoc.createElement("element2");
    element3 = newDoc.createElement("element3");
    element4 = newDoc.createElement("element4");
    element5 = newDoc.createElement("element5");
    element6 = newDoc.createElement("element6");
    element7 = newDoc.createElement("element7");
    text1 = newDoc.createTextNode("text1");
    text2 = newDoc.createTextNode("text2");
    text3 = newDoc.createTextNode("text3");
    cData = newDoc.createCDATASection("Cdata");
    comment = newDoc.createComment("comment");
    pi = newDoc.createProcessingInstruction("PITarget", "PIData");
    entRef = newDoc.createEntityReference("EntRef");
    assertNotNull("createdEntRefNotNull", entRef);
    documentElement = newDoc.getDocumentElement();
    documentElement.appendChild(element1);
    // element2: three adjacent Text nodes -> normalize() should merge them
    // into a single Text child.
    element2.appendChild(text1);
    element2.appendChild(text2);
    element2.appendChild(text3);
    element1.appendChild(element2);
    // A DOM node can have only one parent, so each subsequent element gets
    // its own copies via cloneNode(false).
    text1 = (Text) text1.cloneNode(false);
    text2 = (Text) text2.cloneNode(false);
    // element3: entity reference + two Text nodes -> the Text pair merges,
    // the entity reference survives (2 children after normalize()).
    element3.appendChild(entRef);
    element3.appendChild(text1);
    element3.appendChild(text2);
    element1.appendChild(element3);
    text1 = (Text) text1.cloneNode(false);
    text2 = (Text) text2.cloneNode(false);
    // element4: CDATA section + two Text nodes.
    element4.appendChild(cData);
    element4.appendChild(text1);
    element4.appendChild(text2);
    element1.appendChild(element4);
    text2 = (Text) text2.cloneNode(false);
    text3 = (Text) text3.cloneNode(false);
    // element5: comment + two Text nodes.
    element5.appendChild(comment);
    element5.appendChild(text2);
    element5.appendChild(text3);
    element1.appendChild(element5);
    text2 = (Text) text2.cloneNode(false);
    text3 = (Text) text3.cloneNode(false);
    // element6: processing instruction + two Text nodes.
    element6.appendChild(pi);
    element6.appendChild(text2);
    element6.appendChild(text3);
    element1.appendChild(element6);
    entRef = (EntityReference) entRef.cloneNode(false);
    text1 = (Text) text1.cloneNode(false);
    text2 = (Text) text2.cloneNode(false);
    text3 = (Text) text3.cloneNode(false);
    // element7: entity reference + three Text nodes -> 2 children expected
    // after normalization.
    element7.appendChild(entRef);
    element7.appendChild(text1);
    element7.appendChild(text2);
    element7.appendChild(text3);
    element1.appendChild(element7);
    // Sanity-check child counts before normalization.
    elementList = element1.getChildNodes();
    assertEquals("nodeNormalize01_1Bef", 6, elementList.getLength());
    elementList = element2.getChildNodes();
    assertEquals("nodeNormalize01_2Bef", 3, elementList.getLength());
    elementList = element3.getChildNodes();
    assertEquals("nodeNormalize01_3Bef", 3, elementList.getLength());
    elementList = element4.getChildNodes();
    assertEquals("nodeNormalize01_4Bef", 3, elementList.getLength());
    elementList = element5.getChildNodes();
    assertEquals("nodeNormalize01_5Bef", 3, elementList.getLength());
    elementList = element6.getChildNodes();
    assertEquals("nodeNormalize01_6Bef", 3, elementList.getLength());
    elementList = element7.getChildNodes();
    assertEquals("nodeNormalize01_7Bef", 4, elementList.getLength());
    newDoc.normalize();
    // After normalization, each run of adjacent Text nodes has collapsed to
    // one node; non-text siblings are untouched.
    elementList = element1.getChildNodes();
    assertEquals("nodeNormalize01_1Aft", 6, elementList.getLength());
    elementList = element2.getChildNodes();
    assertEquals("nodeNormalize01_2Aft", 1, elementList.getLength());
    elementList = element3.getChildNodes();
    assertEquals("nodeNormalize01_3Aft", 2, elementList.getLength());
    elementList = element4.getChildNodes();
    assertEquals("nodeNormalize01_4Aft", 2, elementList.getLength());
    elementList = element5.getChildNodes();
    assertEquals("nodeNormalize01_5Aft", 2, elementList.getLength());
    elementList = element6.getChildNodes();
    assertEquals("nodeNormalize01_6Aft", 2, elementList.getLength());
    elementList = element7.getChildNodes();
    assertEquals("nodeNormalize01_7Aft", 2, elementList.getLength());
}
Use of org.w3c.dom.ProcessingInstruction in the j2objc project (by google):
class TreeWalker, method startNode.
/**
 * Emits the SAX "start" events for a single DOM node: updates locator
 * bookkeeping, then dispatches on node type to the registered
 * ContentHandler (and to the LexicalHandler interface, when the handler
 * implements it).
 *
 * @param node Node to process
 *
 * @throws org.xml.sax.SAXException propagated from the downstream handler
 */
protected void startNode(Node node) throws org.xml.sax.SAXException {
    // Some DOM implementations expose source positions by implementing the
    // SAX Locator interface on their nodes; forward that information when
    // available, otherwise reset the position to "unknown" (0/0).
    if (node instanceof Locator) {
        Locator loc = (Locator) node;
        m_locator.setColumnNumber(loc.getColumnNumber());
        m_locator.setLineNumber(loc.getLineNumber());
        m_locator.setPublicId(loc.getPublicId());
        m_locator.setSystemId(loc.getSystemId());
    } else {
        m_locator.setColumnNumber(0);
        m_locator.setLineNumber(0);
    }
    switch(node.getNodeType()) {
        case Node.COMMENT_NODE:
            {
                // Plain SAX has no comment event; comments are reported only
                // when the ContentHandler is also a LexicalHandler.
                String data = ((Comment) node).getData();
                if (m_contentHandler instanceof LexicalHandler) {
                    LexicalHandler lh = ((LexicalHandler) this.m_contentHandler);
                    lh.comment(data.toCharArray(), 0, data.length());
                }
            }
            break;
        case Node.DOCUMENT_FRAGMENT_NODE:
            // No event is generated for the fragment container itself.
            break;
        case Node.DOCUMENT_NODE:
            break;
        case Node.ELEMENT_NODE:
            Element elem_node = (Element) node;
            {
                // Make sure the namespace node for the element itself is
                // declared to the ContentHandler.
                String uri = elem_node.getNamespaceURI();
                if (uri != null) {
                    String prefix = elem_node.getPrefix();
                    if (prefix == null)
                        prefix = "";
                    this.m_contentHandler.startPrefixMapping(prefix, uri);
                }
            }
            NamedNodeMap atts = elem_node.getAttributes();
            int nAttrs = atts.getLength();
            // Each namespace-related attribute is declared to the
            // ContentHandler before startElement is fired.
            for (int i = 0; i < nAttrs; i++) {
                final Node attr = atts.item(i);
                final String attrName = attr.getNodeName();
                final int colon = attrName.indexOf(':');
                final String prefix;
                if (attrName.equals("xmlns") || attrName.startsWith("xmlns:")) {
                    // Namespace declarations ("xmlns" / "xmlns:foo") are
                    // reported as prefix mappings, not as plain attributes.
                    if (colon < 0)
                        prefix = "";
                    else
                        prefix = attrName.substring(colon + 1);
                    this.m_contentHandler.startPrefixMapping(prefix, attr.getNodeValue());
                } else if (colon > 0) {
                    // Prefixed attribute: make sure its namespace is declared.
                    prefix = attrName.substring(0, colon);
                    String uri = attr.getNamespaceURI();
                    if (uri != null)
                        this.m_contentHandler.startPrefixMapping(prefix, uri);
                }
            }
            String ns = m_dh.getNamespaceOfNode(node);
            if (null == ns)
                ns = "";
            this.m_contentHandler.startElement(ns, m_dh.getLocalNameOfNode(node), node.getNodeName(), new AttList(atts, m_dh));
            break;
        case Node.PROCESSING_INSTRUCTION_NODE:
            {
                ProcessingInstruction pi = (ProcessingInstruction) node;
                String name = pi.getNodeName();
                // The internal "xslt-next-is-raw" PI is not forwarded; it
                // flags the next text node to be emitted with output
                // escaping disabled (see the TEXT_NODE case below).
                if (name.equals("xslt-next-is-raw")) {
                    nextIsRaw = true;
                } else {
                    this.m_contentHandler.processingInstruction(pi.getNodeName(), pi.getData());
                }
            }
            break;
        case Node.CDATA_SECTION_NODE:
            {
                // Bracket the character data with CDATA events only when the
                // handler understands the LexicalHandler interface.
                boolean isLexH = (m_contentHandler instanceof LexicalHandler);
                LexicalHandler lh = isLexH ? ((LexicalHandler) this.m_contentHandler) : null;
                if (isLexH) {
                    lh.startCDATA();
                }
                dispatachChars(node);
                {
                    if (isLexH) {
                        lh.endCDATA();
                    }
                }
            }
            break;
        case Node.TEXT_NODE:
            {
                if (nextIsRaw) {
                    // A preceding "xslt-next-is-raw" PI requested that this
                    // text be emitted with output escaping disabled.
                    nextIsRaw = false;
                    m_contentHandler.processingInstruction(javax.xml.transform.Result.PI_DISABLE_OUTPUT_ESCAPING, "");
                    dispatachChars(node);
                    m_contentHandler.processingInstruction(javax.xml.transform.Result.PI_ENABLE_OUTPUT_ESCAPING, "");
                } else {
                    dispatachChars(node);
                }
            }
            break;
        case Node.ENTITY_REFERENCE_NODE:
            {
                EntityReference eref = (EntityReference) node;
                if (m_contentHandler instanceof LexicalHandler) {
                    ((LexicalHandler) this.m_contentHandler).startEntity(eref.getNodeName());
                } else {
                    // Plain SAX cannot represent an unexpanded entity
                    // reference; it is silently dropped here.
                    // warning("Can not output entity to a pure SAX ContentHandler");
                }
            }
            break;
        default:
    }
}
Use of org.w3c.dom.ProcessingInstruction in the XobotOS project (by xamarin):
class DocumentImpl, method shallowCopy.
/**
 * Produces a shallow copy of the given node, owned by this document and DOM
 * implementation. Element nodes additionally have their attributes copied,
 * since attributes travel with their element even in a shallow copy.
 *
 * @param operation the operation type reported to user data handlers for
 *          copied element attributes. Notifying user data handlers of the
 *          returned node itself is the caller's responsibility.
 * @param node a node belonging to any document or DOM implementation.
 * @return a new node whose document is this document and whose DOM
 *         implementation is this DOM implementation.
 */
private NodeImpl shallowCopy(short operation, Node node) {
    switch (node.getNodeType()) {
        case Node.ATTRIBUTE_NODE: {
            AttrImpl sourceAttr = (AttrImpl) node;
            AttrImpl copiedAttr;
            if (sourceAttr.namespaceAware) {
                copiedAttr = createAttributeNS(sourceAttr.getNamespaceURI(), sourceAttr.getLocalName());
                copiedAttr.setPrefix(sourceAttr.getPrefix());
            } else {
                copiedAttr = createAttribute(sourceAttr.getName());
            }
            copiedAttr.setNodeValue(sourceAttr.getValue());
            return copiedAttr;
        }
        case Node.CDATA_SECTION_NODE:
            return createCDATASection(((CharacterData) node).getData());
        case Node.COMMENT_NODE:
            return createComment(((Comment) node).getData());
        case Node.DOCUMENT_FRAGMENT_NODE:
            return createDocumentFragment();
        case Node.DOCUMENT_NODE:
        case Node.DOCUMENT_TYPE_NODE:
            // Whole documents and doctypes cannot be copied into another document.
            throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "Cannot copy node of type " + node.getNodeType());
        case Node.ELEMENT_NODE: {
            ElementImpl sourceElement = (ElementImpl) node;
            ElementImpl copiedElement;
            if (sourceElement.namespaceAware) {
                copiedElement = createElementNS(sourceElement.getNamespaceURI(), sourceElement.getLocalName());
                copiedElement.setPrefix(sourceElement.getPrefix());
            } else {
                copiedElement = createElement(sourceElement.getTagName());
            }
            // Copy each attribute (recursively via the ATTRIBUTE_NODE case)
            // and notify user data handlers of the attribute copies here.
            NamedNodeMap sourceAttributes = sourceElement.getAttributes();
            for (int i = 0; i < sourceAttributes.getLength(); i++) {
                AttrImpl originalAttr = (AttrImpl) sourceAttributes.item(i);
                AttrImpl duplicateAttr = (AttrImpl) shallowCopy(operation, originalAttr);
                notifyUserDataHandlers(operation, originalAttr, duplicateAttr);
                if (originalAttr.namespaceAware) {
                    copiedElement.setAttributeNodeNS(duplicateAttr);
                } else {
                    copiedElement.setAttributeNode(duplicateAttr);
                }
            }
            return copiedElement;
        }
        case Node.ENTITY_NODE:
        case Node.NOTATION_NODE:
            // TODO: implement this when we support these node types
            throw new UnsupportedOperationException();
        case Node.ENTITY_REFERENCE_NODE:
            /*
             * When we support entities in the doctype, this will need to
             * behave differently for clones vs. imports. Clones copy
             * entities by value, copying the referenced subtree from the
             * original document. Imports copy entities by reference,
             * possibly referring to a different subtree in the new
             * document.
             */
            return createEntityReference(node.getNodeName());
        case Node.PROCESSING_INSTRUCTION_NODE: {
            ProcessingInstruction instruction = (ProcessingInstruction) node;
            return createProcessingInstruction(instruction.getTarget(), instruction.getData());
        }
        case Node.TEXT_NODE:
            return createTextNode(((Text) node).getData());
        default:
            throw new DOMException(DOMException.NOT_SUPPORTED_ERR, "Unsupported node type " + node.getNodeType());
    }
}
Use of org.w3c.dom.ProcessingInstruction in the webservices-axiom project (by apache):
class DOMReader, method proceed.
@Override
public boolean proceed() throws StreamException {
    // Iterative depth-first traversal of the DOM tree rooted at rootNode.
    // Each call pushes (at most) one event or event group to the handler;
    // the traversal position is carried across calls in the
    // (currentNode, state) pair, snapshotted into locals here.
    Node currentNode = this.currentNode;
    int state = this.state;
    loop: while (true) {
        // Phase 1: advance the cursor according to the current state.
        switch(state) {
            case START:
                // A Document root is visited itself; for a non-document root
                // the synthetic "fragment" event is generated while
                // currentNode is still null (see NOT_VISITED/Phase 2).
                if (rootNode instanceof Document) {
                    currentNode = rootNode;
                }
                state = NOT_VISITED;
                break;
            case NOT_VISITED:
                if (currentNode == null) {
                    currentNode = rootNode;
                } else {
                    // Descend into the first child, or mark this node done.
                    Node node = currentNode.getFirstChild();
                    if (node == null) {
                        state = VISITED;
                    } else {
                        currentNode = node;
                    }
                }
                break;
            case VISITED:
                if (currentNode == null || currentNode instanceof Document) {
                    throw new IllegalStateException();
                } else if (currentNode == rootNode) {
                    // Finished the root of a fragment; a null cursor triggers
                    // the synthetic end-of-document handling below.
                    currentNode = null;
                } else {
                    // Move to the next sibling, or climb back to the parent.
                    Node node = currentNode.getNextSibling();
                    if (node == null) {
                        currentNode = currentNode.getParentNode();
                    } else {
                        currentNode = node;
                        state = NOT_VISITED;
                    }
                }
                break;
            default:
                throw new IllegalStateException();
        }
        // Phase 2: emit the event for the new position. A null cursor stands
        // in for the (possibly synthetic) document node.
        int nodeType = currentNode == null ? Node.DOCUMENT_NODE : currentNode.getNodeType();
        if (state == VISITED) {
            // In the future, there may be other node types that generate events here
            switch(nodeType) {
                case Node.ELEMENT_NODE:
                    handler.endElement();
                    break loop;
                case Node.DOCUMENT_NODE:
                    handler.completed();
                    state = COMPLETE;
                    break loop;
            }
        } else {
            switch(nodeType) {
                case Node.DOCUMENT_NODE:
                    if (currentNode != null) {
                        Document document = (Document) currentNode;
                        if (dom3) {
                            handler.startDocument(document.getInputEncoding(), document.getXmlVersion(), document.getXmlEncoding(), document.getXmlStandalone());
                        } else {
                            // Pre-DOM3 APIs cannot report XML declaration details.
                            handler.startDocument(null, "1.0", null, null);
                        }
                    } else {
                        handler.startFragment();
                    }
                    break loop;
                case Node.DOCUMENT_TYPE_NODE:
                    DocumentType docType = (DocumentType) currentNode;
                    handler.processDocumentTypeDeclaration(docType.getName(), docType.getPublicId(), docType.getSystemId(), docType.getInternalSubset());
                    break loop;
                case Node.ELEMENT_NODE:
                    Element element = (Element) currentNode;
                    String localName = element.getLocalName();
                    if (localName == null) {
                        // TODO: support non-namespace-aware (DOM level 1) elements
                        throw new UnsupportedOperationException();
                    } else {
                        handler.startElement(nullToEmptyString(element.getNamespaceURI()), localName, nullToEmptyString(element.getPrefix()));
                    }
                    NamedNodeMap attributes = element.getAttributes();
                    // TODO: we should not push all attributes at once
                    for (int length = attributes.getLength(), i = 0; i < length; i++) {
                        Attr attr = (Attr) attributes.item(i);
                        String attrLocalName = attr.getLocalName();
                        if (attrLocalName == null) {
                            // DOM level 1 attribute: no namespace information.
                            handler.processAttribute(attr.getName(), attr.getValue(), "CDATA", attr.getSpecified());
                        } else {
                            String namespaceURI = attr.getNamespaceURI();
                            if (XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(namespaceURI)) {
                                // xmlns / xmlns:prefix attributes are reported as
                                // namespace declarations, not plain attributes.
                                handler.processNamespaceDeclaration(attrLocalName.equals(XMLConstants.XMLNS_ATTRIBUTE) ? "" : attrLocalName, attr.getValue());
                            } else {
                                handler.processAttribute(nullToEmptyString(namespaceURI), attrLocalName, nullToEmptyString(attr.getPrefix()), attr.getValue(), "CDATA", attr.getSpecified());
                            }
                        }
                    }
                    handler.attributesCompleted();
                    break loop;
                case Node.TEXT_NODE:
                    handler.processCharacterData(currentNode.getNodeValue(), dom3 && ((Text) currentNode).isElementContentWhitespace());
                    break loop;
                case Node.CDATA_SECTION_NODE:
                    handler.startCDATASection();
                    handler.processCharacterData(currentNode.getNodeValue(), false);
                    handler.endCDATASection();
                    break loop;
                case Node.COMMENT_NODE:
                    handler.startComment();
                    handler.processCharacterData(currentNode.getNodeValue(), false);
                    handler.endComment();
                    break loop;
                case Node.PROCESSING_INSTRUCTION_NODE:
                    ProcessingInstruction pi = (ProcessingInstruction) currentNode;
                    handler.startProcessingInstruction(pi.getTarget());
                    handler.processCharacterData(pi.getData(), false);
                    handler.endProcessingInstruction();
                    break loop;
                case Node.ENTITY_REFERENCE_NODE:
                    if (!expandEntityReferences) {
                        // Report the reference itself and skip its children by
                        // jumping straight to the VISITED state.
                        handler.processEntityReference(currentNode.getNodeName(), null);
                        state = VISITED;
                        break loop;
                    } else {
                        // No event has been generated, so loop again
                        break;
                    }
                default:
                    // TODO
                    throw new UnsupportedOperationException("Unsupported node type " + nodeType);
            }
        }
    }
    // Persist the traversal position for the next call.
    this.currentNode = currentNode;
    this.state = state;
    return state == COMPLETE;
}
Aggregations