Use of org.exist.dom.persistent.DocumentTypeImpl in project exist by eXist-db.
The class SystemImportHandler, method restoreResourceEntry:
private DeferredPermission restoreResourceEntry(final Attributes atts) throws SAXException {
    @Nullable final String skip = atts.getValue("skip");

    // Don't process entries which should be skipped
    if (skip != null && !"no".equals(skip)) {
        return new SkippedEntryDeferredPermission();
    }

    @Nullable final String name = atts.getValue("name");
    if (name == null) {
        throw new SAXException("Resource requires a name attribute");
    }

    final boolean xmlType = Optional.ofNullable(atts.getValue("type")).filter(s -> s.equals("XMLResource")).isPresent();

    final String owner = getAttr(atts, "owner", SecurityManager.SYSTEM);
    final String group = getAttr(atts, "group", SecurityManager.DBA_GROUP);
    final String perms = getAttr(atts, "mode", "644");
    final String filename = getAttr(atts, "filename", name);
    @Nullable final String mimeTypeStr = atts.getValue("mimetype");
    @Nullable final String dateCreatedStr = atts.getValue("created");
    @Nullable final String dateModifiedStr = atts.getValue("modified");
    @Nullable final String publicId = atts.getValue("publicid");
    @Nullable final String systemId = atts.getValue("systemid");
    @Nullable final String nameDocType = atts.getValue("namedoctype");

    MimeType mimeType = null;
    if (mimeTypeStr != null) {
        mimeType = MimeTable.getInstance().getContentType(mimeTypeStr);
    }
    if (mimeType == null) {
        mimeType = xmlType ? MimeType.XML_TYPE : MimeType.BINARY_TYPE;
    }

    Date dateCreated = null;
    if (dateCreatedStr != null) {
        try {
            dateCreated = new DateTimeValue(dateCreatedStr).getDate();
        } catch (final XPathException xpe) {
            listener.warn("Illegal creation date. Ignoring date...");
        }
    }

    Date dateModified = null;
    if (dateModifiedStr != null) {
        try {
            dateModified = new DateTimeValue(dateModifiedStr).getDate();
        } catch (final XPathException xpe) {
            listener.warn("Illegal modification date. Ignoring date...");
        }
    }

    final DocumentType docType;
    if (publicId != null || systemId != null) {
        docType = new DocumentTypeImpl(nameDocType, publicId, systemId);
    } else {
        docType = null;
    }

    final XmldbURI docUri;
    if (version >= STRICT_URI_VERSION) {
        docUri = XmldbURI.create(name);
    } else {
        try {
            docUri = URIUtils.encodeXmldbUriFor(name);
        } catch (final URISyntaxException e) {
            final String msg = "Could not parse document name into a URI: " + e.getMessage();
            listener.error(msg);
            LOG.error(msg, e);
            return new SkippedEntryDeferredPermission();
        }
    }

    try (final EXistInputSource is = descriptor.getInputSource(filename)) {
        if (is == null) {
            final String msg = "Failed to restore resource '" + name + "'\nfrom file '" + descriptor.getSymbolicPath(name, false) + "'.\nReason: Unable to obtain its EXistInputSource";
            listener.warn(msg);
            throw new RuntimeException(msg);
        }

        try (final Txn transaction = beginTransaction()) {
            broker.storeDocument(transaction, docUri, is, mimeType, dateCreated, dateModified, null, docType, null, currentCollection);

            try (final LockedDocument doc = currentCollection.getDocumentWithLock(broker, docUri, Lock.LockMode.READ_LOCK)) {
                rh.startDocumentRestore(doc.getDocument(), atts);
            }

            transaction.commit();

            final DeferredPermission deferredPermission;
            if (name.startsWith(XmldbURI.SYSTEM_COLLECTION)) {
                // prevents restore of a backup from changing system collection resource ownership
                deferredPermission = new ResourceDeferredPermission(listener, currentCollection.getURI().append(name), SecurityManager.SYSTEM, SecurityManager.DBA_GROUP, Integer.parseInt(perms, 8));
            } else {
                deferredPermission = new ResourceDeferredPermission(listener, currentCollection.getURI().append(name), owner, group, Integer.parseInt(perms, 8));
            }

            try (final LockedDocument doc = currentCollection.getDocumentWithLock(broker, docUri, Lock.LockMode.READ_LOCK)) {
                rh.endDocumentRestore(doc.getDocument());
            }

            listener.restoredResource(name);

            return deferredPermission;
        } catch (final Exception e) {
            throw new IOException(e);
        }
    } catch (final Exception e) {
        listener.warn("Failed to restore resource '" + name + "'\nfrom file '" + descriptor.getSymbolicPath(name, false) + "'.\nReason: " + e.getMessage());
        LOG.error(e.getMessage(), e);
        return new SkippedEntryDeferredPermission();
    }
}
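A note on the permission handling above: the backup's mode attribute is an octal string, so Integer.parseInt(perms, 8) converts a value such as "644" into the decimal 420 that the ResourceDeferredPermission constructor receives. A minimal standalone illustration of that conversion (not eXist code, class name is illustrative):

import java.util.Objects;

public class OctalModeExample {
    public static void main(final String[] args) {
        // "644" from a backup descriptor is an octal Unix mode: rw-r--r--
        final int mode = Integer.parseInt("644", 8);
        System.out.println(mode);                        // prints 420 (decimal)
        System.out.println(Integer.toOctalString(mode)); // prints 644 again
        Objects.requireNonNull(Integer.toOctalString(mode));
    }
}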
Use of org.exist.dom.persistent.DocumentTypeImpl in project exist by eXist-db.
The class Indexer, method startDTD:
// Methods of interface LexicalHandler
// used to determine Doctype
@Override
public void startDTD(final String name, final String publicId, final String systemId) {
    final DocumentTypeImpl docType = new DocumentTypeImpl(name, publicId, systemId);
    document.setDocumentType(docType);
    insideDTD = true;
}
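startDTD is a callback from SAX's LexicalHandler interface, which is the interface the comment above refers to; a parser only delivers it when a lexical handler has been registered via the standard SAX property. A self-contained sketch of that wiring with plain SAX (independent of eXist's Indexer; class name is hypothetical):

import java.io.StringReader;

import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.DefaultHandler2;

public class DoctypeSniffer extends DefaultHandler2 {

    @Override
    public void startDTD(final String name, final String publicId, final String systemId) {
        // Invoked once when the parser reaches the DOCTYPE declaration
        System.out.println("DOCTYPE name=" + name + ", publicId=" + publicId + ", systemId=" + systemId);
    }

    public static void main(final String[] args) throws Exception {
        final SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
        final XMLReader reader = parser.getXMLReader();
        final DoctypeSniffer handler = new DoctypeSniffer();
        reader.setContentHandler(handler);
        // Lexical events (DTD boundaries, comments, CDATA) are only reported through this property
        reader.setProperty("http://xml.org/sax/properties/lexical-handler", handler);
        reader.parse(new InputSource(new StringReader(
                "<!DOCTYPE greeting [<!ELEMENT greeting (#PCDATA)>]><greeting>hi</greeting>")));
    }
}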
Use of org.exist.dom.persistent.DocumentTypeImpl in project exist by eXist-db.
The class RestoreHandler, method restoreResourceEntry:
private DeferredPermission restoreResourceEntry(final Attributes atts) throws SAXException {
    @Nullable final String skip = atts.getValue("skip");

    // Don't process entries which should be skipped
    if (skip != null && !"no".equals(skip)) {
        return new SkippedEntryDeferredPermission();
    }

    @Nullable final String name = atts.getValue("name");
    if (name == null) {
        throw new SAXException("Resource requires a name attribute");
    }

    final boolean xmlType = Optional.ofNullable(atts.getValue("type")).filter(s -> s.equals("XMLResource")).isPresent();

    final String owner = getAttr(atts, "owner", SecurityManager.SYSTEM);
    final String group = getAttr(atts, "group", SecurityManager.DBA_GROUP);
    final String perms = getAttr(atts, "mode", "644");
    final String filename = getAttr(atts, "filename", name);
    @Nullable final String mimeTypeStr = atts.getValue("mimetype");
    @Nullable final String dateCreatedStr = atts.getValue("created");
    @Nullable final String dateModifiedStr = atts.getValue("modified");
    @Nullable final String publicId = atts.getValue("publicid");
    @Nullable final String systemId = atts.getValue("systemid");
    @Nullable final String nameDocType = atts.getValue("namedoctype");

    final XmldbURI docName;
    if (version >= STRICT_URI_VERSION) {
        docName = XmldbURI.create(name);
    } else {
        try {
            docName = URIUtils.encodeXmldbUriFor(name);
        } catch (final URISyntaxException e) {
            final String msg = "Could not parse document name into a URI: " + e.getMessage();
            listener.error(msg);
            LOG.error(msg, e);
            return new SkippedEntryDeferredPermission();
        }
    }

    final EXistInputSource is;
    if (deduplicateBlobs && !xmlType) {
        final String blobId = atts.getValue("blob-id");
        is = descriptor.getBlobInputSource(blobId);
        if (is == null) {
            final String msg = "Failed to restore resource '" + name + "'\nfrom BLOB '" + blobId + "'.\nReason: Unable to obtain its EXistInputSource";
            listener.warn(msg);
            return new SkippedEntryDeferredPermission();
        }
    } else {
        is = descriptor.getInputSource(filename);
        if (is == null) {
            final String msg = "Failed to restore resource '" + name + "'\nfrom file '" + descriptor.getSymbolicPath(name, false) + "'.\nReason: Unable to obtain its EXistInputSource";
            listener.warn(msg);
            return new SkippedEntryDeferredPermission();
        }
    }

    MimeType mimeType = null;
    if (mimeTypeStr != null) {
        mimeType = MimeTable.getInstance().getContentType(mimeTypeStr);
    }
    if (mimeType == null) {
        mimeType = xmlType ? MimeType.XML_TYPE : MimeType.BINARY_TYPE;
    }

    Date dateCreated = null;
    if (dateCreatedStr != null) {
        try {
            dateCreated = new DateTimeValue(dateCreatedStr).getDate();
        } catch (final XPathException xpe) {
            listener.warn("Illegal creation date. Ignoring date...");
        }
    }

    Date dateModified = null;
    if (dateModifiedStr != null) {
        try {
            dateModified = new DateTimeValue(dateModifiedStr).getDate();
        } catch (final XPathException xpe) {
            listener.warn("Illegal modification date. Ignoring date...");
        }
    }

    final DocumentType docType;
    if (publicId != null || systemId != null) {
        docType = new DocumentTypeImpl(nameDocType, publicId, systemId);
    } else {
        docType = null;
    }

    final XmldbURI docUri = currentCollectionUri.append(docName);

    try {
        try (final Txn transaction = beginTransaction()) {
            boolean validated = false;
            try {
                try (final Collection collection = broker.openCollection(currentCollectionUri, Lock.LockMode.WRITE_LOCK);
                        final ManagedDocumentLock docLock = broker.getBrokerPool().getLockManager().acquireDocumentWriteLock(docUri)) {

                    broker.storeDocument(transaction, docName, is, mimeType, dateCreated, dateModified, null, docType, null, collection);

                    validated = true;

                    transaction.commit();

                    // NOTE: early release of Collection lock inline with Asymmetrical Locking scheme
                    collection.close();
                }
            } finally {
                /*
                    This allows us to commit the transaction (so the restore doesn't stop)
                    and still throw an exception to skip over resources that didn't
                    validate. This preserves eXist-db's previous behaviour
                    of "best effort attempt" when restoring a backup,
                    rather than an ACID "all or nothing" approach.
                 */
                if (!validated) {
                    // because `validated == false` we know that there have only been reads on the transaction/sub-transaction!
                    transaction.commit();
                }
            }
        }

        final DeferredPermission deferredPermission;
        if (name.startsWith(XmldbURI.SYSTEM_COLLECTION)) {
            // prevents restore of a backup from changing system collection resource ownership
            deferredPermission = new ResourceDeferredPermission(listener, docUri, SecurityManager.SYSTEM, SecurityManager.DBA_GROUP, Integer.parseInt(perms, 8));
        } else {
            deferredPermission = new ResourceDeferredPermission(listener, docUri, owner, group, Integer.parseInt(perms, 8));
        }

        listener.restoredResource(name);

        return deferredPermission;
    } catch (final Exception e) {
        final String message = String.format("Failed to restore resource '%s'\nfrom file '%s'.\nReason: %s", name, descriptor.getSymbolicPath(name, false), e.getMessage());
        listener.warn(message);
        LOG.error(message, e);
        return new SkippedEntryDeferredPermission();
    } finally {
        is.close();
    }
}
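The created and modified attributes parsed above are lexical xs:dateTime strings; eXist converts them with its own DateTimeValue type before handing java.util.Date values to storeDocument. A rough standalone analogue of that conversion using only the JDK (example value and class name are illustrative, not taken from eXist):

import java.time.OffsetDateTime;
import java.util.Date;

public class BackupDateExample {
    public static void main(final String[] args) {
        // A lexical xs:dateTime with a timezone, as it might appear in a backup descriptor
        final String created = "2021-03-15T10:15:30+01:00";
        // Convert to the legacy java.util.Date type that storeDocument(...) accepts
        final Date dateCreated = Date.from(OffsetDateTime.parse(created).toInstant());
        System.out.println(dateCreated);
    }
}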
Use of org.exist.dom.persistent.DocumentTypeImpl in project exist by eXist-db.
The class NativeSerializer, method serializeToReceiver (node-level overload):
protected void serializeToReceiver(IStoredNode node, INodeIterator iter, DocumentImpl doc, boolean first, Match match, Set<String> namespaces) throws SAXException {
    if (node == null && iter.hasNext()) {
        node = iter.next();
    }
    if (node == null) {
        return;
    }
    // char ch[];
    String cdata;
    switch(node.getNodeType()) {

        case Node.ELEMENT_NODE:
            receiver.setCurrentNode(node);
            String defaultNS = null;
            if (((ElementImpl) node).declaresNamespacePrefixes()) {
                // declare namespaces used by this element
                String prefix, uri;
                for (final Iterator<String> i = ((ElementImpl) node).getPrefixes(); i.hasNext(); ) {
                    prefix = i.next();
                    if (prefix.isEmpty()) {
                        defaultNS = ((ElementImpl) node).getNamespaceForPrefix(prefix);
                        receiver.startPrefixMapping(XMLConstants.DEFAULT_NS_PREFIX, defaultNS);
                        namespaces.add(defaultNS);
                    } else {
                        uri = ((ElementImpl) node).getNamespaceForPrefix(prefix);
                        receiver.startPrefixMapping(prefix, uri);
                        namespaces.add(uri);
                    }
                }
            }
            final String ns = defaultNS == null ? node.getNamespaceURI() : defaultNS;
            if (ns != null && ns.length() > 0 && (!namespaces.contains(ns))) {
                String prefix = node.getPrefix();
                if (prefix == null) {
                    prefix = XMLConstants.DEFAULT_NS_PREFIX;
                }
                receiver.startPrefixMapping(prefix, ns);
            }
            final AttrList attribs = new AttrList();
            if ((first && showId == EXIST_ID_ELEMENT) || showId == EXIST_ID_ALL) {
                attribs.addAttribute(ID_ATTRIB, node.getNodeId().toString());
            /*
             * This is a proposed fix-up that the serializer could do
             * to make sure elements always have the namespace declarations
             *
            } else {
                // This is fix-up for when the node has a namespace but there is no
                // namespace declaration.
                String elementNS = node.getNamespaceURI();
                Node parent = node.getParentNode();
                if (parent instanceof ElementImpl) {
                    ElementImpl parentElement = (ElementImpl)parent;
                    String declaredNS = parentElement.getNamespaceForPrefix(node.getPrefix());
                    if (elementNS!=null && declaredNS==null) {
                        // We need to declare the prefix as it was missed somehow
                        receiver.startPrefixMapping(node.getPrefix(), elementNS);
                    } else if (elementNS==null && declaredNS!=null) {
                        // We need to declare the default namespace to be the no namespace
                        receiver.startPrefixMapping(node.getPrefix(), elementNS);
                    } else if (!elementNS.equals(defaultNS)) {
                        // Same prefix but different namespace
                        receiver.startPrefixMapping(node.getPrefix(), elementNS);
                    }
                } else if (elementNS!=null) {
                    // If the parent is the document, we must have a namespace
                    // declaration when there is a namespace URI.
                    receiver.startPrefixMapping(node.getPrefix(), elementNS);
                }
            */
            }
            if (first && showId > 0) {
                // String src = doc.getCollection().getName() + "/" + doc.getFileName();
                attribs.addAttribute(SOURCE_ATTRIB, doc.getFileURI().toString());
            }
            final int children = node.getChildCount();
            int count = 0;
            IStoredNode child = null;
            StringBuilder matchAttrCdata = null;
            StringBuilder matchAttrOffsetsCdata = null;
            StringBuilder matchAttrLengthsCdata = null;
            while (count < children) {
                child = iter.hasNext() ? iter.next() : null;
                if (child != null && child.getNodeType() == Node.ATTRIBUTE_NODE) {
                    if ((getHighlightingMode() & TAG_ATTRIBUTE_MATCHES) == TAG_ATTRIBUTE_MATCHES && match != null && child.getNodeId().equals(match.getNodeId())) {
                        if (matchAttrCdata == null) {
                            matchAttrCdata = new StringBuilder();
                            matchAttrOffsetsCdata = new StringBuilder();
                            matchAttrLengthsCdata = new StringBuilder();
                        } else {
                            matchAttrCdata.append(",");
                            matchAttrOffsetsCdata.append(",");
                            matchAttrLengthsCdata.append(",");
                        }
                        matchAttrCdata.append(child.getQName().toString());
                        matchAttrOffsetsCdata.append(match.getOffset(0).getOffset());
                        matchAttrLengthsCdata.append(match.getOffset(0).getLength());
                        match = match.getNextMatch();
                    }
                    cdata = ((AttrImpl) child).getValue();
                    attribs.addAttribute(child.getQName(), cdata);
                    count++;
                    child.release();
                } else {
                    break;
                }
            }
            if (matchAttrCdata != null) {
                attribs.addAttribute(MATCHES_ATTRIB, matchAttrCdata.toString());
                // mask the full-text index which doesn't provide offset and length
                M_ZERO_VALUES.reset(matchAttrOffsetsCdata);
                final boolean offsetsIsZero = M_ZERO_VALUES.matches();
                M_ZERO_VALUES.reset(matchAttrLengthsCdata);
                final boolean lengthsIsZero = M_ZERO_VALUES.matches();
                if (!offsetsIsZero && !lengthsIsZero) {
                    attribs.addAttribute(MATCHES_OFFSET_ATTRIB, matchAttrOffsetsCdata.toString());
                    attribs.addAttribute(MATCHES_LENGTH_ATTRIB, matchAttrLengthsCdata.toString());
                }
            }
            receiver.setCurrentNode(node);
            receiver.startElement(node.getQName(), attribs);
            while (count < children) {
                serializeToReceiver(child, iter, doc, false, match, namespaces);
                if (++count < children) {
                    child = iter.hasNext() ? iter.next() : null;
                } else {
                    break;
                }
            }
            receiver.setCurrentNode(node);
            receiver.endElement(node.getQName());
            if (((ElementImpl) node).declaresNamespacePrefixes()) {
                for (final Iterator<String> i = ((ElementImpl) node).getPrefixes(); i.hasNext(); ) {
                    final String prefix = i.next();
                    receiver.endPrefixMapping(prefix);
                }
            }
            if (ns != null && ns.length() > 0 && (!namespaces.contains(ns))) {
                String prefix = node.getPrefix();
                if (prefix == null) {
                    prefix = XMLConstants.DEFAULT_NS_PREFIX;
                }
                receiver.endPrefixMapping(prefix);
            }
            node.release();
            break;

        case Node.TEXT_NODE:
            if (first && createContainerElements) {
                final AttrList tattribs = new AttrList();
                if (showId > 0) {
                    tattribs.addAttribute(ID_ATTRIB, node.getNodeId().toString());
                    tattribs.addAttribute(SOURCE_ATTRIB, doc.getFileURI().toString());
                }
                receiver.startElement(TEXT_ELEMENT, tattribs);
            }
            receiver.setCurrentNode(node);
            receiver.characters(((TextImpl) node).getXMLString());
            if (first && createContainerElements) {
                receiver.endElement(TEXT_ELEMENT);
            }
            node.release();
            break;

        case Node.ATTRIBUTE_NODE:
            if ((getHighlightingMode() & TAG_ATTRIBUTE_MATCHES) == TAG_ATTRIBUTE_MATCHES && match != null && node.getNodeId().equals(match.getNodeId())) {
                // TODO(AR) do we need to expand attribute matches here also? see {@code matchAttrCdata} above
            }
            cdata = ((AttrImpl) node).getValue();
            if (first) {
                if (createContainerElements) {
                    final AttrList tattribs = new AttrList();
                    if (showId > 0) {
                        tattribs.addAttribute(ID_ATTRIB, node.getNodeId().toString());
                        tattribs.addAttribute(SOURCE_ATTRIB, doc.getFileURI().toString());
                    }
                    tattribs.addAttribute(node.getQName(), cdata);
                    receiver.startElement(ATTRIB_ELEMENT, tattribs);
                    receiver.endElement(ATTRIB_ELEMENT);
                } else {
                    if (this.outputProperties.getProperty("output-method") != null && "text".equals(this.outputProperties.getProperty("output-method"))) {
                        receiver.characters(node.getNodeValue());
                    } else {
                        LOG.warn("Error SENR0001: attribute '{}' has no parent element. While serializing document {}", node.getQName(), doc.getURI());
                        throw new SAXException("Error SENR0001: attribute '" + node.getQName() + "' has no parent element");
                    }
                }
            } else {
                receiver.attribute(node.getQName(), cdata);
            }
            node.release();
            break;

        case Node.DOCUMENT_TYPE_NODE:
            final String systemId = ((DocumentTypeImpl) node).getSystemId();
            final String publicId = ((DocumentTypeImpl) node).getPublicId();
            final String name = ((DocumentTypeImpl) node).getName();
            receiver.documentType(name, publicId, systemId);
            break;

        case Node.PROCESSING_INSTRUCTION_NODE:
            receiver.processingInstruction(((ProcessingInstructionImpl) node).getTarget(), ((ProcessingInstructionImpl) node).getData());
            node.release();
            break;

        case Node.COMMENT_NODE:
            final String comment = ((CommentImpl) node).getData();
            char[] data = new char[comment.length()];
            comment.getChars(0, data.length, data, 0);
            receiver.comment(data, 0, data.length);
            node.release();
            break;

        case Node.CDATA_SECTION_NODE:
            final String str = ((CDATASectionImpl) node).getData();
            if (first) {
                receiver.characters(str);
            } else {
                data = new char[str.length()];
                str.getChars(0, str.length(), data, 0);
                receiver.cdataSection(data, 0, data.length);
            }
            break;
    }
}
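In the Node.DOCUMENT_TYPE_NODE branch above, the receiver is handed the stored doctype as the triple (name, publicId, systemId) and ultimately has to rebuild a DOCTYPE declaration from it. A sketch of that serialization rule as a hypothetical helper (not eXist's receiver implementation):

public final class DoctypeWriter {

    // Rebuild the textual declaration from the values passed to documentType(name, publicId, systemId)
    static String toDeclaration(final String name, final String publicId, final String systemId) {
        final StringBuilder sb = new StringBuilder("<!DOCTYPE ").append(name);
        if (publicId != null) {
            sb.append(" PUBLIC \"").append(publicId).append('"');
            if (systemId != null) {
                sb.append(" \"").append(systemId).append('"');
            }
        } else if (systemId != null) {
            sb.append(" SYSTEM \"").append(systemId).append('"');
        }
        return sb.append('>').toString();
    }

    public static void main(final String[] args) {
        System.out.println(toDeclaration("html", "-//W3C//DTD XHTML 1.0 Strict//EN",
                "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"));
    }
}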
Use of org.exist.dom.persistent.DocumentTypeImpl in project exist by eXist-db.
The class NativeSerializer, method serializeToReceiver (document-level overload):
protected void serializeToReceiver(DocumentImpl doc, boolean generateDocEvent) throws SAXException {
    final long start = System.currentTimeMillis();
    setDocument(doc);
    final NodeList children = doc.getChildNodes();
    if (generateDocEvent && !documentStarted) {
        receiver.startDocument();
        documentStarted = true;
    }
    if (doc.getDoctype() != null) {
        if ("yes".equals(getProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "no"))) {
            final DocumentTypeImpl docType = (DocumentTypeImpl) doc.getDoctype();
            serializeToReceiver(docType, null, docType.getOwnerDocument(), true, null, new TreeSet<>());
        }
    }
    // iterate through children
    for (int i = 0; i < children.getLength(); i++) {
        final IStoredNode<?> node = (IStoredNode<?>) children.item(i);
        try (final INodeIterator domIter = broker.getNodeIterator(node)) {
            domIter.next();
            final NodeProxy p = new NodeProxy(node);
            serializeToReceiver(node, domIter, (DocumentImpl) node.getOwnerDocument(), true, p.getMatches(), new TreeSet<>());
        } catch (final IOException ioe) {
            LOG.warn("Unable to close node iterator", ioe);
        }
    }
    if (generateDocEvent) {
        receiver.endDocument();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("serializing document {} ({}) to SAX took {} msec", doc.getDocId(), doc.getURI(), System.currentTimeMillis() - start);
    }
}
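Note that the doctype is only serialized when the EXistOutputKeys.OUTPUT_DOCTYPE property resolves to "yes" (the default in the call above is "no"). A minimal sketch of how a caller might prepare such output properties; the import path for EXistOutputKeys and how the Properties object reaches the serializer are assumptions here, not shown in the snippet:

import java.util.Properties;

import javax.xml.transform.OutputKeys;

import org.exist.storage.serializers.EXistOutputKeys; // assumed package for the constant used above

public class DoctypeOutputProperties {
    public static void main(final String[] args) {
        final Properties outputProperties = new Properties();
        outputProperties.setProperty(OutputKeys.INDENT, "yes");
        // Ask the serializer to re-emit the stored <!DOCTYPE ...> declaration, if the document has one
        outputProperties.setProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "yes");
        System.out.println(outputProperties);
    }
}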