Use of org.exist.xmldb.XmldbURI in the eXist-db project (exist): class InteractiveClient, method findGZipRecursive.
/**
 * Recursively stores the contents of a local directory into the given database
 * collection, creating child collections for sub-directories as it descends.
 * Files whose names end in ".gz" or ".Z" are stored under their full
 * (compressed) name, but the suffix is stripped before MIME-type detection and
 * compressed content is wrapped in a GZIPInputSource before storing.
 *
 * @param collection the target database collection for the contents of {@code dir}
 * @param dir        the local directory to walk
 * @param base       URI prefix used to build child URIs (currently unused downstream,
 *                   see note below)
 * @return always {@code true}
 * @throws XMLDBException if a database operation fails
 * @throws IOException    if the local directory cannot be listed or a file size read
 */
private synchronized boolean findGZipRecursive(final Collection collection, final Path dir, final XmldbURI base) throws XMLDBException, IOException {
final List<Path> files = FileUtils.list(dir);
Collection c;
Resource document;
EXistCollectionManagementService mgtService;
// The XmldbURIs here aren't really used...
XmldbURI next;
MimeType mimeType;
// 1-based position of the current entry, only reported in progress messages.
int i = 0;
for (final Path file : files) {
i++;
next = base.append(FileUtils.fileName(file));
try {
if (Files.isDirectory(file)) {
messageln("entering directory " + file.toAbsolutePath().toString());
// Reuse an existing child collection of the same name, create it otherwise.
c = collection.getChildCollection(FileUtils.fileName(file));
if (c == null) {
mgtService = (EXistCollectionManagementService) collection.getService("CollectionManagementService", "1.0");
c = mgtService.createCollection(XmldbURI.xmldbUriFor(FileUtils.fileName(file)));
}
// Attach a progress observer when verbose output was requested.
if (c instanceof Observable && options.verbose) {
final ProgressObserver observer = new ProgressObserver();
((Observable) c).addObserver(observer);
}
findGZipRecursive(c, file, next);
} else {
final long start1 = System.currentTimeMillis();
final String compressedName = FileUtils.fileName(file);
String localName = compressedName;
final String[] cSuffix = { ".gz", ".Z" };
boolean isCompressed = false;
for (final String suf : cSuffix) {
if (localName.endsWith(suf)) {
// Strip the compression suffix so the MIME type is detected from the
// underlying file name (e.g. "a.xml.gz" -> "a.xml").
localName = compressedName.substring(0, localName.length() - suf.length());
isCompressed = true;
break;
}
}
mimeType = MimeTable.getInstance().getContentTypeFor(localName);
if (mimeType == null) {
messageln("File " + compressedName + " has an unknown suffix. Cannot determine file type.");
mimeType = MimeType.BINARY_TYPE;
}
message("storing document " + compressedName + " (" + i + " of " + files.size() + ") " + "...");
document = collection.createResource(compressedName, mimeType.getXMLDBType());
// For compressed files the content is wrapped so the inflated data is stored.
document.setContent(isCompressed ? new GZIPInputSource(file) : file);
((EXistResource) document).setMimeType(mimeType.getName());
collection.storeResource(document);
++filesCount;
// NOTE: the size printed for compressed files is the on-disk (compressed) size.
messageln(" " + Files.size(file) + (isCompressed ? " compressed" : "") + " bytes in " + (System.currentTimeMillis() - start1) + "ms.");
}
} catch (final URISyntaxException e) {
// Skip entries whose names do not form a valid XmldbURI; continue with the rest.
errorln("uri syntax exception parsing " + file.toAbsolutePath().toString() + ": " + e.getMessage());
}
}
return true;
}
Use of org.exist.xmldb.XmldbURI in the eXist-db project (exist): class CollectionCache, method prepare.
@Override
public void prepare(final BrokerPool brokerPool) throws BrokerPoolServiceException {
    // Weigh each entry by its in-memory size so that maximumWeight bounds the
    // cache's total memory footprint rather than the number of entries.
    // Fix: the weigher must be keyed on XmldbURI to match the cache's key type
    // (Caffeine.weigher requires Weigher<? super K, ? super V>); the previous
    // Weigher<String, Collection> declaration was inconsistent with
    // Caffeine.<XmldbURI, Collection>newBuilder().
    final Weigher<XmldbURI, Collection> collectionWeigher = (uri, collection) -> collection.getMemorySizeNoLock();
    this.statsCounter = new ConcurrentStatsCounter();
    this.cache = Caffeine.<XmldbURI, Collection>newBuilder()
            .maximumWeight(maxCacheSize)
            .weigher(collectionWeigher)
            .recordStats(() -> statsCounter)
            .build();
}
Use of org.exist.xmldb.XmldbURI in the eXist-db project (exist): class CollectionConfigurationManager, method loadAllConfigurations.
/**
 * Loads the configuration of {@code configCollection} and then recursively
 * loads the configurations of all of its descendant collections.
 *
 * @param broker           the broker used to resolve child collections
 * @param configCollection the root of the configuration subtree; may be null,
 *                         in which case this is a no-op
 * @throws CollectionConfigurationException if a configuration document is invalid
 * @throws PermissionDeniedException        if a collection may not be read
 * @throws LockException                    if a required lock cannot be acquired
 */
protected void loadAllConfigurations(DBBroker broker, Collection configCollection) throws CollectionConfigurationException, PermissionDeniedException, LockException {
    if (configCollection == null) {
        return;
    }
    loadConfiguration(broker, configCollection);
    final XmldbURI path = configCollection.getURI();
    for (final Iterator<XmldbURI> i = configCollection.collectionIterator(broker); i.hasNext(); ) {
        final XmldbURI childName = i.next();
        final Collection child = broker.getCollection(path.appendInternal(childName));
        if (child == null) {
            LOG.error("Collection is registered but could not be loaded: {}", childName);
            // Nothing to descend into; skip instead of recursing with null.
            continue;
        }
        loadAllConfigurations(broker, child);
    }
}
Use of org.exist.xmldb.XmldbURI in the eXist-db project (exist): class CollectionConfigurationManager, method addConfiguration.
/**
 * Adds a new collection configuration, supplied as an XML (xconf) string.
 * If the collection already has a configuration document, it is replaced
 * under its existing name; otherwise the default configuration file name
 * is used.
 *
 * @param txn The transaction that will hold the WRITE locks until they are
 *            released by commit()/abort()
 * @param broker the eXist-db broker
 * @param collection the collection to which the configuration applies.
 * @param config the xconf document as a String.
 * @throws CollectionConfigurationException if config is invalid or cannot be stored
 */
public void addConfiguration(final Txn txn, final DBBroker broker, final Collection collection, final String config) throws CollectionConfigurationException {
    try {
        final XmldbURI configPath = CONFIG_COLLECTION_URI.append(collection.getURI());
        final Collection configCollection = broker.getOrCreateCollection(txn, configPath);
        if (configCollection == null) {
            throw new CollectionConfigurationException("Failed to create config collection: " + configPath);
        }

        // Reuse the name of the existing configuration document, if any,
        // so that storing the new content replaces it.
        final CollectionConfiguration existing = getConfiguration(collection);
        XmldbURI targetDocName = (existing == null) ? null : existing.getDocName();
        if (targetDocName != null) {
            LOG.warn("Replacing current configuration file '{}'", targetDocName);
        } else {
            targetDocName = CollectionConfiguration.DEFAULT_COLLECTION_CONFIG_FILE_URI;
        }

        broker.saveCollection(txn, configCollection);
        broker.storeDocument(txn, targetDocName, new StringInputSource(config), MimeType.XML_TYPE, configCollection);
    } catch (final CollectionConfigurationException e) {
        // Already carries the right type/message; propagate untouched.
        throw e;
    } catch (final Exception e) {
        throw new CollectionConfigurationException("Failed to store collection configuration: " + e.getMessage(), e);
    }
}
Use of org.exist.xmldb.XmldbURI in the eXist-db project (exist): class SystemExport, method export.
/**
 * Export a collection: write the collection metadata (permissions, creation
 * date, resource and sub-collection entries) to the __contents__.xml
 * descriptor and save the resources stored in the collection, recursing is
 * handled by the caller via the emitted subcollection entries.
 *
 * @param bh         backup handler given the chance to add custom attributes/elements
 * @param current    the collection to export
 * @param output     the output writer receiving the backup data
 * @param date       passed through to {@code exportDocument} for incremental backups
 * @param prevBackup the previous backup descriptor, or null for a full backup;
 *                   used to detect collections/resources deleted since then
 * @param errorList  a list of {@link org.exist.backup.ErrorReport} objects as returned
 *                   by methods in {@link org.exist.backup.ConsistencyCheck}; damaged
 *                   documents and child collections are skipped
 * @param docs       a document set to keep track of all written documents.
 * @throws IOException if writing to the backup fails
 * @throws SAXException if serializing the descriptor fails
 * @throws TerminatedException if the database asked the export monitor to stop
 * @throws PermissionDeniedException if the broker may not read the collection
 */
private void export(final BackupHandler bh, final Collection current, final BackupWriter output, final Date date, final BackupDescriptor prevBackup, final List<ErrorReport> errorList, final MutableDocumentSet docs) throws IOException, SAXException, TerminatedException, PermissionDeniedException {
    if ((monitor != null) && !monitor.proceed()) {
        throw (new TerminatedException("system export terminated by db"));
    }
    output.newCollection(Backup.encode(URIUtils.urlDecodeUtf8(current.getURI())));
    final SAXSerializer serializer = (SAXSerializer) SerializerPool.getInstance().borrowObject(SAXSerializer.class);
    try {
        final Writer contents = output.newContents();
        // serializer writes to __contents__.xml
        serializer.setOutput(contents, contentsOutputProps);
        final Permission perm = current.getPermissionsNoLock();
        serializer.startDocument();
        serializer.startPrefixMapping("", Namespaces.EXIST_NS);
        final XmldbURI uri = current.getURI();
        final AttributesImpl attr = new AttributesImpl();
        attr.addAttribute(Namespaces.EXIST_NS, "name", "name", "CDATA", uri.toString());
        attr.addAttribute(Namespaces.EXIST_NS, "version", "version", "CDATA", String.valueOf(currVersion));
        Backup.writeUnixStylePermissionAttributes(attr, perm);
        try {
            attr.addAttribute(Namespaces.EXIST_NS, "created", "created", "CDATA", new DateTimeValue(new Date(current.getCreated())).getStringValue());
        } catch (final XPathException e) {
            // Fix: log via the class logger instead of printStackTrace(); a bad
            // creation date should not pollute stderr, and the export continues
            // without the "created" attribute either way.
            LOG.warn("Failed to serialize creation date of collection {}", uri, e);
        }
        bh.backup(current, attr);
        serializer.startElement(Namespaces.EXIST_NS, "collection", "collection", attr);
        if (perm instanceof ACLPermission) {
            Backup.writeACLPermission(serializer, (ACLPermission) perm);
        }
        bh.backup(current, serializer);
        final int docsCount = current.getDocumentCountNoLock(broker);
        int count = 0;
        // Export every document in this collection, skipping damaged ones and
        // the backup's own bookkeeping documents.
        for (final Iterator<DocumentImpl> i = current.iteratorNoLock(broker); i.hasNext(); count++) {
            final DocumentImpl doc = i.next();
            if (isDamaged(doc, errorList)) {
                reportError("Skipping damaged document " + doc.getFileURI(), null);
                continue;
            }
            if (doc.getFileURI().equalsInternal(CONTENTS_URI) || doc.getFileURI().equalsInternal(LOST_URI)) {
                // skip __contents__.xml documents
                continue;
            }
            exportDocument(bh, output, date, prevBackup, serializer, docsCount, count, doc);
            docs.add(doc, false);
        }
        // Emit a <subcollection> entry for every child, skipping the temporary
        // collection and damaged children.
        for (final Iterator<XmldbURI> i = current.collectionIteratorNoLock(broker); i.hasNext(); ) {
            final XmldbURI childUri = i.next();
            if (childUri.equalsInternal(TEMP_COLLECTION)) {
                continue;
            }
            if (isDamagedChild(childUri, errorList)) {
                reportError("Skipping damaged child collection " + childUri, null);
                continue;
            }
            attr.clear();
            attr.addAttribute(Namespaces.EXIST_NS, "name", "name", "CDATA", childUri.toString());
            attr.addAttribute(Namespaces.EXIST_NS, "filename", "filename", "CDATA", Backup.encode(URIUtils.urlDecodeUtf8(childUri.toString())));
            serializer.startElement(Namespaces.EXIST_NS, "subcollection", "subcollection", attr);
            serializer.endElement(Namespaces.EXIST_NS, "subcollection", "subcollection");
        }
        if (prevBackup != null) {
            // Check which collections and resources have been deleted since the
            // last backup
            final CheckDeletedHandler check = new CheckDeletedHandler(current, serializer);
            try {
                prevBackup.parse(broker.getBrokerPool().getParserPool(), check);
            } catch (final Exception e) {
                LOG.error("Caught exception while trying to parse previous backup descriptor: {}", prevBackup.getSymbolicPath(), e);
            }
        }
        // close <collection>
        serializer.endElement(Namespaces.EXIST_NS, "collection", "collection");
        serializer.endPrefixMapping("");
        serializer.endDocument();
        output.closeContents();
    } finally {
        // Always return the pooled serializer and close the collection entry,
        // even when serialization failed part-way through.
        SerializerPool.getInstance().returnObject(serializer);
        output.closeCollection();
    }
}
End of aggregated usage examples for org.exist.xmldb.XmldbURI.