use of org.apache.jackrabbit.core.data.MultiDataStoreAware in project jackrabbit-oak by apache.
the class DataStoreBlobStore method countDeleteChunks.
@Override
public long countDeleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
    int count = 0;
    if (delegate instanceof MultiDataStoreAware) {
        List<String> deleted = Lists.newArrayListWithExpectedSize(512);
        for (String chunkId : chunkIds) {
            String blobId = extractBlobId(chunkId);
            DataIdentifier identifier = new DataIdentifier(blobId);
            DataRecord dataRecord = getRecordForId(identifier);
            boolean success = (maxLastModifiedTime <= 0) || dataRecord.getLastModified() <= maxLastModifiedTime;
            log.trace("Deleting blob [{}] with last modified date [{}] : [{}]", blobId, dataRecord.getLastModified(), success);
            if (success) {
                ((MultiDataStoreAware) delegate).deleteRecord(identifier);
                deleted.add(blobId);
                count++;
                if (count % 512 == 0) {
                    log.info("Deleted blobs {}", deleted);
                    deleted.clear();
                }
            }
        }
        if (!deleted.isEmpty()) {
            log.info("Deleted blobs {}", deleted);
        }
    }
    return count;
}
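For context, a minimal usage sketch (assumed setup, not part of the original snippet): the delegate must implement MultiDataStoreAware for the deletes to be carried out, so a FileDataStore (org.apache.jackrabbit.core.data.FileDataStore) is used here; passing 0 as maxLastModifiedTime skips the last-modified check. Paths and chunk ids are hypothetical.

    // Hedged sketch: wrap a MultiDataStoreAware data store in a DataStoreBlobStore
    FileDataStore fds = new FileDataStore();                 // implements MultiDataStoreAware
    fds.setPath("/path/to/datastore");                       // hypothetical location
    fds.init("/path/to/repository");                         // hypothetical home directory
    DataStoreBlobStore blobStore = new DataStoreBlobStore(fds);

    List<String> chunkIds = Arrays.asList(someChunkId);      // ids gathered by the blob garbage collector
    long deleted = blobStore.countDeleteChunks(chunkIds, 0); // 0 => delete regardless of last-modified time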
use of org.apache.jackrabbit.core.data.MultiDataStoreAware in project jackrabbit-oak by apache.
the class AbstractDataStoreTest method doDeleteRecordTest.
/**
 * Test {@link MultiDataStoreAware#deleteRecord(DataIdentifier)}.
 */
protected void doDeleteRecordTest() throws Exception {
    Random random = randomGen;
    byte[] data1 = new byte[dataLength];
    random.nextBytes(data1);
    DataRecord rec1 = ds.addRecord(new ByteArrayInputStream(data1));
    byte[] data2 = new byte[dataLength];
    random.nextBytes(data2);
    DataRecord rec2 = ds.addRecord(new ByteArrayInputStream(data2));
    byte[] data3 = new byte[dataLength];
    random.nextBytes(data3);
    DataRecord rec3 = ds.addRecord(new ByteArrayInputStream(data3));
    ((MultiDataStoreAware) ds).deleteRecord(rec1.getIdentifier());
    assertNull("rec1 should be null", ds.getRecordIfStored(rec1.getIdentifier()));
    assertEquals(new ByteArrayInputStream(data2), ds.getRecord(rec2.getIdentifier()).getStream());
    assertEquals(new ByteArrayInputStream(data3), ds.getRecord(rec3.getIdentifier()).getStream());
}
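The assertEquals calls above compare the two streams by content, relying on a helper defined in AbstractDataStoreTest. A minimal sketch of what such a stream-content assertion can look like (the actual helper in the test base class may be implemented differently):

    // Hedged sketch of a stream-content assertion, not the verbatim helper from AbstractDataStoreTest
    protected void assertEquals(InputStream expected, InputStream actual) throws IOException {
        try {
            // org.apache.commons.io.IOUtils compares the streams byte by byte
            assertTrue("streams are not equal", IOUtils.contentEquals(expected, actual));
        } finally {
            IOUtils.closeQuietly(expected);
            IOUtils.closeQuietly(actual);
        }
    }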
use of org.apache.jackrabbit.core.data.MultiDataStoreAware in project jackrabbit by apache.
the class RepositoryConfigurationParser method getDataStoreFactory.
/**
 * Parses data store configuration. Data store configuration uses the following format:
 * <pre>
 *   <DataStore class="...">
 *     <param name="..." value="...">
 *     ...
 *   </DataStore>
 * </pre>
 * It is also possible to configure a multi data store. The configuration uses the following format:
 * <pre>
 *   <DataStore class="org.apache.jackrabbit.core.data.MultiDataStore">
 *     <param name="primary" value="org.apache.jackrabbit.core.data.db.XXDataStore">
 *         <param name="..." value="...">
 *         ...
 *     </param>
 *     <param name="archive" value="org.apache.jackrabbit.core.data.db.XXDataStore">
 *         <param name="..." value="...">
 *         ...
 *     </param>
 *   </DataStore>
 * </pre>
 * <p>
 * <code>DataStore</code> is a {@link #parseBeanConfig(Element,String) bean configuration}
 * element.
 *
 * @param parent configuration element
 * @param directory the repository directory
 * @return data store factory
 * @throws ConfigurationException if the configuration is broken
 */
protected DataStoreFactory getDataStoreFactory(final Element parent, final String directory) throws ConfigurationException {
    return new DataStoreFactory() {
        public DataStore getDataStore() throws RepositoryException {
            NodeList children = parent.getChildNodes();
            for (int i = 0; i < children.getLength(); i++) {
                Node child = children.item(i);
                if (child.getNodeType() == Node.ELEMENT_NODE && DATA_STORE_ELEMENT.equals(child.getNodeName())) {
                    BeanConfig bc = parseBeanConfig(parent, DATA_STORE_ELEMENT);
                    bc.setValidate(false);
                    DataStore store = bc.newInstance(DataStore.class);
                    if (store instanceof MultiDataStore) {
                        DataStore primary = null;
                        DataStore archive = null;
                        NodeList subParamNodes = child.getChildNodes();
                        for (int x = 0; x < subParamNodes.getLength(); x++) {
                            Node paramNode = subParamNodes.item(x);
                            if (paramNode.getNodeType() == Node.ELEMENT_NODE
                                    && (PRIMARY_DATASTORE_ATTRIBUTE.equals(paramNode.getAttributes().getNamedItem("name").getNodeValue())
                                        || ARCHIVE_DATASTORE_ATTRIBUTE.equals(paramNode.getAttributes().getNamedItem("name").getNodeValue()))) {
                                try {
                                    Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
                                    Element newParent = document.createElement("parent");
                                    document.appendChild(newParent);
                                    Element datastoreElement = document.createElement(DATA_STORE_ELEMENT);
                                    newParent.appendChild(datastoreElement);
                                    NodeList childNodes = paramNode.getChildNodes();
                                    for (int y = 0; childNodes.getLength() > y; y++) {
                                        datastoreElement.appendChild(document.importNode(childNodes.item(y), true));
                                    }
                                    NamedNodeMap attributes = paramNode.getAttributes();
                                    for (int z = 0; attributes.getLength() > z; z++) {
                                        Node item = attributes.item(z);
                                        datastoreElement.setAttribute(CLASS_ATTRIBUTE, item.getNodeValue());
                                    }
                                    DataStore subDataStore = getDataStoreFactory(newParent, directory).getDataStore();
                                    if (!MultiDataStoreAware.class.isAssignableFrom(subDataStore.getClass())) {
                                        throw new ConfigurationException("Only MultiDataStoreAware datastore's can be used within a MultiDataStore.");
                                    }
                                    String type = getAttribute((Element) paramNode, NAME_ATTRIBUTE);
                                    if (PRIMARY_DATASTORE_ATTRIBUTE.equals(type)) {
                                        primary = subDataStore;
                                    } else if (ARCHIVE_DATASTORE_ATTRIBUTE.equals(type)) {
                                        archive = subDataStore;
                                    }
                                } catch (Exception e) {
                                    throw new ConfigurationException("Failed to parse the MultiDataStore element.", e);
                                }
                            }
                        }
                        if (primary == null || archive == null) {
                            throw new ConfigurationException("A MultiDataStore must have configured a primary and archive datastore");
                        }
                        ((MultiDataStore) store).setPrimaryDataStore(primary);
                        ((MultiDataStore) store).setArchiveDataStore(archive);
                    }
                    store.init(directory);
                    return store;
                }
            }
            return null;
        }
    };
}
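Putting the parsing result into plain code: for a MultiDataStore configuration, the factory effectively wires up the equivalent of the following. This is a hedged sketch with hypothetical paths; FileDataStore is used because it implements MultiDataStoreAware, as required by the check above, and the setter and init calls mirror those made by the factory.

    // Hedged sketch of the object graph the factory builds for a MultiDataStore configuration
    FileDataStore primary = new FileDataStore();
    primary.setPath("/repo/primary-datastore");   // hypothetical path
    primary.init(directory);                      // sub stores are initialised by the nested factory call

    FileDataStore archive = new FileDataStore();
    archive.setPath("/repo/archive-datastore");   // hypothetical path
    archive.init(directory);

    MultiDataStore store = new MultiDataStore();
    store.setPrimaryDataStore(primary);
    store.setArchiveDataStore(archive);
    store.init(directory);                        // same call the factory makes before returning the store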