Usage example of org.apache.jackrabbit.core.data.DataIdentifier from the Apache jackrabbit-oak project: method getAllIdentifiers of class CachingDataStoreTest.
/**
 * Verifies that an added record's identifier is reported by
 * {@code getAllIdentifiers()} both before and after the async upload completes.
 */
@Test
public void getAllIdentifiers() throws Exception {
    LOG.info("Starting getAllIdentifiers");

    // Stage a 4 KiB file and compute its expected content-derived identifier.
    File content = copyToFile(randomStream(0, 4 * 1024), folder.newFile());
    String expectedId = getIdForInputStream(content);

    FileInputStream stream = new FileInputStream(content);
    closer.register(stream);

    // Adding the record must yield the expected identifier immediately.
    DataRecord record = dataStore.addRecord(stream);
    assertEquals(expectedId, record.getIdentifier().toString());
    assertTrue(Iterators.contains(dataStore.getAllIdentifiers(), new DataIdentifier(expectedId)));

    // Release the latches so the background upload can start and finish.
    taskLatch.countDown();
    callbackLatch.countDown();
    waitFinish();

    // The identifier must still be listed once the upload has completed.
    assertTrue(Iterators.contains(dataStore.getAllIdentifiers(), new DataIdentifier(expectedId)));
    LOG.info("Finished getAllIdentifiers");
}
Usage example of org.apache.jackrabbit.core.data.DataIdentifier from the Apache jackrabbit-oak project: method lazyLoadStream of class CachingDataStoreTest.
/**
 * Adds a record, evicts it from the local cache, and verifies the stream is
 * lazily re-fetched from the backend and re-populated into the cache.
 */
@Test
public void lazyLoadStream() throws Exception {
    LOG.info("Starting lazyLoadStream");

    // Stage a 4 KiB file and compute its expected identifier.
    File content = copyToFile(randomStream(0, 4 * 1024), folder.newFile());
    String expectedId = getIdForInputStream(content);

    FileInputStream stream = new FileInputStream(content);
    closer.register(stream);
    DataRecord record = dataStore.addRecord(stream);
    assertEquals(expectedId, record.getIdentifier().toString());

    // Release the latches so the background upload can start and finish.
    taskLatch.countDown();
    callbackLatch.countDown();
    waitFinish();

    // Evict the entry from the local cache.
    dataStore.getCache().invalidate(expectedId);

    // The record must still be resolvable straight from the backend.
    record = dataStore.getRecordIfStored(new DataIdentifier(expectedId));
    assertNotNull(record);
    assertEquals(expectedId, record.getIdentifier().toString());

    // Resolving the record alone must not have re-cached the file yet.
    File cached = dataStore.getCache().getIfPresent(expectedId);
    assertNull(cached);

    // Reading the stream triggers the lazy download...
    assertFile(record.getStream(), f, folder);

    // ...after which the file must be back in the cache with identical bytes.
    cached = dataStore.getCache().getIfPresent(expectedId);
    assertNotNull(cached);
    assertTrue(Files.equal(f, cached));

    // Deleting the record makes it unresolvable.
    dataStore.deleteRecord(new DataIdentifier(expectedId));
    record = dataStore.getRecordIfStored(new DataIdentifier(expectedId));
    assertNull(record);
    LOG.info("Finished lazyLoadStream");
}
Usage example of org.apache.jackrabbit.core.data.DataIdentifier from the Apache jackrabbit-oak project: method zeroCacheAddGetDelete of class CachingDataStoreTest.
/**
 * Exercises add, get, list, and delete against a data store that has been
 * re-initialized with a zero-sized cache.
 *
 * @throws Exception on any test failure
 */
@Test
public void zeroCacheAddGetDelete() throws Exception {
    LOG.info("Starting zeroCacheAddGetDelete");

    // Re-initialize the store with a cache of size zero.
    dataStore.close();
    init(1, 0, 0);

    // Stage a 4 KiB file and compute its expected identifier.
    File content = copyToFile(randomStream(0, 4 * 1024), folder.newFile());
    String expectedId = getIdForInputStream(content);

    FileInputStream stream = new FileInputStream(content);
    closer.register(stream);

    // Add: the returned record must match both identifier and bytes.
    DataRecord record = dataStore.addRecord(stream);
    assertEquals(expectedId, record.getIdentifier().toString());
    assertFile(record.getStream(), f, folder);

    // Get: looking the record up again must yield the same identifier and bytes.
    record = dataStore.getRecordIfStored(new DataIdentifier(expectedId));
    assertEquals(expectedId, record.getIdentifier().toString());
    assertFile(record.getStream(), f, folder);

    // List: exactly one identifier is present.
    assertEquals(1, Iterators.size(dataStore.getAllIdentifiers()));

    // Delete: afterwards the record must be gone.
    dataStore.deleteRecord(new DataIdentifier(expectedId));
    record = dataStore.getRecordIfStored(new DataIdentifier(expectedId));
    assertNull(record);
    LOG.info("Finished zeroCacheAddGetDelete");
}
Usage example of org.apache.jackrabbit.core.data.DataIdentifier from the Apache jackrabbit project: method getAllIdentifiers of class DbDataStore.
/**
 * Returns the identifiers of all records currently stored in the database,
 * skipping entries whose id carries the temporary prefix (uploads still in
 * progress).
 *
 * @return an iterator over the stored identifiers
 * @throws DataStoreException if the database query fails
 */
public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
    ArrayList<DataIdentifier> list = new ArrayList<DataIdentifier>();
    ResultSet rs = null;
    try {
        // SELECT ID FROM DATASTORE
        rs = conHelper.query(selectAllSQL);
        while (rs.next()) {
            String id = rs.getString(1);
            // Rows prefixed with TEMP_PREFIX are in-flight uploads; exclude them.
            if (!id.startsWith(TEMP_PREFIX)) {
                list.add(new DataIdentifier(id));
            }
        }
        // Parameterized logging: no string concatenation when DEBUG is disabled.
        log.debug("Found {} identifiers.", list.size());
        return list.iterator();
    } catch (Exception e) {
        throw convert("Can not read records", e);
    } finally {
        // Always release the result set, even when the query or iteration failed.
        DbUtility.close(rs);
    }
}
Usage example of org.apache.jackrabbit.core.data.DataIdentifier from the Apache jackrabbit-oak project: method getMetadataRecord of class AzureBlobStoreBackend.
/**
 * Reads a single metadata record (a blob under the META directory) from the
 * Azure container.
 *
 * <p>The thread's context class loader is temporarily swapped to this class's
 * loader around the Azure SDK calls and restored in the finally block —
 * presumably to avoid SDK class-loading issues in OSGi; confirm against the
 * rest of this backend.
 *
 * @param name the metadata blob name, relative to the META directory
 * @return the record, or {@code null} if no such metadata blob exists
 * @throws RuntimeException wrapping any storage or other failure
 */
@Override
public DataRecord getMetadataRecord(String name) {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    long start = System.currentTimeMillis();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        CloudBlobDirectory metaDir = getAzureContainer().getDirectoryReference(META_DIR_NAME);
        CloudBlockBlob blob = metaDir.getBlockBlobReference(name);
        if (!blob.exists()) {
            LOG.warn("Trying to read missing metadata. metadataName={}", name);
            return null;
        }
        // Populate properties (last-modified, length) from the service.
        blob.downloadAttributes();
        long lastModified = blob.getProperties().getLastModified().getTime();
        long length = blob.getProperties().getLength();
        AzureBlobStoreDataRecord record = new AzureBlobStoreDataRecord(this, connectionString, containerName, new DataIdentifier(name), lastModified, length, true);
        LOG.debug("Metadata record read. metadataName={} duration={} record={}", name, (System.currentTimeMillis() - start), record);
        return record;
    } catch (StorageException e) {
        LOG.info("Error reading metadata record. metadataName={}", name, e);
        throw new RuntimeException(e);
    } catch (Exception e) {
        // Log at the same level as the StorageException path; the previous
        // DEBUG level hid the failure detail in default log configurations
        // even though the exception is rethrown.
        LOG.info("Error reading metadata record. metadataName={}", name, e);
        throw new RuntimeException(e);
    } finally {
        // Restore the caller's context class loader.
        if (null != contextClassLoader) {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }
    }
}
Aggregations