Use of org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore in project jackrabbit-oak by apache.
The class FileDataStoreFactory, method create:
@Override
public BlobStore create(Closer closer) {
    OakFileDataStore delegate = new OakFileDataStore();
    delegate.setPath(directory);
    delegate.init(null);
    closer.register(asCloseable(delegate));
    if (ignoreMissingBlobs) {
        return new SafeDataStoreBlobStore(delegate);
    } else {
        return new DataStoreBlobStore(delegate);
    }
}
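The asCloseable helper used above is not part of the snippet. A minimal sketch, assuming it only adapts DataStore.close() (which throws DataStoreException) to java.io.Closeable so Guava's Closer can manage the data store's lifecycle; the body below is an assumption, only the call shape comes from the code above:

// Sketch: wraps DataStore.close() in a java.io.Closeable.
// Assumes org.apache.jackrabbit.core.data.DataStore and DataStoreException.
private static Closeable asCloseable(final DataStore ds) {
    return new Closeable() {
        @Override
        public void close() throws IOException {
            try {
                ds.close();
            } catch (DataStoreException e) {
                throw new IOException(e);
            }
        }
    };
}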
Use of org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore in project jackrabbit-oak by apache.
The class ReferenceBinaryIT, method createBlobStore:
private static BlobStore createBlobStore() {
    File file = getTestDir("datastore");
    OakFileDataStore fds = new OakFileDataStore();
    byte[] key = new byte[256];
    new Random().nextBytes(key);
    fds.setReferenceKeyEncoded(BaseEncoding.base64().encode(key));
    fds.setMinRecordLength(4092);
    fds.init(file.getAbsolutePath());
    return new DataStoreBlobStore(fds);
}
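The random 256-byte key passed to setReferenceKeyEncoded is what the data store uses to derive secure references for binaries. A short usage sketch against the returned store, assuming the standard Oak BlobStore API (writeBlob/getReference); the content is illustrative and exception handling and imports are omitted:

BlobStore blobStore = createBlobStore();
// writeBlob stores the stream and returns the blob id of the new record.
String blobId = blobStore.writeBlob(new ByteArrayInputStream(new byte[8192]));
// getReference returns a reference derived from the blob id using the
// reference key configured above.
String reference = blobStore.getReference(blobId);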
Use of org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore in project jackrabbit-oak by apache.
The class DataStoreCheckTest, method setup:
@Before
public void setup() throws Exception {
    if (S3DataStoreUtils.isS3Configured()) {
        Properties props = S3DataStoreUtils.getS3Config();
        props.setProperty("cacheSize", "0");
        container = props.getProperty(S3Constants.S3_BUCKET);
        DataStore ds = S3DataStoreUtils.getS3DataStore(S3DataStoreUtils.getFixtures().get(0), props,
            temporaryFolder.newFolder().getAbsolutePath());
        setupDataStore = new DataStoreBlobStore(ds);
        cfgFilePath = createTempConfig(temporaryFolder.newFile(), props);
        dsOption = "s3ds";
    } else if (AzureDataStoreUtils.isAzureConfigured()) {
        Properties props = AzureDataStoreUtils.getAzureConfig();
        props.setProperty("cacheSize", "0");
        container = props.getProperty(AzureConstants.AZURE_BLOB_CONTAINER_NAME);
        DataStore ds = AzureDataStoreUtils.getAzureDataStore(props, temporaryFolder.newFolder().getAbsolutePath());
        setupDataStore = new DataStoreBlobStore(ds);
        cfgFilePath = createTempConfig(temporaryFolder.newFile(), props);
        dsOption = "azureblobds";
    } else {
        OakFileDataStore delegate = new OakFileDataStore();
        dsPath = temporaryFolder.newFolder().getAbsolutePath();
        delegate.setPath(dsPath);
        delegate.init(null);
        setupDataStore = new DataStoreBlobStore(delegate);
        File cfgFile = temporaryFolder.newFile();
        Properties props = new Properties();
        props.put("path", dsPath);
        props.put("minRecordLength", new Long(4096));
        cfgFilePath = createTempConfig(cfgFile, props);
        dsOption = "fds";
    }
    File storeFile = temporaryFolder.newFolder();
    storePath = storeFile.getAbsolutePath();
    FileStore fileStore = FileStoreBuilder.fileStoreBuilder(storeFile)
        .withBlobStore(setupDataStore)
        .withMaxFileSize(256)
        .withSegmentCacheSize(64)
        .build();
    NodeStore store = SegmentNodeStoreBuilders.builder(fileStore).build();
    /* Create nodes with blobs stored in DS */
    NodeBuilder a = store.getRoot().builder();
    int numBlobs = 10;
    blobsAdded = Sets.newHashSet();
    blobsAddedWithNodes = Maps.newHashMap();
    for (int i = 0; i < numBlobs; i++) {
        SegmentBlob b = (SegmentBlob) store.createBlob(randomStream(i, 18342));
        Iterator<String> idIter = setupDataStore.resolveChunks(b.getBlobId());
        while (idIter.hasNext()) {
            String chunk = idIter.next();
            blobsAdded.add(chunk);
            blobsAddedWithNodes.put(chunk, "/c" + i + "/x");
        }
        a.child("c" + i).setProperty("x", b);
    }
    store.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    log.info("Created blobs : {}", blobsAdded);
    fileStore.close();
}
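The setup calls a randomStream(seed, length) helper that is not shown here. A plausible sketch, assuming it simply produces a deterministic byte stream so the same blob content can be regenerated later in the test; the body is an assumption, only the call shape comes from the code above:

// Deterministic content: the same seed always yields the same bytes.
private static InputStream randomStream(int seed, int length) {
    byte[] data = new byte[length];
    new Random(seed).nextBytes(data);
    return new ByteArrayInputStream(data);
}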
Use of org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore in project jackrabbit-oak by apache.
The class FSBlobSerializer, method createDataStore:
private DataStoreBlobStore createDataStore() {
    FileDataStore fds = new OakFileDataStore();
    fds.setPath(dir.getAbsolutePath());
    fds.setMinRecordLength(maxInlineSize);
    fds.init(null);
    return new DataStoreBlobStore(fds);
}
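Because minRecordLength is set to maxInlineSize, content smaller than that threshold is typically kept inline in the blob id rather than written to the file system data store. A minimal round-trip sketch against the returned store, assuming the standard Oak BlobStore API (writeBlob/getBlobLength/readBlob); exception handling and imports are omitted:

DataStoreBlobStore blobStore = createDataStore();
byte[] content = "example payload".getBytes(StandardCharsets.UTF_8);
// Small content is inlined into the id; larger content goes to the data store.
String id = blobStore.writeBlob(new ByteArrayInputStream(content));
byte[] back = new byte[(int) blobStore.getBlobLength(id)];
blobStore.readBlob(id, 0, back, 0, back.length);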