Search in sources :

Example 21 with DataStore

use of org.apache.jackrabbit.core.data.DataStore in project jackrabbit-oak by apache.

The following snippet shows the activate method from the class AbstractDataStoreService.

/**
 * Activates this component: builds the delegate {@code DataStore}, wraps it in a
 * {@code DataStoreBlobStore}, and registers the wrapper as an OSGi service under
 * both the {@code BlobStore} and {@code GarbageCollectableBlobStore} interfaces.
 *
 * @param context the OSGi component context used for lookups and registration
 * @param config  activation properties; copied into a mutable map because
 *                {@code createDataStore} may modify its contents
 * @throws RepositoryException if the delegate data store fails to initialize
 */
protected void activate(ComponentContext context, Map<String, Object> config) throws RepositoryException {
    // Work on a mutable copy: createDataStore may add or remove entries.
    config = Maps.newHashMap(config);
    DataStore delegate = createDataStore(context, config);

    boolean encodeLength = PropertiesUtil.toBoolean(config.get(PROP_ENCODE_LENGTH), true);
    int cacheMB = PropertiesUtil.toInteger(config.get(PROP_CACHE_SIZE), DataStoreBlobStore.DEFAULT_CACHE_SIZE);

    String homeDir = lookup(context, PROP_HOME);
    if (homeDir != null) {
        log.info("Initializing the DataStore with homeDir [{}]", homeDir);
    }

    // Inject bean properties before init so the delegate starts fully configured.
    PropertiesUtil.populate(delegate, config, false);
    delegate.init(homeDir);

    BlobStoreStats stats = new BlobStoreStats(getStatisticsProvider());
    this.dataStore = new DataStoreBlobStore(delegate, encodeLength, cacheMB);
    this.dataStore.setBlobStatsCollector(stats);
    PropertiesUtil.populate(dataStore, config, false);

    Dictionary<String, Object> serviceProps = new Hashtable<String, Object>();
    serviceProps.put(Constants.SERVICE_PID, delegate.getClass().getName());
    serviceProps.put(DESCRIPTION, getDescription());
    // Propagate the split-blobstore flag only when the component declares one.
    Object splitBlobStore = context.getProperties().get(PROP_SPLIT_BLOBSTORE);
    if (splitBlobStore != null) {
        serviceProps.put(PROP_SPLIT_BLOBSTORE, splitBlobStore);
    }

    reg = context.getBundleContext().registerService(
            new String[] { BlobStore.class.getName(), GarbageCollectableBlobStore.class.getName() },
            dataStore, serviceProps);
    mbeanReg = registerMBeans(context.getBundleContext(), dataStore, stats);
}
Also used : Hashtable(java.util.Hashtable) DataStore(org.apache.jackrabbit.core.data.DataStore) BlobStoreStats(org.apache.jackrabbit.oak.plugins.blob.BlobStoreStats)

Example 22 with DataStore

use of org.apache.jackrabbit.core.data.DataStore in project jackrabbit-oak by apache.

The following snippet shows the setup method from the class DataStoreCheckTest.

/**
 * Test fixture: picks a blob store backend (S3, Azure, or a local file data
 * store, in that order of preference, depending on which is configured), wires
 * it into a segment {@code FileStore}, creates 10 nodes each carrying a random
 * blob, records every chunk id added to the data store in {@code blobsAdded},
 * and writes a matching config file for the CLI option under test.
 *
 * Side effects: initializes {@code setupDataStore}, {@code cfgFilePath},
 * {@code dsOption}, {@code container}/{@code dsPath}, {@code storePath} and
 * {@code blobsAdded}.
 *
 * @throws Exception on any setup failure (store build, blob creation, I/O)
 */
@Before
public void setup() throws Exception {
    if (S3DataStoreUtils.isS3Configured()) {
        Properties props = S3DataStoreUtils.getS3Config();
        // Disable the local cache so every blob goes straight to the backend.
        props.setProperty("cacheSize", "0");
        container = props.getProperty(S3Constants.S3_BUCKET);
        DataStore ds = S3DataStoreUtils.getS3DataStore(S3DataStoreUtils.getFixtures().get(0), props, temporaryFolder.newFolder().getAbsolutePath());
        setupDataStore = new DataStoreBlobStore(ds);
        cfgFilePath = createTempConfig(temporaryFolder.newFile(), props);
        dsOption = "s3ds";
    } else if (AzureDataStoreUtils.isAzureConfigured()) {
        Properties props = AzureDataStoreUtils.getAzureConfig();
        props.setProperty("cacheSize", "0");
        container = props.getProperty(AzureConstants.AZURE_BLOB_CONTAINER_NAME);
        DataStore ds = AzureDataStoreUtils.getAzureDataStore(props, temporaryFolder.newFolder().getAbsolutePath());
        setupDataStore = new DataStoreBlobStore(ds);
        cfgFilePath = createTempConfig(temporaryFolder.newFile(), props);
        dsOption = "azureblobds";
    } else {
        // Fallback: local file data store under a temporary folder.
        OakFileDataStore delegate = new OakFileDataStore();
        dsPath = temporaryFolder.newFolder().getAbsolutePath();
        delegate.setPath(dsPath);
        delegate.init(null);
        setupDataStore = new DataStoreBlobStore(delegate);
        File cfgFile = temporaryFolder.newFile();
        Properties props = new Properties();
        props.put("path", dsPath);
        // Long.valueOf instead of the deprecated boxed-primitive constructor.
        props.put("minRecordLength", Long.valueOf(4096L));
        cfgFilePath = createTempConfig(cfgFile, props);
        dsOption = "fds";
    }
    File storeFile = temporaryFolder.newFolder();
    storePath = storeFile.getAbsolutePath();
    FileStore fileStore = FileStoreBuilder.fileStoreBuilder(storeFile).withBlobStore(setupDataStore).withMaxFileSize(256).withSegmentCacheSize(64).build();
    NodeStore store = SegmentNodeStoreBuilders.builder(fileStore).build();
    /* Create nodes with blobs stored in DS*/
    NodeBuilder a = store.getRoot().builder();
    int numBlobs = 10;
    blobsAdded = Sets.newHashSet();
    for (int i = 0; i < numBlobs; i++) {
        SegmentBlob b = (SegmentBlob) store.createBlob(randomStream(i, 18342));
        // One logical blob may be stored as several chunks; track each chunk id.
        Iterator<String> idIter = setupDataStore.resolveChunks(b.getBlobId());
        while (idIter.hasNext()) {
            String chunk = idIter.next();
            blobsAdded.add(chunk);
        }
        a.child("c" + i).setProperty("x", b);
    }
    store.merge(a, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    log.info("Created blobs : {}", blobsAdded);
    // Close so the command under test can open the store exclusively.
    fileStore.close();
}
Also used : OakFileDataStore(org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore) SegmentBlob(org.apache.jackrabbit.oak.segment.SegmentBlob) Properties(java.util.Properties) NodeBuilder(org.apache.jackrabbit.oak.spi.state.NodeBuilder) FileStore(org.apache.jackrabbit.oak.segment.file.FileStore) NodeStore(org.apache.jackrabbit.oak.spi.state.NodeStore) DataStore(org.apache.jackrabbit.core.data.DataStore) OakFileDataStore(org.apache.jackrabbit.oak.plugins.blob.datastore.OakFileDataStore) File(java.io.File) DataStoreBlobStore(org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore) Before(org.junit.Before)

Aggregations

DataStore (org.apache.jackrabbit.core.data.DataStore)22 FileDataStore (org.apache.jackrabbit.core.data.FileDataStore)9 Test (org.junit.Test)8 File (java.io.File)7 Properties (java.util.Properties)6 DataStoreBlobStore (org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore)5 IOException (java.io.IOException)4 RepositoryException (javax.jcr.RepositoryException)4 ByteArrayInputStream (java.io.ByteArrayInputStream)3 HashMap (java.util.HashMap)3 Random (java.util.Random)3 DataRecord (org.apache.jackrabbit.core.data.DataRecord)3 SharedS3DataStore (org.apache.jackrabbit.oak.blob.cloud.aws.s3.SharedS3DataStore)3 AbstractBlobStoreTest (org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest)3 BlobStore (org.apache.jackrabbit.oak.spi.blob.BlobStore)3 Closer (com.google.common.io.Closer)2 OptionParser (joptsimple.OptionParser)2 OptionSet (joptsimple.OptionSet)2 ClusterNode (org.apache.jackrabbit.core.cluster.ClusterNode)2 DataIdentifier (org.apache.jackrabbit.core.data.DataIdentifier)2