Use of org.apache.jackrabbit.oak.spi.state.NodeStore in project jackrabbit-oak by Apache.
From the class RandomOpCompare, method getMongo:
private static NodeStoreFixture getMongo() {
    return new NodeStoreFixture() {

        @Override
        public NodeStore createNodeStore() {
            MongoConnection connection;
            try {
                connection = new MongoConnection("mongodb://localhost:27017/oak");
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            DB mongoDB = connection.getDB();
            // Disable the in-memory cache and rely on a file-backed
            // persistent cache instead
            return new DocumentMK.Builder()
                    .memoryCacheSize(0)
                    .setMongoDB(mongoDB, 16)
                    .setPersistentCache("target/persistentCache,time")
                    .getNodeStore();
        }

        @Override
        public NodeStore createNodeStore(int clusterNodeId) {
            return null;
        }

        @Override
        public void dispose(NodeStore nodeStore) {
            if (nodeStore instanceof Closeable) {
                try {
                    ((Closeable) nodeStore).close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    };
}
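The fixture above is only a factory; the caller is responsible for pairing every createNodeStore() with a matching dispose(). A minimal usage sketch, assuming a MongoDB instance is reachable at mongodb://localhost:27017 (variable names are illustrative):

// Sketch: drive the fixture the way a test harness would.
// Assumes MongoDB is running at mongodb://localhost:27017/oak.
NodeStoreFixture fixture = getMongo();
NodeStore store = fixture.createNodeStore();
try {
    NodeState root = store.getRoot(); // read the current head state
    // ... exercise the store ...
} finally {
    fixture.dispose(store); // closes the underlying DocumentNodeStore
}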
Use of org.apache.jackrabbit.oak.spi.state.NodeStore in project jackrabbit-oak by Apache.
From the class SegmentNodeStoreFactory, method activate:
@Activate
public void activate(ComponentContext context) throws IOException {
    String role = property(ROLE, context);
    // In secondaryNodeStore mode customBlobStore is always enabled
    boolean isSecondaryStoreMode = "secondary".equals(role);
    boolean customBlobStore = Boolean.parseBoolean(property(CUSTOM_BLOB_STORE, context)) || isSecondaryStoreMode;
    boolean registerRepositoryDescriptors = Boolean.parseBoolean(property(REGISTER_DESCRIPTORS, context));
    log.info("activate: SegmentNodeStore '{}' starting.", role);
    if (blobStore == null && customBlobStore) {
        // Defer initialization until the BlobStore reference is bound
        log.info("BlobStore use enabled. SegmentNodeStore will be initialized once a BlobStore becomes available");
        return;
    }
    if (role != null) {
        registrations = Closer.create();
        OsgiWhiteboard whiteboard = new OsgiWhiteboard(context.getBundleContext());
        final SegmentNodeStore store = SegmentNodeStoreService.registerSegmentStore(context, blobStore,
                statisticsProvider, registrations, whiteboard, role, registerRepositoryDescriptors);
        if (store != null) {
            Map<String, Object> props = new HashMap<String, Object>();
            props.put(NodeStoreProvider.ROLE, role);
            // Expose the store as a NodeStoreProvider rather than a plain
            // NodeStore, so consumers can select it by its role property
            registrations.register(asCloseable(whiteboard.register(NodeStoreProvider.class, new NodeStoreProvider() {

                @Override
                public NodeStore getNodeStore() {
                    return store;
                }
            }, props)));
            log.info("Registered NodeStoreProvider backed by SegmentNodeStore of type '{}'", role);
        }
    }
}
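Because the provider is registered with a role service property, consumers can bind a specific store by role instead of taking whichever NodeStore happens to be active. A hypothetical Declarative Services consumer, sketched under the assumption that NodeStoreProvider.ROLE maps to the "role" property used in the target filter:

// Hypothetical consumer component; binds only the provider whose
// "role" service property is "secondary" (set by the factory above).
@Component
public class SecondaryStoreConsumer {

    @Reference(target = "(role=secondary)")
    private NodeStoreProvider secondaryProvider;

    @Activate
    public void activate() {
        NodeStore secondary = secondaryProvider.getNodeStore();
        // ... wire the secondary store into whatever needs it ...
    }
}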
Use of org.apache.jackrabbit.oak.spi.state.NodeStore in project jackrabbit-oak by Apache.
From the class TextExtractorMain, method main:
public static void main(String[] args) throws Exception {
    Closer closer = Closer.create();
    String h = "tika [extract|report|generate]\n" +
            "\n" +
            "report : Generates a summary report related to binary data\n" +
            "extract : Performs the text extraction\n" +
            "generate : Generates the csv data file based on configured NodeStore/BlobStore";
    try {
        OptionParser parser = new OptionParser();
        OptionSpec<?> help = parser.acceptsAll(asList("h", "?", "help"), "show help").forHelp();
        OptionSpec<String> nodeStoreSpec = parser.accepts("nodestore",
                "NodeStore detail /path/to/oak/repository | mongodb://host:port/database")
                .withRequiredArg().ofType(String.class);
        OptionSpec<String> pathSpec = parser.accepts("path",
                "Path in repository under which the binaries would be searched")
                .withRequiredArg().ofType(String.class);
        OptionSpec<File> dataFileSpec = parser.accepts("data-file",
                "Data file in csv format containing the binary metadata")
                .withRequiredArg().ofType(File.class);
        OptionSpec<File> tikaConfigSpec = parser.accepts("tika-config",
                "Tika config file path")
                .withRequiredArg().ofType(File.class);
        OptionSpec<File> fdsDirSpec = parser.accepts("fds-path",
                "Path of directory used by FileDataStore")
                .withRequiredArg().ofType(File.class);
        OptionSpec<File> s3ConfigSpec = parser.accepts("s3-config-path",
                "Path of properties file containing config for S3DataStore")
                .withRequiredArg().ofType(File.class);
        OptionSpec<File> storeDirSpec = parser.accepts("store-path",
                "Path of directory used to store extracted text content")
                .withRequiredArg().ofType(File.class);
        OptionSpec<Integer> poolSize = parser.accepts("pool-size",
                "Size of the thread pool used to perform text extraction. Defaults " +
                "to number of cores on the system")
                .withRequiredArg().ofType(Integer.class);
        OptionSpec<String> nonOption = parser.nonOptions(h);
        OptionSet options = parser.parse(args);
        List<String> nonOptions = nonOption.values(options);
        if (options.has(help)) {
            parser.printHelpOn(System.out);
            System.exit(0);
        }
        if (nonOptions.isEmpty()) {
            parser.printHelpOn(System.err);
            System.exit(1);
        }
        boolean report = nonOptions.contains("report");
        boolean extract = nonOptions.contains("extract");
        boolean generate = nonOptions.contains("generate");
        File dataFile = null;
        File storeDir = null;
        File tikaConfigFile = null;
        BlobStore blobStore = null;
        BinaryResourceProvider binaryResourceProvider = null;
        BinaryStats stats = null;
        String path = "/";
        if (options.has(tikaConfigSpec)) {
            tikaConfigFile = tikaConfigSpec.value(options);
            checkArgument(tikaConfigFile.exists(), "Tika config file %s does not exist",
                    tikaConfigFile.getAbsolutePath());
        }
        if (options.has(storeDirSpec)) {
            storeDir = storeDirSpec.value(options);
            if (storeDir.exists()) {
                checkArgument(storeDir.isDirectory(), "Path [%s] specified for storing extracted " +
                        "text content '%s' is not a directory",
                        storeDir.getAbsolutePath(), storeDirSpec.options());
            }
        }
        if (options.has(fdsDirSpec)) {
            // Bootstrap a BlobStore backed by a FileDataStore
            File fdsDir = fdsDirSpec.value(options);
            checkArgument(fdsDir.exists(), "FileDataStore %s does not exist", fdsDir.getAbsolutePath());
            FileDataStore fds = new FileDataStore();
            fds.setPath(fdsDir.getAbsolutePath());
            fds.init(null);
            blobStore = new DataStoreBlobStore(fds);
        }
        if (options.has(s3ConfigSpec)) {
            // Bootstrap a BlobStore backed by an S3DataStore configured
            // via a properties file
            File s3Config = s3ConfigSpec.value(options);
            checkArgument(s3Config.exists() && s3Config.canRead(),
                    "S3DataStore config cannot be read from [%s]", s3Config.getAbsolutePath());
            Properties props = loadProperties(s3Config);
            log.info("Loaded properties for S3DataStore from {}", s3Config.getAbsolutePath());
            String pathProp = "path";
            String repoPath = props.getProperty(pathProp);
            checkNotNull(repoPath, "Missing required property [%s] from S3DataStore config loaded from [%s]",
                    pathProp, s3Config);
            // Check if the 'secret' key is defined. It must be non-null for
            // references to be generated. As the references are transient we
            // can just use a random value if none is specified.
            String secretConfig = "secret";
            if (props.getProperty(secretConfig) == null) {
                props.setProperty(secretConfig, UUID.randomUUID().toString());
            }
            log.info("Using {} for S3DataStore", repoPath);
            DataStore ds = createS3DataStore(props);
            PropertiesUtil.populate(ds, toMap(props), false);
            ds.init(pathProp);
            blobStore = new DataStoreBlobStore(ds);
            closer.register(asCloseable(ds));
        }
        if (options.has(dataFileSpec)) {
            dataFile = dataFileSpec.value(options);
        }
        checkNotNull(dataFile, "Data file not configured with %s", dataFileSpec);
        if (report || extract) {
            checkArgument(dataFile.exists(), "Data file %s does not exist", dataFile.getAbsolutePath());
            binaryResourceProvider = new CSVFileBinaryResourceProvider(dataFile, blobStore);
            if (binaryResourceProvider instanceof Closeable) {
                closer.register((Closeable) binaryResourceProvider);
            }
            stats = new BinaryStats(tikaConfigFile, binaryResourceProvider);
            String summary = stats.getSummary();
            log.info(summary);
        }
        if (generate) {
            // Walk the NodeStore and write the binary metadata to the csv file
            String src = nodeStoreSpec.value(options);
            checkNotNull(blobStore, "BlobStore found to be null. FileDataStore directory " +
                    "must be specified via %s", fdsDirSpec.options());
            checkNotNull(dataFile, "Data file path not provided");
            NodeStore nodeStore = bootStrapNodeStore(src, blobStore, closer);
            BinaryResourceProvider brp = new NodeStoreBinaryResourceProvider(nodeStore, blobStore);
            CSVFileGenerator generator = new CSVFileGenerator(dataFile);
            generator.generate(brp.getBinaries(path));
        }
        if (extract) {
            checkNotNull(storeDir, "Directory to store extracted text content " +
                    "must be specified via %s", storeDirSpec.options());
            checkNotNull(blobStore, "BlobStore found to be null. FileDataStore directory " +
                    "must be specified via %s", fdsDirSpec.options());
            DataStoreTextWriter writer = new DataStoreTextWriter(storeDir, false);
            TextExtractor extractor = new TextExtractor(writer);
            if (options.has(poolSize)) {
                extractor.setThreadPoolSize(poolSize.value(options));
            }
            if (tikaConfigFile != null) {
                extractor.setTikaConfig(tikaConfigFile);
            }
            if (options.has(pathSpec)) {
                path = pathSpec.value(options);
            }
            closer.register(writer);
            closer.register(extractor);
            extractor.setStats(stats);
            log.info("Using path {}", path);
            extractor.extract(binaryResourceProvider.getBinaries(path));
            extractor.close();
            writer.close();
        }
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}
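The option names above map one-to-one onto the tika command line documented in the help string h. A hypothetical two-step run (all paths are placeholders, and the jar name assumes this main method is wired into oak-run's tika mode):

# Step 1: walk the NodeStore and write binary metadata to a csv file
java -jar oak-run.jar tika \
    --nodestore /path/to/oak/repository \
    --fds-path /path/to/datastore \
    --data-file binaries.csv \
    generate

# Step 2: extract text for the binaries listed in the csv
java -jar oak-run.jar tika \
    --data-file binaries.csv \
    --fds-path /path/to/datastore \
    --store-path /path/to/extracted-text \
    --pool-size 4 \
    extract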
Use of org.apache.jackrabbit.oak.spi.state.NodeStore in project jackrabbit-oak by Apache.
From the class IncludeIndexTest, method prepare:
@Before
public void prepare() throws Exception {
    NodeStore source = getSourceContainer().open();
    try {
        initContent(source);
    } finally {
        getSourceContainer().close();
    }
    String[] args = getArgs();
    log.info("oak2oak {}", Joiner.on(' ').join(args));
    OakUpgrade.main(args);
    nodeStore = destination.open();
}
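getArgs() is elided here, but OakUpgrade.main accepts the same arguments as the oak2oak command line. A hypothetical invocation restricting the copy to selected paths might look like the following (--include-paths is among the documented oak-upgrade options; the concrete paths are placeholders):

java -jar oak-upgrade-*.jar \
    --include-paths=/oak:index,/content \
    /path/to/source/repository \
    /path/to/target/segmentstore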
Use of org.apache.jackrabbit.oak.spi.state.NodeStore in project jackrabbit-oak by Apache.
From the class UpgradeFromTwoSourcesTest, method upgradeRepository:
@Before
public synchronized void upgradeRepository() throws Exception {
    if (!upgradeComplete) {
        final File sourceDir1 = new File(getTestDirectory(), "source1");
        final File sourceDir2 = new File(getTestDirectory(), "source2");
        sourceDir1.mkdirs();
        sourceDir2.mkdirs();
        final RepositoryImpl source1 = createSourceRepository(sourceDir1);
        final RepositoryImpl source2 = createSourceRepository(sourceDir2);
        final Session session1 = source1.login(CREDENTIALS);
        final Session session2 = source2.login(CREDENTIALS);
        try {
            createSourceContent(session1);
            createSourceContent2(session2);
        } finally {
            session1.save();
            session2.save();
            session1.logout();
            session2.logout();
            source1.shutdown();
            source2.shutdown();
        }
        // Upgrade both sources into a single target NodeStore, copying only
        // the listed include paths from each source
        final NodeStore target = getTargetNodeStore();
        doUpgradeRepository(sourceDir1, target, "/left");
        doUpgradeRepository(sourceDir2, target, "/right", "/left/child2", "/left/child3");
        fileStore.flush();
        upgradeComplete = true;
    }
}
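The doUpgradeRepository helper is not part of this excerpt. A minimal sketch of what it might look like, assuming the RepositoryUpgrade API from oak-upgrade and a signature matching the call sites above (the bootstrap details are simplified):

// Hypothetical helper: copies only the given include paths from a
// Jackrabbit 2 source directory into the shared target NodeStore.
private void doUpgradeRepository(File source, NodeStore target, String... includes)
        throws Exception {
    RepositoryConfig config = RepositoryConfig.create(source);
    RepositoryContext context = RepositoryContext.create(config);
    try {
        RepositoryUpgrade upgrade = new RepositoryUpgrade(context, target);
        upgrade.setIncludes(includes);
        upgrade.copy(null); // no additional repository initializer
    } finally {
        context.getRepository().shutdown();
    }
}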