Example usage of org.apache.jackrabbit.oak.segment.file.FileStore in the jackrabbit-oak project (Apache): the createRepository method of the ActiveDeletedBlobSyncTrackerTest class.
@Override
protected ContentRepository createRepository() {
    try {
        // Directory in which the active deleted blob collector journals ids of blobs
        // that have been deleted from Lucene indexes and await purging from the blob store.
        File blobCollectorDeleted = new File(blobCollectionRoot.getRoot(), "deleted-blobs");
        blobCollectorDeleted.mkdirs();
        // Reuse the directory handle created above instead of constructing the same path twice.
        adbc = new ActiveDeletedBlobCollectorImpl(clock, blobCollectorDeleted, executorService);

        IndexCopier copier = createIndexCopier();
        editorProvider = new LuceneIndexEditorProvider(copier, null,
                new ExtractedTextCache(10 * FileUtils.ONE_MB, 100), null,
                Mounts.defaultMountInfoProvider(), adbc);
        provider = new LuceneIndexProvider(copier);

        // File-backed data store wrapped in a counting blob store so tests can
        // observe how many blobs are written/read.
        OakFileDataStore ds = new OakFileDataStore();
        ds.setMinRecordLength(10);
        ds.init(fileDataStoreRoot.getRoot().getAbsolutePath());
        DataStoreBlobStore dsbs = new DataStoreBlobStore(ds);
        this.blobStore = new AbstractActiveDeletedBlobTest.CountingBlobStore(dsbs);

        FileStore store = FileStoreBuilder.fileStoreBuilder(temporaryFolder.getRoot())
                .withMemoryMapping(false)
                .withBlobStore(blobStore)
                .build();
        nodeStore = SegmentNodeStoreBuilders.builder(store).build();

        // Attach a blob-id tracker (snapshot interval 600s) so deletions can be synced.
        BlobTrackingStore trackingStore = (BlobTrackingStore) blobStore;
        trackingStore.addTracker(new BlobIdTracker(blobTrackerRoot.getRoot().getAbsolutePath(),
                getOrCreateId(nodeStore), 600, dsbs));

        // set the blob store to skip writing blobs through the node store
        editorProvider.setBlobStore(blobStore);

        asyncIndexUpdate = new AsyncIndexUpdate("async", nodeStore, editorProvider);
        return new Oak(nodeStore)
                .with(new InitialContent())
                .with(new OpenSecurityProvider())
                .with((QueryIndexProvider) provider)
                .with((Observer) provider)
                .with(editorProvider)
                .createContentRepository();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Example usage of org.apache.jackrabbit.oak.segment.file.FileStore in the jackrabbit-oak project (Apache): the dirNameInException_Flush method of the ChunkedOakDirectoryTest class.
@Test
public void dirNameInException_Flush() throws Exception {
    FailOnDemandBlobStore blobStore = new FailOnDemandBlobStore();
    FileStore store = FileStoreBuilder.fileStoreBuilder(tempFolder.getRoot())
            .withMemoryMapping(false)
            .withBlobStore(blobStore)
            .build();
    // Guarantee the FileStore is released even when an assertion fails or an
    // unexpected exception escapes; the original closed it only on the happy path.
    try {
        String indexPath = "/foo/bar";
        // Blob must exceed the medium-segment limit so the write goes through the
        // blob store, where FailOnDemandBlobStore can inject the failure.
        int minFileSize = SegmentTestConstants.MEDIUM_LIMIT;
        int blobSize = minFileSize + 1000;
        builder = nodeStore.getRoot().builder();
        builder.setProperty(LuceneIndexConstants.BLOB_SIZE, blobSize);
        Directory dir = createDir(builder, false, indexPath);
        IndexOutput o3 = dir.createOutput("test1.txt", IOContext.DEFAULT);
        o3.writeBytes(randomBytes(minFileSize), minFileSize);
        blobStore.startFailing();
        try {
            o3.flush();
            fail();
        } catch (IOException e) {
            // The exception message must identify both the index path and the file name.
            assertThat(e.getMessage(), containsString(indexPath));
            assertThat(e.getMessage(), containsString("test1.txt"));
        }
    } finally {
        store.close();
    }
}
Example usage of org.apache.jackrabbit.oak.segment.file.FileStore in the jackrabbit-oak project (Apache): the execute method of the CompositePrepareCommand class.
@Override
public void execute(String... args) throws Exception {
    // Command-line interface: --paths (comma-separated, defaults to /apps,/libs)
    // plus one positional argument naming the segment store directory.
    OptionParser optionParser = new OptionParser();
    OptionSpec<?> helpSpec = optionParser
            .acceptsAll(asList("h", "?", "help"), "show help")
            .forHelp();
    OptionSpec<String> pathsSpec = optionParser
            .accepts("paths", "a list of paths to transform from nt:resource to oak:Resource")
            .withRequiredArg()
            .ofType(String.class)
            .withValuesSeparatedBy(',')
            .defaultsTo("/apps", "/libs");
    OptionSpec<File> storeSpec = optionParser
            .nonOptions("path to segment store (required)")
            .ofType(File.class);

    OptionSet parsed = optionParser.parse(args);
    if (parsed.has(helpSpec)) {
        optionParser.printHelpOn(System.out);
        System.exit(0);
    }

    File storeDir = storeSpec.value(parsed);
    if (storeDir == null) {
        // Segment store location is mandatory; show usage and exit with failure.
        optionParser.printHelpOn(System.out);
        System.exit(1);
    }

    FileStore fileStore = fileStoreBuilder(storeDir).withStrictVersionCheck(true).build();
    try {
        new OakResourceTransformer(fileStore, pathsSpec.values(parsed)).transform();
    } finally {
        fileStore.close();
    }
}
Example usage of org.apache.jackrabbit.oak.segment.file.FileStore in the jackrabbit-oak project (Apache): the testSync method of the DataStoreTestBase class.
@Test
public void testSync() throws Exception {
    final int blobSize = 5 * MB;
    FileStore primary = getPrimary();
    FileStore secondary = getSecondary();
    NodeStore primaryStore = SegmentNodeStoreBuilders.builder(primary).build();
    // Write a blob on the server side; keep its bytes to verify after the sync.
    byte[] expectedData = addTestContent(primaryStore, "server", blobSize);

    try (StandbyServerSync serverSync = new StandbyServerSync(serverPort.getPort(), primary, MB);
         StandbyClientSync clientSync = new StandbyClientSync(getServerHost(), serverPort.getPort(),
                 secondary, false, getClientTimeout(), false, folder.newFolder())) {
        serverSync.start();
        primary.flush();
        clientSync.run();
        // After one sync cycle the standby must have caught up with the server head.
        assertEquals(primary.getHead(), secondary.getHead());
    }

    // The blob lives in the external data store, so neither segment store
    // should have grown by anywhere near the blob size.
    assertTrue(primary.getStats().getApproximateSize() < MB);
    assertTrue(secondary.getStats().getApproximateSize() < MB);

    PropertyState blobProperty = secondary.getHead()
            .getChildNode("root")
            .getChildNode("server")
            .getProperty("testBlob");
    assertNotNull(blobProperty);
    assertEquals(Type.BINARY.tag(), blobProperty.getType().tag());

    Blob blob = blobProperty.getValue(Type.BINARY);
    assertEquals(blobSize, blob.length());
    byte[] actualData = new byte[blobSize];
    try (InputStream blobStream = blob.getNewStream()) {
        ByteStreams.readFully(blobStream, actualData);
        assertArrayEquals(expectedData, actualData);
    }
}
Example usage of org.apache.jackrabbit.oak.segment.file.FileStore in the jackrabbit-oak project (Apache): the corruptPathFromCheckpoint method of the CheckRepositoryTestBase class.
protected void corruptPathFromCheckpoint() throws InvalidFileStoreVersionException, IOException {
    // Locate the record id of node "b" inside the first checkpoint, then corrupt
    // that record on disk. The store must be closed before the tar file is
    // touched, so only the lookup phase holds the FileStore open.
    RecordId bRecordId;
    // try-with-resources ensures the store is released even if retrieve()/casts
    // throw; the original leaked the store on any failure before close().
    try (FileStore fileStore = FileStoreBuilder.fileStoreBuilder(temporaryFolder.getRoot())
            .withMaxFileSize(256)
            .withSegmentCacheSize(64)
            .build()) {
        SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build();
        SegmentNodeState cp1 = (SegmentNodeState) nodeStore.retrieve(checkpoints.iterator().next());
        bRecordId = ((SegmentNodeState) cp1.getChildNode("b")).getRecordId();
    }
    corruptRecord(bRecordId, "data00000a.tar");
}
Aggregations