Use of org.junit.AssumptionViolatedException in project jackrabbit-oak by apache.
The class NonLocalObservationIT, method getFixture:
@Override
protected NodeStoreFixture getFixture() {
    /**
     * Fixes the cluster use case plus allowing to control the cache sizes.
     * In theory other users of DocumentMongoFixture might have similar
     * test cases - but keeping it simple for now - thus going via subclass.
     */
    return new DocumentMongoFixture() {

        private String clusterSuffix = System.currentTimeMillis() + "-NonLocalObservationIT";

        private DB db;

        /** keep a reference to the node stores so that the db only gets closed after the last nodeStore was closed */
        private Set<NodeStore> nodeStores = new HashSet<NodeStore>();

        /**
         * This is not implemented in the super class at all.
         * <ul>
         * <li>use a specific suffix to make sure we have our own, new db and clean it up after the test</li>
         * <li>properly drop that db created above in dispose</li>
         * <li>use only 32MB (vs default of 256MB) memory to ensure we're not going OOM just because of this (which happens with the default)</li>
         * <li>disable the persistent cache for the same reason</li>
         * </ul>
         */
        @Override
        public NodeStore createNodeStore(int clusterNodeId) {
            try {
                DocumentMK.Builder builder = new DocumentMK.Builder();
                // keep this one low to avoid OOME
                builder.memoryCacheSize(32 * 1024 * 1024);
                // turn this one off to avoid OOME
                builder.setPersistentCache(null);
                final String suffix = clusterSuffix;
                // db will be overwritten - but that's fine
                db = getDb(suffix);
                builder.setMongoDB(db);
                DocumentNodeStore ns = builder.getNodeStore();
                nodeStores.add(ns);
                return ns;
            } catch (Exception e) {
                throw new AssumptionViolatedException("Mongo instance is not available", e);
            }
        }

        @Override
        public void dispose(NodeStore nodeStore) {
            super.dispose(nodeStore);
            nodeStores.remove(nodeStore);
            if (db != null && nodeStores.size() == 0) {
                try {
                    db.dropDatabase();
                    db.getMongo().close();
                    db = null;
                } catch (Exception e) {
                    log.error("dispose: Can't close Mongo", e);
                }
            }
        }

        @Override
        public String toString() {
            return "NonLocalObservationIT's DocumentMongoFixture flavour";
        }
    };
}
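In the fixture above, throwing org.junit.AssumptionViolatedException when MongoDB cannot be reached makes JUnit 4 report the affected tests as skipped rather than failed. A minimal sketch of that pattern (not taken from the Oak sources; the class name and the connectivity check are hypothetical):

import org.junit.AssumptionViolatedException;
import org.junit.Test;

public class MongoAvailableSketchTest {

    // Hypothetical stand-in for a real connectivity check.
    private boolean isMongoReachable() {
        return false;
    }

    @Test
    public void skipsWhenMongoIsDown() {
        if (!isMongoReachable()) {
            // JUnit 4 treats this as a skipped/ignored test, not a failure,
            // so the suite still passes on machines without a local MongoDB.
            throw new AssumptionViolatedException("Mongo instance is not available");
        }
        // ... test body that needs a live MongoDB would go here ...
    }
}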
Use of org.junit.AssumptionViolatedException in project poi by apache.
The class TestAllFiles, method testAllFiles:
@Test
public void testAllFiles() throws Exception {
    System.out.println("Reading " + file + " with " + handler.getClass());
    assertNotNull("Unknown file extension for file: " + file + ": " + getExtension(file), handler);
    File inputFile = new File(ROOT_DIR, file);

    // special cases where docx-handling breaks, but OPCPackage handling works
    boolean ignoredOPC = (file.endsWith(".docx") || file.endsWith(".xlsx")
            || file.endsWith(".xlsb") || file.endsWith(".pptx")) && handler instanceof OPCFileHandler;
    boolean ignoreHPSF = (handler instanceof HPSFFileHandler);

    try {
        InputStream stream = new BufferedInputStream(new FileInputStream(inputFile), 64 * 1024);
        try {
            handler.handleFile(stream, file);
            assertFalse("Expected to fail for file " + file + " and handler " + handler + ", but did not fail!",
                    OLD_FILES_HWPF.contains(file) && !ignoreHPSF);
        } finally {
            stream.close();
        }
        handler.handleExtracting(inputFile);
        assertFalse("Expected to fail for file " + file + " and handler " + handler + ", but did not fail!",
                EXPECTED_FAILURES.contains(file) && !ignoredOPC && !ignoreHPSF);
    } catch (OldFileFormatException e) {
        // for old word files we should still support extracting text
        if (OLD_FILES_HWPF.contains(file)) {
            handler.handleExtracting(inputFile);
        } else {
            // check if we expect failure for this file
            if (!EXPECTED_FAILURES.contains(file) && !AbstractFileHandler.EXPECTED_EXTRACTOR_FAILURES.contains(file)) {
                System.out.println("Failed: " + file);
                throw new Exception("While handling " + file, e);
            }
        }
    } catch (AssumptionViolatedException e) {
        // file handler ignored this file
    } catch (Exception e) {
        // check if we expect failure for this file
        if (!EXPECTED_FAILURES.contains(file) && !AbstractFileHandler.EXPECTED_EXTRACTOR_FAILURES.contains(file)) {
            System.out.println("Failed: " + file);
            throw new Exception("While handling " + file, e);
        }
    }

    // let some file handlers do additional stuff
    handler.handleAdditional(inputFile);
}
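The catch block for AssumptionViolatedException above relies on the fact that a file handler can signal "I cannot process this file" through org.junit.Assume, whose methods throw that exception. A minimal sketch of such a handler (assumed for illustration, not copied from the POI sources; the class name and the file-extension check are hypothetical):

import java.io.InputStream;

import org.junit.Assume;

class SkippingFileHandlerSketch {

    void handleFile(InputStream stream, String path) throws Exception {
        // Assume.assumeFalse throws AssumptionViolatedException when the condition is true;
        // testAllFiles() catches that exception and silently skips the file.
        Assume.assumeFalse("Encrypted documents are not handled here", path.endsWith(".encrypted"));
        // ... actual parsing of the stream would go here ...
    }
}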