Use of de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException in project elki by elki-project.
The class LuceneDistanceRangeQuery, method getRangeForObject:
@Override
public void getRangeForObject(DBID obj, double range, ModifiableDoubleDBIDList neighbors) {
  try {
    Query query = mlt.like(ids.getOffset(obj));
    is.search(query, new DocumentsCollector(ids, neighbors, range));
  } catch (IOException e) {
    throw new AbortException("I/O error in lucene.", e);
  }
}
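The DocumentsCollector converts Lucene similarity scores (larger is better) into ELKI distances (smaller is better) and keeps only hits within the query range. A minimal sketch of that conversion, mirroring the reciprocal used in the kNN query below; the helper name is hypothetical, not ELKI API:

// Hypothetical helper: map a Lucene similarity score to a distance.
// A score of 0 means "no similarity", i.e. infinite distance.
static double scoreToDistance(double score) {
  return score > 0. ? 1. / score : Double.POSITIVE_INFINITY;
}

// Inside the collector, a hit is kept only if scoreToDistance(score) <= range.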
Use of de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException in project elki by elki-project.
The class LuceneDistanceKNNQuery, method getKNNForDBID:
@Override
public KNNList getKNNForDBID(DBIDRef id, int k) {
  try {
    Query query = mlt.like(range.getOffset(id));
    TopDocs topDocs = is.search(query, k);
    int rk = topDocs.scoreDocs.length;
    DoubleIntegerDBIDKNNList res = new DoubleIntegerDBIDKNNList(k, rk);
    DBIDArrayIter it = range.iter();
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
      double dist = (scoreDoc.score > 0.) ? (1. / scoreDoc.score) : Double.POSITIVE_INFINITY;
      it.seek(scoreDoc.doc);
      res.add(dist, it);
    }
    return res;
  } catch (IOException e) {
    throw new AbortException("I/O error in lucene.", e);
  }
}
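Because the reciprocal mapping is monotone decreasing, Lucene's descending score order arrives as ascending distance order, so the result list is built already sorted. A tiny worked example with hypothetical scores:

// Hypothetical Lucene scores, in the descending order TopDocs returns them:
double[] scores = { 2.0, 0.5, 0.0 };
// Printed distances ascend: 0.5, 2.0, Infinity
for (double s : scores) {
  System.out.println(s > 0. ? 1. / s : Double.POSITIVE_INFINITY);
}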
Use of de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException in project elki by elki-project.
The class AbstractXTree, method initializeFromFile:
/**
 * To be called via the constructor if the tree is to be read from file.
 */
@Override
public void initializeFromFile(TreeIndexHeader hdr, PageFile<N> file) {
  XTreeHeader header = (XTreeHeader) hdr;
  super.dirCapacity = header.getDirCapacity();
  super.leafCapacity = header.getLeafCapacity();
  super.dirMinimum = header.getDirMinimum();
  super.leafMinimum = header.getLeafMinimum();
  settings.min_fanout = header.getMin_fanout();
  this.num_elements = header.getNumberOfElements();
  this.dimensionality = header.getDimensionality();
  settings.max_overlap = header.getMaxOverlap();
  long superNodeOffset = header.getSupernode_offset();
  if (getLogger().isDebugging()) {
    getLogger().debugFine(new StringBuilder(200).append(getClass()).append("\n file = ").append(file.getClass()).toString());
  }
  // reset page id maintenance
  file.setNextPageID((int) (superNodeOffset / header.getPageSize()));
  // read supernodes (if there are any)
  if (superNodeOffset > 0) {
    RandomAccessFile ra_file = ((PersistentPageFile<?>) file).getFile();
    long offset = header.getReservedPages() * file.getPageSize() + superNodeOffset;
    int bs = 0 // omit this: 4 // EMPTY_PAGE or FILLED_PAGE?
        + 4 // id
        + 1 // isLeaf
        + 1 // isSupernode
        + 4 // number of entries
        + 4; // capacity
    byte[] buffer = new byte[bs];
    try {
      // go to supernode region
      ra_file.seek(offset);
      while (ra_file.getFilePointer() + file.getPageSize() <= ra_file.length()) {
        // file.countRead();
        ra_file.read(buffer);
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(buffer));
        int id = ois.readInt();
        // isLeaf
        ois.readBoolean();
        boolean supernode = ois.readBoolean();
        if (!supernode) {
          throw new IllegalStateException("Non-supernode at supernode position '" + superNodeOffset + "'");
        }
        int numEntries = ois.readInt();
        int capacity = ois.readInt();
        ois.close();
        N page;
        try {
          page = getNodeClass().newInstance();
        } catch (IllegalAccessException e) {
          throw new AbortException("AccessException instantiating a supernode", e);
        } catch (InstantiationException e) {
          throw new AbortException("InstantiationException instantiating a supernode", e);
        }
        // file.countRead();
        ra_file.seek(offset);
        byte[] superbuffer = new byte[file.getPageSize() * (int) Math.ceil((double) capacity / dirCapacity)];
        // increase offset for the next position seek
        offset += superbuffer.length;
        ra_file.read(superbuffer);
        // re-read the complete supernode, not just the short header probe
        ois = new ObjectInputStream(new ByteArrayInputStream(superbuffer));
        try {
          // read from file and add to supernode map
          page.readSuperNode(ois, this);
        } catch (ClassNotFoundException e) {
          throw new AbortException("ClassNotFoundException when loading a supernode", e);
        }
        assert numEntries == page.getNumEntries();
        assert capacity == page.getCapacity();
        assert id == page.getPageID();
      }
    } catch (IOException e) {
      throw new RuntimeException("IOException caught when loading tree from file.", e);
    }
  }
  super.initialized = true;
  // compute height
  super.height = computeHeight();
  if (getLogger().isDebugging()) {
    getLogger().debugFine(new StringBuilder(100).append(getClass()).append("\n height = ").append(height).toString());
  }
}
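A supernode holds more entries than a regular directory page, so it occupies several consecutive pages on disk; the superbuffer allocation above rounds the supernode's capacity up to whole pages. A sketch of that arithmetic with hypothetical values:

// Supernode buffer sizing as in the loop above; values are hypothetical.
int pageSize = 4096; // bytes per page
int dirCapacity = 50; // entries that fit a regular directory page
int capacity = 120; // entries stored in this supernode
int pages = (int) Math.ceil((double) capacity / dirCapacity); // = 3 pages
byte[] superbuffer = new byte[pageSize * pages]; // = 12288 bytes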
Use of de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException in project elki by elki-project.
The class LibSVMOneClassOutlierDetection, method run:
/**
 * Run one-class SVM.
 *
 * @param relation Data relation
 * @return Outlier result.
 */
public OutlierResult run(Relation<V> relation) {
  final int dim = RelationUtil.dimensionality(relation);
  final ArrayDBIDs ids = DBIDUtil.ensureArray(relation.getDBIDs());
  svm.svm_set_print_string_function(LOG_HELPER);
  svm_parameter param = new svm_parameter();
  param.svm_type = svm_parameter.ONE_CLASS;
  param.kernel_type = svm_parameter.LINEAR;
  param.degree = 3;
  switch(kernel) {
  case LINEAR:
    param.kernel_type = svm_parameter.LINEAR;
    break;
  case QUADRATIC:
    param.kernel_type = svm_parameter.POLY;
    param.degree = 2;
    break;
  case CUBIC:
    param.kernel_type = svm_parameter.POLY;
    param.degree = 3;
    break;
  case RBF:
    param.kernel_type = svm_parameter.RBF;
    break;
  case SIGMOID:
    param.kernel_type = svm_parameter.SIGMOID;
    break;
  default:
    throw new AbortException("Invalid kernel parameter: " + kernel);
  }
  // TODO: expose additional parameters to the end user!
  param.nu = nu;
  param.coef0 = 0.;
  param.cache_size = 10000;
  param.C = 1;
  param.eps = 1e-4; // not used by one-class?
  param.p = 0.1; // not used by one-class?
  param.shrinking = 0;
  param.probability = 0;
  param.nr_weight = 0;
  param.weight_label = new int[0];
  param.weight = new double[0];
  param.gamma = 1. / dim;
  // Transform data:
  svm_problem prob = new svm_problem();
  prob.l = relation.size();
  prob.x = new svm_node[prob.l][];
  prob.y = new double[prob.l];
  {
    DBIDIter iter = ids.iter();
    for (int i = 0; i < prob.l && iter.valid(); iter.advance(), i++) {
      V vec = relation.get(iter);
      // TODO: support compact sparse vectors, too!
      svm_node[] x = new svm_node[dim];
      for (int d = 0; d < dim; d++) {
        x[d] = new svm_node();
        x[d].index = d + 1;
        x[d].value = vec.doubleValue(d);
      }
      prob.x[i] = x;
      prob.y[i] = +1;
    }
  }
  if (LOG.isVerbose()) {
    LOG.verbose("Training one-class SVM...");
  }
  String err = svm.svm_check_parameter(prob, param);
  if (err != null) {
    LOG.warning("svm_check_parameter: " + err);
  }
  svm_model model = svm.svm_train(prob, param);
  if (LOG.isVerbose()) {
    LOG.verbose("Predicting...");
  }
  WritableDoubleDataStore scores = DataStoreUtil.makeDoubleStorage(relation.getDBIDs(), DataStoreFactory.HINT_DB);
  DoubleMinMax mm = new DoubleMinMax();
  {
    DBIDIter iter = ids.iter();
    double[] buf = new double[svm.svm_get_nr_class(model)];
    for (int i = 0; i < prob.l && iter.valid(); iter.advance(), i++) {
      V vec = relation.get(iter);
      svm_node[] x = new svm_node[dim];
      for (int d = 0; d < dim; d++) {
        x[d] = new svm_node();
        x[d].index = d + 1;
        x[d].value = vec.doubleValue(d);
      }
      svm.svm_predict_values(model, x, buf);
      double score = -buf[0]; // / param.gamma; // Heuristic rescaling, sorry.
      // Unfortunately, libsvm one-class currently yields a binary decision.
      scores.putDouble(iter, score);
      mm.put(score);
    }
  }
  DoubleRelation scoreResult = new MaterializedDoubleRelation("One-Class SVM Decision", "svm-outlier", scores, ids);
  OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta(mm.getMin(), mm.getMax(), Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 0.);
  return new OutlierResult(scoreMeta, scoreResult);
}
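For one-class SVMs, svm_predict_values fills buf[0] with the signed decision value: positive inside the learned (normal) region, negative outside. Negating it, as above, makes larger scores more outlying. A self-contained sketch of the same libsvm call sequence on toy data (class name and parameter values are hypothetical, not ELKI code):

import libsvm.*;

// Standalone one-class SVM round trip with libsvm, mirroring run() above.
public class OneClassSVMSketch {
  public static void main(String[] args) {
    double[][] data = { { 0., 0. }, { 0.1, 0. }, { 0., 0.1 }, { 5., 5. } };
    svm_parameter param = new svm_parameter();
    param.svm_type = svm_parameter.ONE_CLASS;
    param.kernel_type = svm_parameter.RBF;
    param.nu = 0.05; // upper bound on the fraction of outliers
    param.gamma = 1. / 2; // 1 / dimensionality, as in run() above
    param.eps = 1e-4;
    param.cache_size = 100;
    svm_problem prob = new svm_problem();
    prob.l = data.length;
    prob.x = new svm_node[prob.l][];
    prob.y = new double[prob.l];
    for (int i = 0; i < prob.l; i++) {
      svm_node[] x = new svm_node[2];
      for (int d = 0; d < 2; d++) {
        x[d] = new svm_node();
        x[d].index = d + 1; // libsvm feature indices are 1-based
        x[d].value = data[i][d];
      }
      prob.x[i] = x;
      prob.y[i] = +1; // one-class training: all points labeled +1
    }
    svm_model model = svm.svm_train(prob, param);
    double[] buf = new double[svm.svm_get_nr_class(model)];
    for (int i = 0; i < prob.l; i++) {
      svm.svm_predict_values(model, prob.x[i], buf);
      // Negate the decision value so that larger means more outlying:
      System.out.println("score[" + i + "] = " + (-buf[0]));
    }
  }
}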
Use of de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException in project elki by elki-project.
The class LuceneDatabase, method initialize:
@Override
public void initialize() {
  try {
    reader = IndexReader.open(directory);
    ids = DBIDUtil.generateStaticDBIDRange(reader.maxDoc());
    // ID relation:
    idrep = new DBIDView(ids);
    relations.add(idrep);
    getHierarchy().add(this, idrep);
    // Documents relation:
    docrep = new LuceneDocumentRelation(ids, reader);
    relations.add(docrep);
    getHierarchy().add(this, docrep);
    eventManager.fireObjectsInserted(ids);
  } catch (CorruptIndexException e) {
    throw new AbortException("Index is corrupt.", e);
  } catch (IOException e) {
    throw new AbortException("I/O error reading index.", e);
  }
}
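A hypothetical usage sketch, assuming Lucene 3.x (matching the IndexReader.open(Directory) call above) and that LuceneDatabase is constructed around an existing index Directory; the constructor signature is an assumption for illustration:

import java.io.File;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

// Open an existing Lucene index and wrap it as an ELKI database.
// The LuceneDatabase constructor shown here is assumed, not verified.
Directory dir = FSDirectory.open(new File("/path/to/lucene/index"));
LuceneDatabase db = new LuceneDatabase(dir); // hypothetical constructor
db.initialize(); // creates the DBID range and the two relations shown above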