Use of de.lmu.ifi.dbs.elki.database.ids.DBIDVar in project elki by elki-project.
The class CLINK, method clinkstep3.
/**
 * Third step: Determine the values for P and L
 *
 * @param id the id of the object to be inserted into the pointer
 *        representation
 * @param i Iterator
 * @param n Stopping position
 * @param pi Pi data store
 * @param lambda Lambda data store
 * @param m Distance data store
 */
private void clinkstep3(DBIDRef id, DBIDArrayIter i, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda, WritableDoubleDataStore m) {
  DBIDVar p_i = DBIDUtil.newVar();
  // for i = 1..n
  for (i.seek(0); i.getOffset() < n; i.advance()) {
    double l_i = lambda.doubleValue(i);
    double m_i = m.doubleValue(i);
    // if L(i) < M(i)
    if (l_i < m_i) {
      // p_i = pi(i)
      p_i.from(pi, i);
      double mp_i = m.doubleValue(p_i);
      // M(P(i)) = max { M(P(i)), M(i) }
      if (mp_i < m_i) {
        m.putDouble(p_i, m_i);
      }
      // M(i) = infinity
      m.putDouble(i, Double.POSITIVE_INFINITY);
    }
  }
}
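For intuition, here is a minimal array-based sketch of the same step. It is hypothetical illustration code, not ELKI API: plain int[]/double[] arrays stand in for the pi, lambda and m data stores, and objects are indices 0..n, with n the newly inserted object.

// Simplified stand-in for CLINK step 3: propagate the new distances M
// through the pointer representation. pi[i] is the parent of object i,
// lambda[i] its merge height, m[i] the distance to the new object n.
static void clinkStep3(int n, int[] pi, double[] lambda, double[] m) {
  for (int i = 0; i < n; i++) {
    if (lambda[i] < m[i]) {
      int p = pi[i];
      // M(P(i)) = max { M(P(i)), M(i) }
      if (m[p] < m[i]) {
        m[p] = m[i];
      }
      // M(i) = infinity: i keeps its current parent in this round.
      m[i] = Double.POSITIVE_INFINITY;
    }
  }
}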
Use of de.lmu.ifi.dbs.elki.database.ids.DBIDVar in project elki by elki-project.
The class CLINK, method clinkstep8.
/**
 * Update hierarchy.
 *
 * @param id Current object
 * @param it Iterator
 * @param n Last object to process
 * @param pi Parent data store
 * @param lambda Height data store
 * @param m Distance data store
 */
private void clinkstep8(DBIDRef id, DBIDArrayIter it, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda, WritableDoubleDataStore m) {
  DBIDVar p_i = DBIDUtil.newVar(), pp_i = DBIDUtil.newVar();
  for (it.seek(0); it.getOffset() < n; it.advance()) {
    // p_i = pi[i]
    p_i.from(pi, it);
    // pp_i = pi[pi[i]]
    pp_i.from(pi, p_i);
    if (DBIDUtil.equal(pp_i, id) && lambda.doubleValue(it) >= lambda.doubleValue(p_i)) {
      pi.putDBID(it, id);
    }
  }
}
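Again as a hypothetical array-based sketch (indices instead of DBIDs, plain arrays instead of the ELKI data stores; the new object is index n):

// Simplified stand-in for CLINK step 8: objects whose grandparent is the
// new object n are re-attached directly to n when their own merge height
// is not below their parent's.
static void clinkStep8(int n, int[] pi, double[] lambda) {
  for (int i = 0; i < n; i++) {
    int p = pi[i]; // p_i = pi[i]
    if (pi[p] == n && lambda[i] >= lambda[p]) {
      pi[i] = n;
    }
  }
}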
Use of de.lmu.ifi.dbs.elki.database.ids.DBIDVar in project elki by elki-project.
The class CLINK, method clinkstep4567.
/**
 * Fourth to seventh step of CLINK: find the best insertion
 *
 * @param id Current object
 * @param ids All objects
 * @param it Iterator
 * @param n Index threshold
 * @param pi Parent data store
 * @param lambda Height data store
 * @param m Distance data store
 */
private void clinkstep4567(DBIDRef id, ArrayDBIDs ids, DBIDArrayIter it, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda, WritableDoubleDataStore m) {
  // step 4: a = n
  DBIDArrayIter a = ids.iter().seek(n - 1);
  // step 5:
  {
    DBIDVar p_i = DBIDUtil.newVar();
    for (it.seek(n - 1); it.valid(); it.retract()) {
      double l_i = lambda.doubleValue(it);
      double mp_i = m.doubleValue(p_i.from(pi, it));
      if (l_i >= mp_i) {
        if (m.doubleValue(it) < m.doubleValue(a)) {
          a.seek(it.getOffset());
        }
      } else {
        m.putDouble(it, Double.POSITIVE_INFINITY);
      }
    }
  }
  // step 6
  // b = pi[a]
  DBIDVar b = DBIDUtil.newVar().from(pi, a);
  double c = lambda.doubleValue(a);
  pi.putDBID(a, id);
  lambda.putDouble(a, m.doubleValue(a));
  // step 7
  if (a.getOffset() < n - 1) {
    // Used below
    DBIDRef last = DBIDUtil.newVar(it.seek(n - 1));
    DBIDVar d = DBIDUtil.newVar();
    // if b < n: (then goto 7)
    while (!DBIDUtil.equal(b, id)) {
      if (DBIDUtil.equal(b, last)) {
        pi.putDBID(b, id);
        lambda.putDouble(b, c);
        break;
      }
      // d = pi[b]
      d.from(pi, b);
      // pi[b] = n + 1
      pi.putDBID(b, id);
      // c = old l[b], l[b] = c
      c = lambda.putDouble(b, c);
      // b = d = old pi[b]
      b.set(d);
    }
  }
}
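The control flow above is easier to follow on plain arrays. The sketch below is a hypothetical simplification (not ELKI API) that mirrors steps 4-7, with the new object as index n:

// step 4: start with a = the last previously inserted object.
// step 5: scan backwards for the best insertion point a.
// step 6: attach a to the new object n.
// step 7: walk the old parent chain from b = pi[a], re-attaching it to n
//         and swapping merge heights along the way.
static void clinkStep4567(int n, int[] pi, double[] lambda, double[] m) {
  int a = n - 1;
  for (int i = n - 1; i >= 0; i--) {
    if (lambda[i] >= m[pi[i]]) {
      if (m[i] < m[a]) {
        a = i; // better insertion candidate
      }
    } else {
      m[i] = Double.POSITIVE_INFINITY;
    }
  }
  int b = pi[a];
  double c = lambda[a];
  pi[a] = n;
  lambda[a] = m[a];
  if (a < n - 1) {
    while (b != n) {
      if (b == n - 1) { // reached the previously last object
        pi[b] = n;
        lambda[b] = c;
        break;
      }
      int d = pi[b];
      pi[b] = n;
      double t = lambda[b]; // c = old lambda[b], lambda[b] = c
      lambda[b] = c;
      c = t;
      b = d; // b = old pi[b]
    }
  }
}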
Use of de.lmu.ifi.dbs.elki.database.ids.DBIDVar in project elki by elki-project.
The class SLINK, method slinkstep4.
/**
 * Fourth step: update the clusters if necessary
 *
 * @param id the id of the current object
 * @param it Array iterator
 * @param n Last object to process at this run
 * @param pi Pi data store
 * @param lambda Lambda data store
 */
private void slinkstep4(DBIDRef id, DBIDArrayIter it, int n, WritableDBIDDataStore pi, WritableDoubleDataStore lambda) {
  DBIDVar p_i = DBIDUtil.newVar();
  // for i = 1..n
  for (it.seek(0); it.getOffset() < n; it.advance()) {
    double l_i = lambda.doubleValue(it);
    // p_i = pi(it)
    p_i.from(pi, it);
    double lp_i = lambda.doubleValue(p_i);
    // if L(i) >= L(P(i))
    if (l_i >= lp_i) {
      // P(i) = n + 1
      pi.put(it, id);
    }
  }
}
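As with the CLINK steps, a hypothetical array-based sketch (not ELKI API) makes the update rule explicit; the new object is index n:

// Simplified stand-in for SLINK step 4: whenever an object's merge height
// is at least its parent's, the parent link is redirected to the newly
// inserted object n.
static void slinkStep4(int n, int[] pi, double[] lambda) {
  for (int i = 0; i < n; i++) {
    // if L(i) >= L(P(i)) then P(i) = n
    if (lambda[i] >= lambda[pi[i]]) {
      pi[i] = n;
    }
  }
}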
Use of de.lmu.ifi.dbs.elki.database.ids.DBIDVar in project elki by elki-project.
The class OPTICSXi, method extractClusters.
/**
 * Extract clusters from a cluster order result.
 *
 * @param clusterOrderResult cluster order result
 * @param relation Relation
 * @param ixi Parameter 1 - Xi
 * @param minpts Parameter minPts
 * @return Extracted clustering
 */
private Clustering<OPTICSModel> extractClusters(ClusterOrder clusterOrderResult, Relation<?> relation, double ixi, int minpts) {
  ArrayDBIDs clusterOrder = clusterOrderResult.ids;
  DoubleDataStore reach = clusterOrderResult.reachability;
  DBIDArrayIter tmp = clusterOrder.iter();
  DBIDVar tmp2 = DBIDUtil.newVar();
  double mib = 0.0;
  List<SteepArea> salist = keepsteep ? new ArrayList<SteepArea>() : null;
  List<SteepDownArea> sdaset = new ArrayList<>();
  final Clustering<OPTICSModel> clustering = new Clustering<>("OPTICS Xi-Clusters", "optics");
  HashSet<Cluster<OPTICSModel>> curclusters = new HashSet<>();
  HashSetModifiableDBIDs unclaimedids = DBIDUtil.newHashSet(relation.getDBIDs());
  FiniteProgress scanprog = LOG.isVerbose() ? new FiniteProgress("OPTICS Xi cluster extraction", clusterOrder.size(), LOG) : null;
  for (SteepScanPosition scan = new SteepScanPosition(clusterOrderResult); scan.hasNext();) {
    if (scanprog != null) {
      scanprog.setProcessed(scan.index, LOG);
    }
    // Update the maximum in-between value (mib)
    mib = MathUtil.max(mib, scan.getReachability());
    // The last point cannot be the start of a steep area.
    if (!scan.next.valid()) {
      break;
    }
    // Xi-steep down area
    if (scan.steepDown(ixi)) {
      // Update mib values with current mib and filter
      updateFilterSDASet(mib, sdaset, ixi);
      final double startval = scan.getReachability();
      mib = 0.;
      int startsteep = scan.index, endsteep = scan.index;
      for (scan.next(); scan.hasNext(); scan.next()) {
        // still steep - continue.
        if (scan.steepDown(ixi)) {
          endsteep = scan.index;
          continue;
        }
        // Always stop looking after minpts "flat" steps.
        if (!scan.steepDown(1.0) || scan.index - endsteep > minpts) {
          break;
        }
      }
      final SteepDownArea sda = new SteepDownArea(startsteep, endsteep, startval, 0);
      if (LOG.isDebuggingFinest()) {
        LOG.debugFinest("New steep down area: " + sda.toString());
      }
      sdaset.add(sda);
      if (salist != null) {
        salist.add(sda);
      }
      continue;
    }
    // Xi-steep up area
    if (scan.steepUp(ixi)) {
      // Update mib values with current mib and filter
      updateFilterSDASet(mib, sdaset, ixi);
      final SteepUpArea sua;
      // Compute steep-up area
      {
        int startsteep = scan.index, endsteep = scan.index;
        mib = scan.getReachability();
        double esuccr = scan.getNextReachability();
        // Find the end of the steep-up area, possibly updating mib again
        while (!Double.isInfinite(esuccr) && scan.hasNext()) {
          scan.next();
          // still steep - continue.
          if (scan.steepUp(ixi)) {
            endsteep = scan.index;
            mib = scan.getReachability();
            esuccr = scan.getNextReachability();
            continue;
          }
          // Stop looking after minpts non-up steps.
          if (!scan.steepUp(1.0) || scan.index - endsteep > minpts) {
            break;
          }
        }
        if (Double.isInfinite(esuccr)) {
          scan.next();
        }
        sua = new SteepUpArea(startsteep, endsteep, esuccr);
        if (LOG.isDebuggingFinest()) {
          LOG.debugFinest("New steep up area: " + sua.toString());
        }
        if (salist != null) {
          salist.add(sua);
        }
      }
      // Validate and compute clusters
      // LOG.debug("SDA size:" + sdaset.size() + " " + sdaset);
      ListIterator<SteepDownArea> sdaiter = sdaset.listIterator(sdaset.size());
      // Iterate backwards for correct hierarchy generation.
      while (sdaiter.hasPrevious()) {
        SteepDownArea sda = sdaiter.previous();
        if (LOG.isDebuggingFinest()) {
          LOG.debugFinest("Comparing: eU=" + mib + " SDA: " + sda.toString());
        }
        // Condition 3b: end-of-steep-up > maximum-in-between lower
        if (mib * ixi < sda.getMib()) {
          if (LOG.isDebuggingFinest()) {
            LOG.debugFinest("mib * ixi = " + mib * ixi + " < sda.getMib() = " + sda.getMib());
          }
          continue;
        }
        // By default, clusters cover both the steep up and steep down area
        int cstart = sda.getStartIndex(), cend = MathUtil.min(sua.getEndIndex(), clusterOrder.size() - 1);
        // However, we sometimes have to adjust this (Condition 4):
        {
          // Case b)
          if (sda.getMaximum() * ixi >= sua.getMaximum()) {
            while (cstart < cend && reach.doubleValue(tmp.seek(cstart + 1)) > sua.getMaximum()) {
              cstart++;
            }
          } else if (sua.getMaximum() * ixi >= sda.getMaximum()) { // Case c)
            while (cend > cstart && reach.doubleValue(tmp.seek(cend - 1)) > sda.getMaximum()) {
              cend--;
            }
          }
          // Case a) is the default
        }
        // Remove common artifacts of the Xi method
        if (!nocorrect) {
          simplify: while (cend > cstart) {
            clusterOrderResult.predecessor.assignVar(tmp.seek(cend), tmp2);
            for (int i = cstart; i < cend; i++) {
              if (DBIDUtil.equal(tmp2, tmp.seek(i))) {
                break simplify;
              }
            }
            // Not found.
            --cend;
          }
        }
        // Condition 3a: obey minpts
        if (cend - cstart + 1 < minpts) {
          if (LOG.isDebuggingFinest()) {
            LOG.debugFinest("MinPts not satisfied.");
          }
          continue;
        }
        // Build the cluster
        ModifiableDBIDs dbids = DBIDUtil.newArray();
        for (int idx = cstart; idx <= cend; idx++) {
          tmp.seek(idx);
          // Collect only unclaimed IDs.
          if (unclaimedids.remove(tmp)) {
            dbids.add(tmp);
          }
        }
        if (LOG.isDebuggingFine()) {
          LOG.debugFine("Found cluster with " + dbids.size() + " new objects, length " + (cend - cstart + 1));
        }
        OPTICSModel model = new OPTICSModel(cstart, cend);
        Cluster<OPTICSModel> cluster = new Cluster<>("Cluster_" + cstart + "_" + cend, dbids, model);
        // Build the hierarchy
        {
          Iterator<Cluster<OPTICSModel>> iter = curclusters.iterator();
          while (iter.hasNext()) {
            Cluster<OPTICSModel> clus = iter.next();
            OPTICSModel omodel = clus.getModel();
            if (model.getStartIndex() <= omodel.getStartIndex() && omodel.getEndIndex() <= model.getEndIndex()) {
              clustering.addChildCluster(cluster, clus);
              iter.remove();
            }
          }
        }
        curclusters.add(cluster);
      }
      continue;
    }
    // Flat - advance anyway.
    scan.next();
  }
  if (scanprog != null) {
    scanprog.setProcessed(clusterOrder.size(), LOG);
  }
  if (!unclaimedids.isEmpty()) {
    boolean noise = reach.doubleValue(tmp.seek(clusterOrder.size() - 1)) >= Double.POSITIVE_INFINITY;
    Cluster<OPTICSModel> allcluster = new Cluster<>(noise ? "Noise" : "Cluster", unclaimedids, noise, new OPTICSModel(0, clusterOrder.size() - 1));
    for (Cluster<OPTICSModel> cluster : curclusters) {
      clustering.addChildCluster(allcluster, cluster);
    }
    clustering.addToplevelCluster(allcluster);
  } else {
    for (Cluster<OPTICSModel> cluster : curclusters) {
      clustering.addToplevelCluster(cluster);
    }
  }
  clustering.addChildResult(clusterOrderResult);
  if (salist != null) {
    clusterOrderResult.addChildResult(new SteepAreaResult(salist));
  }
  return clustering;
}
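The scan above hinges on the steepDown/steepUp tests of SteepScanPosition. As a rough sketch of the xi-steepness criterion they encode (standalone hypothetical helpers, not the ELKI API; ixi = 1 - xi as in the extractClusters parameter):

// A point is xi-steep down when its successor's reachability is at most
// ixi = 1 - xi times its own, and xi-steep up symmetrically. With
// ixi = 1.0 these degenerate to "non-increasing" / "non-decreasing",
// which is how the minpts "flat step" checks above use them.
static boolean steepDown(double reach, double nextReach, double ixi) {
  return reach * ixi >= nextReach;
}

static boolean steepUp(double reach, double nextReach, double ixi) {
  return reach <= nextReach * ixi;
}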