Use of soot.jimple.spark.geom.dataRep.SegmentNode in the project soot, by Sable.
From the class PtInsIntervalManager: method generate_all_to_many.
/**
 * Collapses every context-sensitive interval in the list into one summary
 * interval of the form (p, q, 0, I, L): I is the smallest interval start
 * seen, and L stretches to the largest interval end.
 *
 * @param mp head of a non-empty list of context-sensitive intervals
 * @return the head node, reused in place as the merged interval, with its
 *         tail detached
 */
private SegmentNode generate_all_to_many(SegmentNode mp) {
    long lo = mp.I2;
    long hi = mp.I2 + mp.L;

    // Widen [lo, hi) until it covers every segment in the list.
    for (SegmentNode cur = mp.next; cur != null; cur = cur.next) {
        if (cur.I2 < lo) {
            lo = cur.I2;
        }
        long end = cur.I2 + cur.L;
        if (end > hi) {
            hi = end;
        }
    }

    // Reuse the head node for the merged result and drop the rest.
    mp.I1 = 0;
    mp.I2 = lo;
    mp.L = hi - lo;
    mp.next = null;
    return mp;
}
Use of soot.jimple.spark.geom.dataRep.SegmentNode in the project soot, by Sable.
From the class FullSensitiveNode: method heap_sensitive_intersection.
/**
 * Tests whether this pointer and {@code qv} may point to a common heap
 * object under the context-sensitive (geometric) encoding.
 *
 * @param qv the other pointer abstraction (a FullSensitiveNode)
 * @return true as soon as any pair of geometric figures for a shared
 *         object intersects; false if no shared object aliases
 */
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
    FullSensitiveNode other = (FullSensitiveNode) qv;
    // A more precise intersection test applies when both pointers are
    // local to the same method.
    boolean sameMethod = (enclosingMethod() == qv.enclosingMethod());

    for (AllocNode an : pt_objs.keySet()) {
        // Constant objects are never treated as aliasing candidates.
        if (an instanceof ClassConstantNode || an instanceof StringConstantNode) {
            continue;
        }
        SegmentNode[] otherFigures = other.find_points_to(an);
        if (otherFigures == null) {
            continue;
        }
        SegmentNode[] myFigures = find_points_to(an);

        // Pair up every figure of ours with every figure of theirs
        // for this shared object.
        for (int i = 0; i < GeometricManager.Divisions; ++i) {
            for (SegmentNode p = myFigures[i]; p != null; p = p.next) {
                for (int j = 0; j < GeometricManager.Divisions; ++j) {
                    for (SegmentNode q = otherFigures[j]; q != null; q = q.next) {
                        boolean hit = sameMethod ? p.intersect(q) : p.projYIntersect(q);
                        if (hit) {
                            return true;
                        }
                    }
                }
            }
        }
    }
    return false;
}
Use of soot.jimple.spark.geom.dataRep.SegmentNode in the project soot, by Sable.
From the class FullSensitiveNode: method count_pts_intervals.
/**
 * Counts the geometric points-to intervals recorded for {@code obj}.
 *
 * @param obj the allocation site to look up
 * @return the total number of segment nodes across all divisions
 */
@Override
public int count_pts_intervals(AllocNode obj) {
    // NOTE(review): assumes obj is present, i.e. find_points_to does not
    // return null here — confirm against callers.
    SegmentNode[] figures = find_points_to(obj);
    int total = 0;
    for (int d = 0; d < GeometricManager.Divisions; ++d) {
        for (SegmentNode seg = figures[d]; seg != null; seg = seg.next) {
            ++total;
        }
    }
    return total;
}
Use of soot.jimple.spark.geom.dataRep.SegmentNode in the project soot, by Sable.
From the class FullSensitiveNode: method count_flow_intervals.
/**
 * Counts the geometric flow intervals on the assignment edge from this
 * pointer to {@code qv}.
 *
 * @param qv the flow-to pointer (a FullSensitiveNode)
 * @return the total number of segment nodes across all divisions
 */
@Override
public int count_flow_intervals(IVarAbstraction qv) {
    // NOTE(review): assumes the flow edge exists, i.e. find_flowto does
    // not return null here — confirm against callers.
    SegmentNode[] figures = find_flowto((FullSensitiveNode) qv);
    int total = 0;
    for (int d = 0; d < GeometricManager.Divisions; ++d) {
        for (SegmentNode seg = figures[d]; seg != null; seg = seg.next) {
            ++total;
        }
    }
    return total;
}
Use of soot.jimple.spark.geom.dataRep.SegmentNode in the project soot, by Sable.
From the class FullSensitiveNode: method propagate.
/**
 * The place where you implement the pointer assignment reasoning.
 * Propagates this node's points-to information in two phases:
 * first through complex constraints (loads/stores on object fields),
 * then along the plain assignment edges in {@code flowto}. Nodes whose
 * points-to sets grow are pushed back onto {@code worklist}.
 *
 * NOTE(review): correctness depends on the is_new flags maintained by
 * GeometricManager (new figures sit at the front of each list) and on
 * gm1.flush() clearing them — confirm before reordering anything here.
 */
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
int i, j;
AllocNode obj;
SegmentNode pts, pe, entry_pts[], entry_pe[];
GeometricManager gm1, gm2;
FullSensitiveNode qn, objn;
boolean added, hasNewPointsTo;
// Nothing to propagate if this pointer points to nothing.
if (pt_objs.size() == 0)
return;
// We first build the flow edges that flow in to/out of object fields
if (complex_cons != null) {
// Only the newly discovered points-to tuples need processing here.
for (Map.Entry<AllocNode, GeometricManager> entry : new_pts.entrySet()) {
obj = entry.getKey();
entry_pts = entry.getValue().getFigures();
for (PlainConstraint pcons : complex_cons) {
// For each newly points-to object, construct its instance field
objn = (FullSensitiveNode) ptAnalyzer.findInstanceField(obj, pcons.f);
if (objn == null) {
// This combination of allocdotfield must be invalid
// This expression p.f also renders that p cannot point to obj, so we remove it
// We label this event and sweep the garbage later
pt_objs.put(obj, (GeometricManager) deadManager);
entry.setValue((GeometricManager) deadManager);
// No other constraint can apply to a dead object.
break;
}
if (objn.willUpdate == false) {
// the points-to information of the seed pointers
continue;
}
qn = (FullSensitiveNode) pcons.otherSide;
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
// New figures are at the front of the list, so stop at the
// first already-processed one.
while (pts != null && pts.is_new == true) {
switch(pcons.type) {
case Constants.STORE_CONS:
// Store, qv -> pv.field
// (pcons.code << 8) | i encodes the constraint code and the
// division index into one int for the instantiation call.
if (instantiateStoreConstraint(qn, objn, pts, (pcons.code << 8) | i))
worklist.push(qn);
break;
case Constants.LOAD_CONS:
// Load, pv.field -> qv
if (instantiateLoadConstraint(objn, qn, pts, (pcons.code << 8) | i))
worklist.push(objn);
break;
}
pts = pts.next;
}
}
}
}
}
// No assignment edges: the second phase has no work.
if (flowto.size() == 0)
return;
// Next, we process the assignments (e.g. p = q)
for (Map.Entry<FullSensitiveNode, GeometricManager> entry1 : flowto.entrySet()) {
added = false;
qn = entry1.getKey();
gm1 = entry1.getValue();
entry_pe = gm1.getFigures();
// We have new flow-to edges
if (gm1.isThereUnprocessedFigures()) {
// Second, we get the points-to shapes
// New edges must be paired with ALL points-to tuples (old + new),
// so iterate the full pt_objs map.
for (Map.Entry<AllocNode, GeometricManager> entry2 : pt_objs.entrySet()) {
obj = entry2.getKey();
gm2 = entry2.getValue();
// Avoid the garbage
if (gm2 == deadManager)
continue;
// Type filtering and flow-to-this filtering, a simple approach
if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType()))
continue;
entry_pts = gm2.getFigures();
hasNewPointsTo = gm2.isThereUnprocessedFigures();
// We pair up all the geometric points-to tuples and flow edges
for (j = 0; j < GeometricManager.Divisions; ++j) {
pe = entry_pe[j];
while (pe != null) {
// Once we reach old edges with no new points-to tuples,
// every remaining (pts, pe) pair was already processed.
if (pe.is_new == false && hasNewPointsTo == false)
break;
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
// Only new-vs-any and any-vs-new pairs need work.
while (pts != null && (pts.is_new || pe.is_new)) {
// Propagate this object
if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j))
added = true;
pts = pts.next;
}
}
pe = pe.next;
}
}
}
// Mark this edge's figures as processed for the next round.
gm1.flush();
} else {
// No new edges: only new points-to tuples can produce new work,
// so iterate just the new_pts map.
for (Map.Entry<AllocNode, GeometricManager> entry2 : new_pts.entrySet()) {
obj = entry2.getKey();
gm2 = entry2.getValue();
// Avoid the garbage
if (gm2 == deadManager)
continue;
// Type filtering and flow-to-this filtering, a simple approach
if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType()))
continue;
entry_pts = gm2.getFigures();
// We pair up all the geometric points-to tuples and flow edges
for (i = 0; i < GeometricManager.Divisions; ++i) {
pts = entry_pts[i];
while (pts != null && pts.is_new == true) {
for (j = 0; j < GeometricManager.Divisions; ++j) {
pe = entry_pe[j];
while (pe != null) {
// Propagate this object
if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j))
added = true;
pe = pe.next;
}
}
pts = pts.next;
}
}
}
}
// The target pointer gained new tuples; reschedule it.
if (added)
worklist.push(qn);
}
// System.err.println();
}
Aggregations