Example 31 with SegmentNode

use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

the class HeapInsNode method count_flow_intervals.

@Override
public int count_flow_intervals(IVarAbstraction qv) {
    int ret = 0;
    SegmentNode[] int_entry = find_flowto((HeapInsNode) qv);
    for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
        SegmentNode p = int_entry[j];
        while (p != null) {
            ++ret;
            p = p.next;
        }
    }
    return ret;
}
Also used : PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
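
Every helper in these examples walks the same data layout: a points-to or flow-to record is an array of buckets indexed from 0 to HeapInsIntervalManager.Divisions - 1, and each bucket holds a singly linked list of SegmentNode figures chained through the next field. The standalone sketch below illustrates that traversal pattern; the field names mirror soot.jimple.spark.geom.dataRep.SegmentNode, but the classes here are illustrative placeholders, not Soot types, and DIVISIONS is an assumed stand-in for HeapInsIntervalManager.Divisions.

// Minimal sketch of the bucket-of-linked-lists layout traversed above.
// These are illustrative placeholders, not the Soot classes.
class SegmentSketch {
    long I1;              // interval start on the pointer (context) axis
    long I2;              // interval start on the object (context) axis
    long L;               // interval length
    boolean is_new;       // true while the figure has not been propagated yet
    SegmentSketch next;   // next figure in the same bucket
}

class SegmentListDemo {
    // Assumption: stands in for HeapInsIntervalManager.Divisions.
    static final int DIVISIONS = 3;

    // Same counting idiom as count_flow_intervals and count_pts_intervals:
    // visit every bucket and count the nodes of its linked list.
    static int countFigures(SegmentSketch[] buckets) {
        int ret = 0;
        for (int j = 0; j < DIVISIONS; ++j) {
            SegmentSketch p = buckets[j];
            while (p != null) {
                ++ret;
                p = p.next;
            }
        }
        return ret;
    }
}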

Example 32 with SegmentNode

use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

the class HeapInsNode method heap_sensitive_intersection.

/**
 * Query whether this pointer and qv could point to the same object under any context
 */
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
    int i, j;
    HeapInsNode qn;
    SegmentNode p, q, pt[], qt[];
    qn = (HeapInsNode) qv;
    for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext(); ) {
        AllocNode an = it.next();
        if (an instanceof ClassConstantNode)
            continue;
        if (an instanceof StringConstantNode)
            continue;
        qt = qn.find_points_to(an);
        if (qt == null)
            continue;
        pt = find_points_to(an);
        for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
            p = pt[i];
            while (p != null) {
                for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
                    q = qt[j];
                    while (q != null) {
                        if (quick_intersecting_test(p, q))
                            return true;
                        q = q.next;
                    }
                }
                p = p.next;
            }
        }
    }
    return false;
}
Also used : AllocNode(soot.jimple.spark.pag.AllocNode) StringConstantNode(soot.jimple.spark.pag.StringConstantNode) ClassConstantNode(soot.jimple.spark.pag.ClassConstantNode) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
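
quick_intersecting_test is not reproduced on this page. Under the usual reading of the fields, two figures can only yield the same object when their intervals on the object axis overlap. The sketch below is a hedged illustration of such an overlap check on plain longs; it is not the Soot implementation, which additionally has to respect the special figure encodings hinted at by the L < 0 handling in the propagate() example further down.

// Hedged sketch of an interval-overlap test in the spirit of
// quick_intersecting_test (not the Soot implementation). Two half-open
// intervals [a1, a1 + aLen) and [b1, b1 + bLen) intersect iff each one
// starts before the other ends. A non-positive length is treated here
// as "covers everything", an assumption mirroring the special figure
// encodings used by the interval managers.
static boolean intervalsIntersect(long a1, long aLen, long b1, long bLen) {
    if (aLen <= 0 || bLen <= 0)
        return true;        // assumed convention for degenerate/many-to-many figures
    return a1 < b1 + bLen && b1 < a1 + aLen;
}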

Example 33 with SegmentNode

use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

the class HeapInsNode method count_pts_intervals.

@Override
public int count_pts_intervals(AllocNode obj) {
    int ret = 0;
    SegmentNode[] int_entry = find_points_to(obj);
    for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
        SegmentNode p = int_entry[j];
        while (p != null) {
            ++ret;
            p = p.next;
        }
    }
    return ret;
}
Also used : PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)

Example 34 with SegmentNode

use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

the class HeapInsNode method propagate.

/**
 * An efficient implementation of differential propagation.
 */
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    int i, j;
    AllocNode obj;
    SegmentNode pts, pe, int_entry1[], int_entry2[];
    HeapInsIntervalManager him1, him2;
    HeapInsNode qn, objn;
    boolean added, has_new_edges;
    // We first build the new flow edges via the field dereferences
    if (complex_cons != null) {
        for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : new_pts.entrySet()) {
            obj = entry.getKey();
            int_entry1 = entry.getValue().getFigures();
            for (PlainConstraint pcons : complex_cons) {
                // Construct the two variables in assignment
                objn = (HeapInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
                if (objn == null) {
                    // This combination of allocation object and field (allocdotfield) must be invalid
                    // The invalid expression p.f also implies that p cannot point to obj, so we remove it
                    // We label this event and sweep the garbage later
                    pt_objs.put(obj, (HeapInsIntervalManager) deadManager);
                    entry.setValue((HeapInsIntervalManager) deadManager);
                    break;
                }
                if (objn.willUpdate == false) {
                    // This object field is not needed for computing
                    // the points-to information of the seed pointers
                    continue;
                }
                qn = (HeapInsNode) pcons.otherSide;
                for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
                    pts = int_entry1[i];
                    while (pts != null && pts.is_new) {
                        switch(pcons.type) {
                            case Constants.STORE_CONS:
                                // pts.I2 may be zero, pts.L may be less than zero
                                if (qn.add_simple_constraint_3(objn, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.I2, pts.L < 0 ? -pts.L : pts.L))
                                    worklist.push(qn);
                                break;
                            case Constants.LOAD_CONS:
                                // Load, pv.field -> qv
                                if (objn.add_simple_constraint_3(qn, pts.I2, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.L < 0 ? -pts.L : pts.L))
                                    worklist.push(objn);
                                break;
                        }
                        pts = pts.next;
                    }
                }
            }
        }
    }
    for (Map.Entry<HeapInsNode, HeapInsIntervalManager> entry1 : flowto.entrySet()) {
        // First, we pick one flow-to figure
        added = false;
        qn = entry1.getKey();
        him1 = entry1.getValue();
        // Figure collection for the flows-to tuple
        int_entry1 = him1.getFigures();
        has_new_edges = him1.isThereUnprocessedFigures();
        Map<AllocNode, HeapInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);
        for (Map.Entry<AllocNode, HeapInsIntervalManager> entry2 : objs.entrySet()) {
            // Second, we get the points-to intervals
            obj = entry2.getKey();
            him2 = entry2.getValue();
            if (him2 == deadManager)
                continue;
            if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType()))
                continue;
            // Figure collection for the points-to tuple
            int_entry2 = him2.getFigures();
            // Loop over all points-to figures
            for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
                pts = int_entry2[i];
                while (pts != null) {
                    if (!has_new_edges && !pts.is_new)
                        break;
                    // Loop over all flows-to figures
                    for (j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
                        pe = int_entry1[j];
                        while (pe != null) {
                            if (pts.is_new || pe.is_new) {
                                // Propagate this object
                                if (add_new_points_to_tuple(pts, pe, obj, qn))
                                    added = true;
                            } else
                                break;
                            pe = pe.next;
                        }
                    }
                    pts = pts.next;
                }
            }
        }
        if (added)
            worklist.push(qn);
        // Now, we clean the new edges if necessary
        if (has_new_edges)
            him1.flush();
    }
}
Also used : AllocNode(soot.jimple.spark.pag.AllocNode) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) HashMap(java.util.HashMap) Map(java.util.Map) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
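
The shape of propagate() is classic differential (delta) propagation: only figures flagged is_new, or figures crossing flow edges that are themselves new, are pushed to successors, and him1.flush() retires the delta once it has been processed, so the next round starts from an empty delta. The compact sketch below shows that worklist discipline in isolation; every name in it (Node, oldPts, newPts, propagateAll) is a placeholder invented for the illustration and does not exist in Soot.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Hedged sketch of differential worklist propagation, echoing the shape of
// HeapInsNode.propagate(): only the unprocessed delta of a node's points-to
// set is pushed along its out-edges, then the delta is merged into the old
// set ("flushed"). All names are placeholders, not Soot types.
class DeltaPropagationSketch {
    static class Node {
        List<Integer> oldPts = new ArrayList<>();  // facts that were already propagated
        List<Integer> newPts = new ArrayList<>();  // the unprocessed delta (like new_pts)
        List<Node> flowTo = new ArrayList<>();     // assignment edges out of this node

        boolean add(int obj) {
            if (oldPts.contains(obj) || newPts.contains(obj))
                return false;                      // nothing new learned
            return newPts.add(obj);                // goes into the delta first
        }
    }

    static void propagateAll(Deque<Node> worklist) {
        while (!worklist.isEmpty()) {
            Node n = worklist.poll();
            for (Node succ : n.flowTo)
                for (int obj : n.newPts)
                    if (succ.add(obj))
                        worklist.push(succ);       // successor gained something new
            n.oldPts.addAll(n.newPts);             // flush: retire the delta
            n.newPts.clear();
        }
    }
}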

Example 35 with SegmentNode

use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

the class PtInsIntervalManager method clean_garbage_many_to_all.

// Cleans garbage entries in the list whose information is already covered by mp
// Along the way, we also do some simple interval concatenation
private void clean_garbage_many_to_all(SegmentNode mp) {
    SegmentNode p, q, list;
    int num;
    long right, left;
    list = header[1];
    p = q = null;
    num = 0;
    left = mp.I1;
    right = left + mp.L;
    while (list != null) {
        if (list.I1 >= left) {
            if (list.I1 <= right) {
                if (list.I1 + list.L > right) {
                    // We extend mp to the right
                    right = list.I1 + list.L;
                }
                list = list.next;
                continue;
            }
        } else if (list.I1 + list.L >= left) {
            // We extend mp to the left
            left = list.I1;
            list = list.next;
            continue;
        }
        // The unprocessed points-to tuples sit at the head of the list, so we keep the surviving nodes in order
        if (q == null) {
            p = q = list;
        } else {
            q.next = list;
            q = list;
        }
        ++num;
        list = list.next;
    }
    mp.I1 = left;
    mp.L = right - left;
    if (q != null)
        q.next = null;
    header[1] = p;
    size[1] = num;
}
Also used : SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
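
clean_garbage_many_to_all does two things at once: it widens the many-to-all segment mp so that it absorbs every overlapping one-dimensional segment, and it unlinks the absorbed segments so header[1] keeps only figures that still carry independent information. The hedged sketch below isolates the widen-or-keep decision for a single candidate segment, mirroring the inclusive boundary tests used above; the long[2] bounds parameter is an illustration device, not a Soot API.

// Hedged sketch of the widen-and-absorb step: given the current bounds
// [left, right] of the many-to-all segment and a candidate segment
// [s, s + len], either absorb the candidate (possibly growing the bounds)
// or report that the caller must keep it. Mirrors the inclusive
// comparisons in clean_garbage_many_to_all; not a Soot method.
static boolean absorbs(long[] bounds, long s, long len) {
    long left = bounds[0], right = bounds[1];
    if (s >= left && s <= right) {      // starts inside mp
        if (s + len > right)
            bounds[1] = s + len;        // extend mp to the right
        return true;
    }
    if (s < left && s + len >= left) {  // overlaps mp on the left
        bounds[0] = s;                  // extend mp to the left
        return true;
    }
    return false;                       // not covered: the caller keeps it in the list
}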

Aggregations

SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode): 51
PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint): 24
AllocNode (soot.jimple.spark.pag.AllocNode): 12
HashMap (java.util.HashMap): 6
Map (java.util.Map): 6
RectangleNode (soot.jimple.spark.geom.dataRep.RectangleNode): 5
SootMethod (soot.SootMethod): 3
GeomPointsTo (soot.jimple.spark.geom.geomPA.GeomPointsTo): 3
StringConstantNode (soot.jimple.spark.pag.StringConstantNode): 3
ClassConstantNode (soot.jimple.spark.pag.ClassConstantNode): 2