
Example 21 with SegmentNode

Use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

The class PtInsNode, method count_pts_intervals.

@Override
public int count_pts_intervals(AllocNode obj) {
    int ret = 0;
    SegmentNode[] int_entry = find_points_to(obj);
    for (int j = 0; j < PtInsIntervalManager.Divisions; ++j) {
        SegmentNode p = int_entry[j];
        while (p != null) {
            ++ret;
            p = p.next;
        }
    }
    return ret;
}
Also used: PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
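
The two-level traversal above recurs throughout these examples: an outer loop over the figure divisions and an inner walk of each singly linked SegmentNode list. The sketch below isolates that pattern; countSegments is a hypothetical helper name, and it assumes only the public next field that the method above already uses.

// Hypothetical helper illustrating the traversal pattern shared by count_pts_intervals
// and count_flow_intervals. Requires: import soot.jimple.spark.geom.dataRep.SegmentNode;
static int countSegments(SegmentNode[] figures, int divisions) {
    int total = 0;
    for (int j = 0; j < divisions; ++j) {
        // Each division slot holds the head of a singly linked list of interval figures.
        for (SegmentNode p = figures[j]; p != null; p = p.next) {
            ++total;
        }
    }
    return total;
}

With find_points_to(obj) supplying the array and PtInsIntervalManager.Divisions as the bound, this reduces to the method above.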

Example 22 with SegmentNode

Use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

The class PtInsNode, method propagate.

/**
 * An efficient implementation of differential propagation.
 */
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
    int i, j;
    AllocNode obj;
    SegmentNode pts, pe, int_entry1[], int_entry2[];
    PtInsIntervalManager pim1, pim2;
    PtInsNode qn, objn;
    boolean added, has_new_edges;
    // We first build the new flow edges via the field dereferences
    if (complex_cons != null) {
        for (Map.Entry<AllocNode, PtInsIntervalManager> entry : new_pts.entrySet()) {
            obj = entry.getKey();
            int_entry1 = entry.getValue().getFigures();
            for (PlainConstraint pcons : complex_cons) {
                // Construct the two variables in assignment
                objn = (PtInsNode) ptAnalyzer.findAndInsertInstanceField(obj, pcons.f);
                if (objn == null) {
                    // This combination of allocdotfield must be invalid
                    // This expression p.f also renders that p cannot point to obj, so we remove it
                    // We label this event and sweep the garbage later
                    pt_objs.put(obj, (PtInsIntervalManager) deadManager);
                    entry.setValue((PtInsIntervalManager) deadManager);
                    break;
                }
                if (objn.willUpdate == false) {
                    // This field is not needed for computing
                    // the points-to information of the seed pointers
                    continue;
                }
                qn = (PtInsNode) pcons.otherSide;
                for (i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
                    pts = int_entry1[i];
                    while (pts != null && pts.is_new) {
                        switch(pcons.type) {
                            case Constants.STORE_CONS:
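                                // Store, qv -> pv.field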
                                // pts.I2 may be zero, pts.L may be less than zero
                                if (qn.add_simple_constraint_3(objn, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.I2, pts.L))
                                    worklist.push(qn);
                                break;
                            case Constants.LOAD_CONS:
                                // Load, pv.field -> qv
                                if (objn.add_simple_constraint_3(qn, pts.I2, pcons.code == GeometricManager.ONE_TO_ONE ? pts.I1 : 0, pts.L))
                                    worklist.push(objn);
                                break;
                        }
                        pts = pts.next;
                    }
                }
            }
        }
    }
    for (Map.Entry<PtInsNode, PtInsIntervalManager> entry1 : flowto.entrySet()) {
        // First, we get the flow-to intervals
        added = false;
        qn = entry1.getKey();
        pim1 = entry1.getValue();
        int_entry1 = pim1.getFigures();
        has_new_edges = pim1.isThereUnprocessedFigures();
        Map<AllocNode, PtInsIntervalManager> objs = (has_new_edges ? pt_objs : new_pts);
        for (Map.Entry<AllocNode, PtInsIntervalManager> entry2 : objs.entrySet()) {
            // Second, we get the points-to intervals
            obj = entry2.getKey();
            pim2 = entry2.getValue();
            if (pim2 == deadManager)
                continue;
            if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getWrappedNode().getType()))
                continue;
            int_entry2 = pim2.getFigures();
            // We pair up all the interval points-to tuples and interval flow edges
            for (i = 0; i < PtInsIntervalManager.Divisions; ++i) {
                pts = int_entry2[i];
                while (pts != null) {
                    if (!has_new_edges && !pts.is_new)
                        break;
                    for (j = 0; j < PtInsIntervalManager.Divisions; ++j) {
                        pe = int_entry1[j];
                        while (pe != null) {
                            if (pts.is_new || pe.is_new) {
                                // Propagate this object
                                if (add_new_points_to_tuple(pts, pe, obj, qn))
                                    added = true;
                            } else
                                break;
                            pe = pe.next;
                        }
                    }
                    pts = pts.next;
                }
            }
        }
        if (added)
            worklist.push(qn);
        // Now, we clean the new edges if necessary
        if (has_new_edges) {
            pim1.flush();
        }
    }
}
Also used: AllocNode(soot.jimple.spark.pag.AllocNode) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) HashMap(java.util.HashMap) Map(java.util.Map) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
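
The pairing loop at the end of propagate is where the differential part happens: a (points-to figure, flow edge) pair is processed only when at least one side is new, and because each list appears to keep its unprocessed figures at the front, the walks can stop early. The skeleton below isolates that rule; pairNewFigures is a hypothetical helper, and the new-before-old ordering is an assumption inferred from the break conditions in the method above.

// Hypothetical skeleton of the differential pairing in propagate. Assumes each
// figure list keeps new (unprocessed) figures before old ones.
static void pairNewFigures(SegmentNode[] ptsFigures, SegmentNode[] edgeFigures,
        int divisions, boolean hasNewEdges) {
    for (int i = 0; i < divisions; ++i) {
        for (SegmentNode pts = ptsFigures[i]; pts != null; pts = pts.next) {
            // If all edges are old and only old points-to figures remain, nothing new can arise.
            if (!hasNewEdges && !pts.is_new)
                break;
            for (int j = 0; j < divisions; ++j) {
                for (SegmentNode pe = edgeFigures[j]; pe != null; pe = pe.next) {
                    // An old-old pair was already handled in an earlier round.
                    if (!pts.is_new && !pe.is_new)
                        break;
                    // The real method calls add_new_points_to_tuple(pts, pe, obj, qn) here
                    // and pushes qn onto the worklist if anything was added.
                }
            }
        }
    }
}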

Example 23 with SegmentNode

Use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

The class PtInsNode, method print_context_sensitive_points_to.

@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
    for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext(); ) {
        AllocNode obj = it.next();
        SegmentNode[] int_entry = find_points_to(obj);
        if (int_entry != null) {
            for (int j = 0; j < PtInsIntervalManager.Divisions; ++j) {
                SegmentNode p = int_entry[j];
                while (p != null) {
                    outPrintStream.println("(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ")");
                    p = p.next;
                }
            }
        }
    }
}
Also used: AllocNode(soot.jimple.spark.pag.AllocNode) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
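
Each printed tuple (obj, I1, I2, L) encodes a block of context-sensitive facts: for a one-to-one figure, pointer context I1 + k points to object context I2 + k for every 0 <= k < L. The helper below makes that reading explicit; its name is hypothetical, and many-to-all and all-to-many figures encode their ranges differently, as Example 25 shows.

// Hypothetical helper: for a one-to-one tuple (I1, I2, L), return the object
// context that a given pointer context maps to, or -1 if it falls outside the segment.
static long objectContextFor(long i1, long i2, long len, long pointerCtx) {
    long k = pointerCtx - i1;
    if (k < 0 || k >= len)
        return -1;
    return i2 + k;
}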

Example 24 with SegmentNode

Use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

The class PtInsNode, method count_flow_intervals.

@Override
public int count_flow_intervals(IVarAbstraction qv) {
    int ret = 0;
    SegmentNode[] int_entry = find_flowto((PtInsNode) qv);
    for (int j = 0; j < PtInsIntervalManager.Divisions; ++j) {
        SegmentNode p = int_entry[j];
        while (p != null) {
            ++ret;
            p = p.next;
        }
    }
    return ret;
}
Also used: PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
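
The count can also be aggregated over several successors. A hypothetical usage sketch, assuming the targets passed in really are flow-to successors of the source node (count_flow_intervals does not guard against unrelated nodes):

// Hypothetical usage sketch: sum the flow intervals from one node to a set of
// flow-to successors (PtInsNode and IVarAbstraction from soot's geometric analysis).
static int totalFlowIntervals(PtInsNode source, Iterable<IVarAbstraction> targets) {
    int total = 0;
    for (IVarAbstraction qv : targets) {
        total += source.count_flow_intervals(qv);
    }
    return total;
}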

Example 25 with SegmentNode

Use of soot.jimple.spark.geom.dataRep.SegmentNode in project soot by Sable.

The class PtInsNode, method get_all_context_sensitive_objects.

@Override
public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
    if (parent != this) {
        getRepresentative().get_all_context_sensitive_objects(l, r, visitor);
        return;
    }
    GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    for (Map.Entry<AllocNode, PtInsIntervalManager> entry : pt_objs.entrySet()) {
        AllocNode obj = entry.getKey();
        PtInsIntervalManager im = entry.getValue();
        SegmentNode[] int_entry = im.getFigures();
        // We first get the 1-CFA contexts for the object
        SootMethod sm = obj.getMethod();
        int sm_int = 0;
        long n_contexts = 1;
        if (sm != null) {
            sm_int = geomPTA.getIDFromSootMethod(sm);
            n_contexts = geomPTA.context_size[sm_int];
        }
        // We search for all the pointers falling in the range [l, r) that may point to this object
        for (int i = 0; i < PtInsIntervalManager.Divisions; ++i) {
            SegmentNode p = int_entry[i];
            while (p != null) {
                long R = p.I1 + p.L;
                long objL = -1, objR = -1;
                // Now we compute which context sensitive objects are pointed to by this pointer
                if (i == PtInsIntervalManager.ALL_TO_MANY) {
                    // all-to-many figures
                    objL = p.I2;
                    objR = p.I2 + p.L;
                } else {
                    // We compute the intersection
                    if (l <= p.I1 && p.I1 < r) {
                        if (i != PtInsIntervalManager.MANY_TO_ALL) {
                            long d = r - p.I1;
                            if (d > p.L)
                                d = p.L;
                            objL = p.I2;
                            objR = objL + d;
                        } else {
                            objL = 1;
                            objR = 1 + n_contexts;
                        }
                    } else if (p.I1 <= l && l < R) {
                        if (i != PtInsIntervalManager.MANY_TO_ALL) {
                            long d = R - l;
                            if (R > r)
                                d = r - l;
                            objL = p.I2 + l - p.I1;
                            objR = objL + d;
                        } else {
                            objL = 1;
                            objR = 1 + n_contexts;
                        }
                    }
                }
                // Now we test which context versions this interval [objL, objR) maps to
                if (objL != -1 && objR != -1)
                    visitor.visit(obj, objL, objR, sm_int);
                p = p.next;
            }
        }
    }
}
Also used: AllocNode(soot.jimple.spark.pag.AllocNode) GeomPointsTo(soot.jimple.spark.geom.geomPA.GeomPointsTo) SootMethod(soot.SootMethod) HashMap(java.util.HashMap) Map(java.util.Map) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SegmentNode(soot.jimple.spark.geom.dataRep.SegmentNode)
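
The heart of Example 25 is the intersection of the query range [l, r) with a one-to-one segment: pointer contexts [I1, I1 + L) map onto object contexts [I2, I2 + L), so the method clips the query against the pointer side and shifts the result onto the object side. The helper below sketches just that step; it is hypothetical, and the real method additionally handles the MANY_TO_ALL and ALL_TO_MANY figure kinds, which expand to full context ranges.

// Hypothetical helper: intersect the query [l, r) with a one-to-one segment that
// starts at pointer context i1 and object context i2 and has length len.
// Returns {objL, objR} for the half-open object-context range, or null if empty.
static long[] mapQueryToObjectContexts(long l, long r, long i1, long i2, long len) {
    long lo = Math.max(l, i1);
    long hi = Math.min(r, i1 + len);
    if (lo >= hi)
        return null;
    long objL = i2 + (lo - i1);
    return new long[] { objL, objL + (hi - lo) };
}

This clipped form is equivalent to the two-branch intersection written out in the method above.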

Aggregations

SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode): 51 usages
PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint): 24 usages
AllocNode (soot.jimple.spark.pag.AllocNode): 12 usages
HashMap (java.util.HashMap): 6 usages
Map (java.util.Map): 6 usages
RectangleNode (soot.jimple.spark.geom.dataRep.RectangleNode): 5 usages
SootMethod (soot.SootMethod): 3 usages
GeomPointsTo (soot.jimple.spark.geom.geomPA.GeomPointsTo): 3 usages
StringConstantNode (soot.jimple.spark.pag.StringConstantNode): 3 usages
ClassConstantNode (soot.jimple.spark.pag.ClassConstantNode): 2 usages