Example 11 with AllocNode

Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.

From the class HeapInsNode, method injectPts.

@Override
public void injectPts() {
    final GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    pt_objs = new HashMap<AllocNode, HeapInsIntervalManager>();
    me.getP2Set().forall(new P2SetVisitor() {

        @Override
        public void visit(Node n) {
            if (geomPTA.isValidGeometricNode(n))
                pt_objs.put((AllocNode) n, (HeapInsIntervalManager) stubManager);
        }
    });
    new_pts = null;
}
Also used: AllocNode (soot.jimple.spark.pag.AllocNode), GeomPointsTo (soot.jimple.spark.geom.geomPA.GeomPointsTo), SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode), StringConstantNode (soot.jimple.spark.pag.StringConstantNode), RectangleNode (soot.jimple.spark.geom.dataRep.RectangleNode), ClassConstantNode (soot.jimple.spark.pag.ClassConstantNode), LocalVarNode (soot.jimple.spark.pag.LocalVarNode), Node (soot.jimple.spark.pag.Node), P2SetVisitor (soot.jimple.spark.sets.P2SetVisitor)
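
The forall/P2SetVisitor idiom used in injectPts above is the standard way to walk a Spark points-to set. The following minimal sketch applies the same idiom outside HeapInsNode to collect the AllocNode members of a variable's points-to set; the class and method names here are hypothetical, introduced only for illustration.

import java.util.HashSet;
import java.util.Set;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.P2SetVisitor;

public class AllocCollector {
    // Collects every AllocNode in the points-to set of the given variable node,
    // mirroring the forall/P2SetVisitor pattern from HeapInsNode.injectPts.
    public static Set<AllocNode> collectAllocs(VarNode v) {
        final Set<AllocNode> allocs = new HashSet<AllocNode>();
        v.getP2Set().forall(new P2SetVisitor() {
            @Override
            public void visit(Node n) {
                if (n instanceof AllocNode)
                    allocs.add((AllocNode) n);
            }
        });
        return allocs;
    }
}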

Example 12 with AllocNode

Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.

From the class HeapInsNode, method get_all_context_sensitive_objects.

@Override
public void get_all_context_sensitive_objects(long l, long r, PtSensVisitor visitor) {
    if (parent != this) {
        getRepresentative().get_all_context_sensitive_objects(l, r, visitor);
        return;
    }
    GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    for (Map.Entry<AllocNode, HeapInsIntervalManager> entry : pt_objs.entrySet()) {
        AllocNode obj = entry.getKey();
        HeapInsIntervalManager im = entry.getValue();
        SegmentNode[] int_entry = im.getFigures();
        // We first get the 1-CFA contexts for the object
        SootMethod sm = obj.getMethod();
        int sm_int = 0;
        long n_contexts = 1;
        if (sm != null) {
            sm_int = geomPTA.getIDFromSootMethod(sm);
            n_contexts = geomPTA.context_size[sm_int];
        }
        // We search for all the pointers falling in the range [l, r) that may point to this object
        for (int i = 0; i < HeapInsIntervalManager.Divisions; ++i) {
            SegmentNode p = int_entry[i];
            while (p != null) {
                long R = p.I1 + p.L;
                long objL = -1, objR = -1;
                // Now we compute which context-sensitive objects this pointer points to
                if (i == HeapInsIntervalManager.ALL_TO_MANY) {
                    // all-to-many figures
                    objL = p.I2;
                    objR = p.I2 + p.L;
                } else {
                    // We compute the intersection
                    if (l <= p.I1 && p.I1 < r) {
                        if (i != HeapInsIntervalManager.MANY_TO_ALL) {
                            long d = r - p.I1;
                            if (d > p.L)
                                d = p.L;
                            objL = p.I2;
                            objR = objL + d;
                        } else {
                            objL = 1;
                            objR = 1 + n_contexts;
                        }
                    } else if (p.I1 <= l && l < R) {
                        if (i != HeapInsIntervalManager.MANY_TO_ALL) {
                            long d = R - l;
                            if (R > r)
                                d = r - l;
                            objL = p.I2 + l - p.I1;
                            objR = objL + d;
                        } else {
                            objL = 1;
                            objR = 1 + n_contexts;
                        }
                    }
                }
                // Now we test which context versions this interval [objL, objR) maps to
                if (objL != -1 && objR != -1)
                    visitor.visit(obj, objL, objR, sm_int);
                p = p.next;
            }
        }
    }
}
Also used: AllocNode (soot.jimple.spark.pag.AllocNode), GeomPointsTo (soot.jimple.spark.geom.geomPA.GeomPointsTo), SootMethod (soot.SootMethod), HashMap (java.util.HashMap), Map (java.util.Map), PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint), SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode)
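
At its core, the intersection logic above is plain interval arithmetic: a one-to-one figure maps the pointer range [I1, I1 + L) onto the object range [I2, I2 + L) at the same offset, so intersecting the query [l, r) with the pointer side yields the matching slice on the object side. The standalone helper below restates just that step for the non-MANY_TO_ALL case; it is a hypothetical illustration, not part of Soot.

public class IntervalIntersection {
    // Given a query range [l, r) over pointer contexts and a segment that starts
    // at i1 on the pointer side and i2 on the object side with length len,
    // return {objL, objR} for the matching object contexts, or null if disjoint.
    public static long[] intersect(long l, long r, long i1, long i2, long len) {
        long R = i1 + len;
        if (l <= i1 && i1 < r) {
            // the query starts at or before the segment
            long d = Math.min(r - i1, len);
            return new long[] { i2, i2 + d };
        } else if (i1 <= l && l < R) {
            // the query starts inside the segment
            long d = (R > r) ? (r - l) : (R - l);
            return new long[] { i2 + l - i1, i2 + l - i1 + d };
        }
        return null;
    }
}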

Example 13 with AllocNode

Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.

From the class HeapInsNode, method print_context_sensitive_points_to.

@Override
public void print_context_sensitive_points_to(PrintStream outPrintStream) {
    for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext(); ) {
        AllocNode obj = it.next();
        SegmentNode[] int_entry = find_points_to(obj);
        for (int j = 0; j < HeapInsIntervalManager.Divisions; ++j) {
            SegmentNode p = int_entry[j];
            while (p != null) {
                outPrintStream.println("(" + obj.toString() + ", " + p.I1 + ", " + p.I2 + ", " + p.L + ")");
                p = p.next;
            }
        }
    }
}
Also used: AllocNode (soot.jimple.spark.pag.AllocNode), PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint), SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode)
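
print_context_sensitive_points_to dumps the raw interval encoding; a client that only needs context-insensitive answers can go through the standard Soot query interface instead. A minimal sketch, assuming a whole-program points-to analysis has already been computed and that local is a Local of interest:

import soot.Local;
import soot.PointsToAnalysis;
import soot.PointsToSet;
import soot.Scene;

public class PointsToDump {
    // Prints the possible runtime types of the objects a local may point to.
    public static void dump(Local local) {
        PointsToAnalysis pta = Scene.v().getPointsToAnalysis();
        PointsToSet pts = pta.reachingObjects(local);
        System.out.println(local + " may point to objects of types " + pts.possibleTypes());
    }
}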

Example 14 with AllocNode

Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.

From the class MethodNodeFactory, method caseStringConstant.

@Override
public final void caseStringConstant(StringConstant sc) {
    AllocNode stringConstant;
    if (pag.getOpts().string_constants() || Scene.v().containsClass(sc.value) || (sc.value.length() > 0 && sc.value.charAt(0) == '[')) {
        stringConstant = pag.makeStringConstantNode(sc.value);
    } else {
        stringConstant = pag.makeAllocNode(PointsToAnalysis.STRING_NODE, RefType.v("java.lang.String"), null);
    }
    VarNode stringConstantLocal = pag.makeGlobalVarNode(stringConstant, RefType.v("java.lang.String"));
    pag.addEdge(stringConstant, stringConstantLocal);
    setResult(stringConstantLocal);
}
Also used: VarNode (soot.jimple.spark.pag.VarNode), AllocNode (soot.jimple.spark.pag.AllocNode)
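
Whether caseStringConstant creates a distinct node per string constant or merges everything into the shared STRING_NODE is governed by Spark's string-constants phase option. A configuration sketch is shown below; the phase name cg.spark and the option key string-constants are Spark's standard ones, but classpath and entry-point setup are assumed to happen elsewhere.

import soot.PackManager;
import soot.Scene;
import soot.options.Options;

public class SparkStringConstantsConfig {
    public static void main(String[] args) {
        // Model each string constant as its own allocation site instead of
        // folding all of them into the single shared STRING_NODE.
        Options.v().set_whole_program(true);
        Options.v().setPhaseOption("cg.spark", "enabled:true");
        Options.v().setPhaseOption("cg.spark", "string-constants:true");
        // Classpath and main class are assumed to be configured before this point.
        Scene.v().loadNecessaryClasses();
        PackManager.v().runPacks();
    }
}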

Example 15 with AllocNode

Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.

From the class DemandCSPointsTo, method findContextsForAllocs.

protected AllocAndContextSet findContextsForAllocs(final VarAndContext varAndContext, PointsToSetInternal locs) {
    if (contextForAllocsStack.contains(varAndContext)) {
        // recursion; check depth
        // we're fine for x = x.next
        int oldIndex = contextForAllocsStack.indexOf(varAndContext);
        if (oldIndex != contextForAllocsStack.size() - 1) {
            if (recursionDepth == -1) {
                recursionDepth = oldIndex + 1;
                if (DEBUG) {
                    debugPrint("RECURSION depth = " + recursionDepth);
                }
            } else if (contextForAllocsStack.size() - oldIndex > 5) {
                // just give up
                throw new TerminateEarlyException();
            }
        }
    }
    contextForAllocsStack.push(varAndContext);
    final AllocAndContextSet ret = new AllocAndContextSet();
    final PointsToSetInternal realLocs = checkContextsForAllocsCache(varAndContext, ret, locs);
    if (realLocs.isEmpty()) {
        if (DEBUG) {
            debugPrint("cached result " + ret);
        }
        contextForAllocsStack.pop();
        return ret;
    }
    nesting++;
    if (DEBUG) {
        debugPrint("finding alloc contexts for " + varAndContext);
    }
    try {
        final Set<VarAndContext> marked = new HashSet<VarAndContext>();
        final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
        final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
        p.prop(varAndContext);
        IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {

            @Override
            public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
                if (realLocs.contains(allocNode)) {
                    if (DEBUG) {
                        debugPrint("found alloc " + allocNode);
                    }
                    ret.add(new AllocAndContext(allocNode, origVarAndContext.context));
                }
            }

            @Override
            public void handleMatchSrc(final VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
                if (DEBUG) {
                    debugPrint("handling src " + matchSrc);
                    debugPrint("intersection " + intersection);
                }
                if (!refine) {
                    p.prop(new VarAndContext(matchSrc, EMPTY_CALLSTACK));
                    return;
                }
                AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
                if (DEBUG) {
                    debugPrint("alloc contexts " + allocContexts);
                }
                for (AllocAndContext allocAndContext : allocContexts) {
                    if (DEBUG) {
                        debugPrint("alloc and context " + allocAndContext);
                    }
                    CallingContextSet matchSrcContexts;
                    if (fieldCheckHeuristic.validFromBothEnds(field)) {
                        matchSrcContexts = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                    } else {
                        matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
                    }
                    for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
                        // ret
                        // .add(new Pair<AllocNode,
                        // ImmutableStack<Integer>>(
                        // (AllocNode) n,
                        // matchSrcContext));
                        // ret.addAll(findContextsForAllocs(matchSrc,
                        // matchSrcContext, locs));
                        p.prop(new VarAndContext(matchSrc, matchSrcContext));
                    }
                }
            }

            @Override
            Object getResult() {
                return ret;
            }

            @Override
            void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
                p.prop(newVarAndContext);
            }

            @Override
            boolean shouldHandleSrc(VarNode src) {
                return realLocs.hasNonEmptyIntersection(src.getP2Set());
            }
        };
        processIncomingEdges(edgeHandler, worklist);
        // update the cache
        if (recursionDepth != -1) {
            // if we're beyond recursion, don't cache anything
            if (contextForAllocsStack.size() > recursionDepth) {
                if (DEBUG) {
                    debugPrint("REMOVING " + varAndContext);
                    debugPrint(contextForAllocsStack.toString());
                }
                contextsForAllocsCache.remove(varAndContext);
            } else {
                assert contextForAllocsStack.size() == recursionDepth : recursionDepth + " " + contextForAllocsStack;
                recursionDepth = -1;
                if (contextsForAllocsCache.containsKey(varAndContext)) {
                    contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
                } else {
                    PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
                    storedSet.addAll(locs, null);
                    contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
                }
            }
        } else {
            if (contextsForAllocsCache.containsKey(varAndContext)) {
                contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
            } else {
                PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
                storedSet.addAll(locs, null);
                contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
            }
        }
        nesting--;
        return ret;
    } catch (CallSiteException e) {
        contextsForAllocsCache.remove(varAndContext);
        throw e;
    } finally {
        contextForAllocsStack.pop();
    }
}
Also used: GlobalVarNode (soot.jimple.spark.pag.GlobalVarNode), LocalVarNode (soot.jimple.spark.pag.LocalVarNode), VarNode (soot.jimple.spark.pag.VarNode), PointsToSetInternal (soot.jimple.spark.sets.PointsToSetInternal), SparkField (soot.jimple.spark.pag.SparkField), AssignEdge (soot.jimple.spark.ondemand.pautil.AssignEdge), HybridPointsToSet (soot.jimple.spark.sets.HybridPointsToSet), Stack (soot.jimple.spark.ondemand.genericutil.Stack), ImmutableStack (soot.jimple.spark.ondemand.genericutil.ImmutableStack), AllocNode (soot.jimple.spark.pag.AllocNode), Propagator (soot.jimple.spark.ondemand.genericutil.Propagator), HashSet (java.util.HashSet)
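
findContextsForAllocs is internal machinery of the demand-driven refinement; a client normally wraps Spark's result in a DemandCSPointsTo instance and queries it through the ordinary PointsToAnalysis interface. A sketch under those assumptions follows; the makeWithBudget(maxTraversal, maxPasses, lazy) factory and the budget values shown are taken as given here and should be checked against your Soot version.

import soot.Local;
import soot.PointsToSet;
import soot.jimple.spark.ondemand.DemandCSPointsTo;

public class DemandDrivenQuery {
    // Refines a Spark result on demand for a single local. Assumes Spark has
    // already run, so the underlying PAG is available to the factory.
    public static PointsToSet refinedPointsTo(Local local) {
        // Budget: traversal limit, number of refinement passes, lazy evaluation.
        DemandCSPointsTo demandPta = DemandCSPointsTo.makeWithBudget(75000, 10, true);
        return demandPta.reachingObjects(local);
    }
}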

Aggregations

AllocNode (soot.jimple.spark.pag.AllocNode): 67 uses
Node (soot.jimple.spark.pag.Node): 37 uses
VarNode (soot.jimple.spark.pag.VarNode): 36 uses
LocalVarNode (soot.jimple.spark.pag.LocalVarNode): 28 uses
PointsToSetInternal (soot.jimple.spark.sets.PointsToSetInternal): 25 uses
FieldRefNode (soot.jimple.spark.pag.FieldRefNode): 22 uses
P2SetVisitor (soot.jimple.spark.sets.P2SetVisitor): 19 uses
PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint): 18 uses
SegmentNode (soot.jimple.spark.geom.dataRep.SegmentNode): 18 uses
SootMethod (soot.SootMethod): 17 uses
ClassConstantNode (soot.jimple.spark.pag.ClassConstantNode): 17 uses
SparkField (soot.jimple.spark.pag.SparkField): 16 uses
RefType (soot.RefType): 14 uses
Type (soot.Type): 13 uses
AllocDotField (soot.jimple.spark.pag.AllocDotField): 13 uses
NewInstanceNode (soot.jimple.spark.pag.NewInstanceNode): 11 uses
HashSet (java.util.HashSet): 10 uses
GlobalVarNode (soot.jimple.spark.pag.GlobalVarNode): 10 uses
StringConstantNode (soot.jimple.spark.pag.StringConstantNode): 10 uses
SootClass (soot.SootClass): 9 uses