Search in sources:

Example 1 with SparkField

Use of soot.jimple.spark.pag.SparkField in the project soot by Sable.

From the class GeomPointsTo, method preprocess.

/**
 *	Read in the program facts generated by SPARK.
 *  We also construct our own call graph and pointer variables.
 *
 *  Overview of the steps performed:
 *  1. Number every reachable method and rebuild the call graph in this
 *     analysis' own CgEdge representation, attaching entry methods to the
 *     synthetic Constants.SUPER_MAIN root.
 *  2. Wrap every SPARK node (VarNode, AllocDotField, AllocNode) in an
 *     IVarAbstraction and collect them in {@code pointers}/{@code allocations}.
 *  3. Translate SPARK's alloc/simple/load/store edges into PlainConstraint
 *     objects (NEW/ASSIGN/LOAD/STORE) stored in {@code constraints}.
 *  4. Allocate the per-function arrays used by the later SCC/context phases.
 */
private void preprocess() {
    int id;
    int s, t;
    // Build the call graph
    // n_func reserves one extra slot for the synthetic SUPER_MAIN root
    n_func = Scene.v().getReachableMethods().size() + 1;
    call_graph = new CgEdge[n_func];
    n_calls = 0;
    n_reach_spark_user_methods = 0;
    // Function IDs are assigned starting from 1
    id = 1;
    QueueReader<MethodOrMethodContext> smList = Scene.v().getReachableMethods().listener();
    CallGraph soot_callgraph = Scene.v().getCallGraph();
    while (smList.hasNext()) {
        final SootMethod func = smList.next().method();
        // Maintain the bidirectional method <-> integer-ID mapping
        func2int.put(func, id);
        int2func.put(id, func);
        /*
         * We cannot identify all entry methods since some entry methods call themselves.
         * In that case, the Soot CallGraph.isEntryMethod() function returns false.
         */
        if (soot_callgraph.isEntryMethod(func) || func.isEntryMethod()) {
            // Link each entry method under the synthetic SUPER_MAIN root;
            // new edges are prepended to the per-function adjacency list
            CgEdge p = new CgEdge(Constants.SUPER_MAIN, id, null, call_graph[Constants.SUPER_MAIN]);
            call_graph[Constants.SUPER_MAIN] = p;
            n_calls++;
        }
        if (!func.isJavaLibraryMethod())
            ++n_reach_spark_user_methods;
        id++;
    }
    // Next, we scan all the call edges and rebuild the call graph in our own vocabulary
    QueueReader<Edge> edgeList = Scene.v().getCallGraph().listener();
    while (edgeList.hasNext()) {
        Edge edge = edgeList.next();
        // Static initializer edges are ignored entirely
        if (edge.isClinit()) {
            continue;
        }
        SootMethod src_func = edge.src();
        SootMethod tgt_func = edge.tgt();
        s = func2int.get(src_func);
        t = func2int.get(tgt_func);
        // Create a new call edge in our own format
        CgEdge p = new CgEdge(s, t, edge, call_graph[s]);
        call_graph[s] = p;
        edgeMapping.put(edge, p);
        // We collect callsite information
        Stmt callsite = edge.srcStmt();
        if (edge.isThreadRunCall() || edge.kind().isExecutor() || edge.kind().isAsyncTask()) {
            // We don't modify the treatment to the thread run() calls
            thread_run_callsites.add(callsite);
        } else if (edge.isInstance() && !edge.isSpecial()) {
            // We try to refine the virtual callsites (virtual + interface) with multiple call targets
            InstanceInvokeExpr expr = (InstanceInvokeExpr) callsite.getInvokeExpr();
            if (expr.getMethodRef().getSignature().contains("<java.lang.Thread: void start()>")) {
                // It is a thread start function
                thread_run_callsites.add(callsite);
            } else {
                // Record the receiver variable; only callsites with a known
                // base variable and multiple targets are refinement candidates
                p.base_var = findLocalVarNode(expr.getBase());
                if (SootInfo.countCallEdgesForCallsite(callsite, true) > 1 && p.base_var != null) {
                    multiCallsites.add(callsite);
                }
            }
        }
        ++n_calls;
    }
    // We build the wrappers for all the pointers built by SPARK
    for (Iterator<VarNode> it = getVarNodeNumberer().iterator(); it.hasNext(); ) {
        VarNode vn = it.next();
        IVarAbstraction pn = makeInternalNode(vn);
        pointers.add(pn);
    }
    for (Iterator<AllocDotField> it = getAllocDotFieldNodeNumberer().iterator(); it.hasNext(); ) {
        AllocDotField adf = it.next();
        // Some allocdotfield is invalid, we check and remove them
        SparkField field = adf.getField();
        if (field instanceof SootField) {
            // This is an instance field of a class
            Type decType = ((SootField) field).getDeclaringClass().getType();
            Type baseType = adf.getBase().getType();
            // baseType must be a sub type of decType
            // NOTE(review): castNeverFails(baseType, decType) appears to act as the
            // subtype test here; an o.f pair failing it is skipped (not wrapped)
            if (!castNeverFails(baseType, decType))
                continue;
        }
        IVarAbstraction pn = makeInternalNode(adf);
        pointers.add(pn);
    }
    for (Iterator<AllocNode> it = getAllocNodeNumberer().iterator(); it.hasNext(); ) {
        AllocNode obj = it.next();
        IVarAbstraction pn = makeInternalNode(obj);
        allocations.add(pn);
    }
    // The address constraints, new obj -> p
    for (Object object : allocSources()) {
        IVarAbstraction obj = makeInternalNode((AllocNode) object);
        Node[] succs = allocLookup((AllocNode) object);
        for (Node element0 : succs) {
            PlainConstraint cons = new PlainConstraint();
            IVarAbstraction p = makeInternalNode(element0);
            cons.expr.setPair(obj, p);
            cons.type = Constants.NEW_CONS;
            constraints.add(cons);
        }
    }
    // The assign constraints, p -> q
    // The intercall pair is reused across iterations purely as a lookup key
    Pair<Node, Node> intercall = new Pair<Node, Node>();
    for (Object object : simpleSources()) {
        IVarAbstraction p = makeInternalNode((VarNode) object);
        Node[] succs = simpleLookup((VarNode) object);
        for (Node element0 : succs) {
            PlainConstraint cons = new PlainConstraint();
            IVarAbstraction q = makeInternalNode(element0);
            cons.expr.setPair(p, q);
            cons.type = Constants.ASSIGN_CONS;
            // Attach the inter-procedural call edges this assignment models
            intercall.setPair((VarNode) object, element0);
            cons.interCallEdges = lookupEdgesForAssignment(intercall);
            constraints.add(cons);
        }
    }
    intercall = null;
    assign2edges.clear();
    // The load constraints, p.f -> q
    for (Object object : loadSources()) {
        FieldRefNode frn = (FieldRefNode) object;
        IVarAbstraction p = makeInternalNode(frn.getBase());
        Node[] succs = loadLookup(frn);
        for (Node element0 : succs) {
            PlainConstraint cons = new PlainConstraint();
            IVarAbstraction q = makeInternalNode(element0);
            cons.f = frn.getField();
            cons.expr.setPair(p, q);
            cons.type = Constants.LOAD_CONS;
            constraints.add(cons);
        }
    }
    // The store constraints, p -> q.f
    for (Object object : storeSources()) {
        IVarAbstraction p = makeInternalNode((VarNode) object);
        Node[] succs = storeLookup((VarNode) object);
        for (Node element0 : succs) {
            PlainConstraint cons = new PlainConstraint();
            FieldRefNode frn = (FieldRefNode) element0;
            IVarAbstraction q = makeInternalNode(frn.getBase());
            cons.f = frn.getField();
            cons.expr.setPair(p, q);
            cons.type = Constants.STORE_CONS;
            constraints.add(cons);
        }
    }
    n_init_constraints = constraints.size();
    // Initialize other stuff
    // Per-function scratch arrays for the later SCC / context-sizing phases
    low_cg = new int[n_func];
    vis_cg = new int[n_func];
    rep_cg = new int[n_func];
    indeg_cg = new int[n_func];
    scc_size = new int[n_func];
    block_num = new int[n_func];
    context_size = new long[n_func];
    max_context_size_block = new long[n_func];
}
Also used : CgEdge(soot.jimple.spark.geom.dataRep.CgEdge) AllocDotField(soot.jimple.spark.pag.AllocDotField) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) SparkField(soot.jimple.spark.pag.SparkField) FieldRefNode(soot.jimple.spark.pag.FieldRefNode) ContextVarNode(soot.jimple.spark.pag.ContextVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) Node(soot.jimple.spark.pag.Node) VarNode(soot.jimple.spark.pag.VarNode) AllocNode(soot.jimple.spark.pag.AllocNode) Stmt(soot.jimple.Stmt) MethodOrMethodContext(soot.MethodOrMethodContext) Pair(soot.toolkits.scalar.Pair) ContextVarNode(soot.jimple.spark.pag.ContextVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) VarNode(soot.jimple.spark.pag.VarNode) InstanceInvokeExpr(soot.jimple.InstanceInvokeExpr) PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) RefType(soot.RefType) Type(soot.Type) FieldRefNode(soot.jimple.spark.pag.FieldRefNode) CallGraph(soot.jimple.toolkits.callgraph.CallGraph) AllocNode(soot.jimple.spark.pag.AllocNode) SootMethod(soot.SootMethod) SootField(soot.SootField) Edge(soot.jimple.toolkits.callgraph.Edge) CgEdge(soot.jimple.spark.geom.dataRep.CgEdge)

Example 2 with SparkField

Use of soot.jimple.spark.pag.SparkField in the project soot by Sable.

From the class OfflineProcessor, method buildDependenceGraph.

/**
 * Builds the pointer dependence graph by reversing the assignment relations,
 * e.g., the assignment p = q produces the edge p -> q.
 * Note that the assignments that are eliminated by local variable merging should be used here.
 * Otherwise, the graph would be erroneously disconnected.
 */
protected void buildDependenceGraph() {
    for (PlainConstraint cons : geomPTA.constraints) {
        // In our constraint representation, lhs -> rhs means rhs = lhs.
        final IVarAbstraction lhs = cons.getLHS();
        final IVarAbstraction rhs = cons.getRHS();
        // The instance field accessed by a load/store constraint (unused for assigns)
        final SparkField field = cons.f;
        IVarAbstraction rep;
        // Now we use this constraint for graph construction
        switch(cons.type) {
            // rhs = lhs
            case Constants.ASSIGN_CONS:
                add_graph_edge(rhs.id, lhs.id);
                break;
            // rhs = lhs.f
            case Constants.LOAD_CONS:
                {
                    rep = lhs.getRepresentative();
                    if (!rep.hasPTResult()) {
                        // No geometric result yet: enumerate the SPARK points-to
                        // set of the base pointer to resolve the o.f nodes
                        lhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {

                            @Override
                            public void visit(Node n) {
                                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                                if (padf == null || !padf.reachable())
                                    return;
                                off_graph_edge e = add_graph_edge(rhs.id, padf.id);
                                e.base_var = lhs;
                            }
                        });
                    } else {
                        // Use geom
                        for (AllocNode o : rep.get_all_points_to_objects()) {
                            IVarAbstraction padf = geomPTA.findInstanceField(o, field);
                            if (padf == null || !padf.reachable())
                                continue;
                            off_graph_edge e = add_graph_edge(rhs.id, padf.id);
                            e.base_var = lhs;
                        }
                    }
                }
                break;
            // rhs.f = lhs
            case Constants.STORE_CONS:
                {
                    rep = rhs.getRepresentative();
                    if (!rep.hasPTResult()) {
                        // No geometric result yet: fall back to the SPARK points-to set
                        rhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {

                            @Override
                            public void visit(Node n) {
                                IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                                if (padf == null || !padf.reachable())
                                    return;
                                off_graph_edge e = add_graph_edge(padf.id, lhs.id);
                                e.base_var = rhs;
                            }
                        });
                    } else {
                        // use geom
                        for (AllocNode o : rep.get_all_points_to_objects()) {
                            IVarAbstraction padf = geomPTA.findInstanceField(o, field);
                            if (padf == null || !padf.reachable())
                                continue;
                            off_graph_edge e = add_graph_edge(padf.id, lhs.id);
                            e.base_var = rhs;
                        }
                    }
                }
                break;
            // NEW_CONS (allocation) contributes no assignment edge, so it is skipped
        }
    }
}
Also used : PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) AllocNode(soot.jimple.spark.pag.AllocNode) SparkField(soot.jimple.spark.pag.SparkField) GlobalVarNode(soot.jimple.spark.pag.GlobalVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) Node(soot.jimple.spark.pag.Node) VarNode(soot.jimple.spark.pag.VarNode) AllocNode(soot.jimple.spark.pag.AllocNode) P2SetVisitor(soot.jimple.spark.sets.P2SetVisitor)

Example 3 with SparkField

Use of soot.jimple.spark.pag.SparkField in the project soot by Sable.

From the class OfflineProcessor, method distillConstraints.

/**
 * Eliminate the constraints that do not contribute points-to information to the seed pointers.
 * Prerequisite: dependence graph.
 * A constraint stays active only if evaluating it could update at least one
 * pointer marked by computeReachablePts().
 */
protected void distillConstraints() {
    IVarAbstraction pn;
    // Mark the pointers
    computeReachablePts();
    // Mark the constraints
    for (PlainConstraint cons : geomPTA.constraints) {
        // We only look at the receiver pointers
        pn = cons.getRHS();
        final SparkField field = cons.f;
        visitedFlag = false;
        switch(cons.type) {
            case Constants.NEW_CONS:
            case Constants.ASSIGN_CONS:
            case Constants.LOAD_CONS:
                visitedFlag = pn.willUpdate;
                break;
            case Constants.STORE_CONS:
                /*
                 * Interesting point in store constraint p.f = q:
                 * For example, pts(p) = { o1, o2 };
                 * If any of the o1.f and the o2.f (e.g. o1.f) will be updated, this constraint should be kept.
                 * However, in the points-to analysis, we only assign to o1.f.
                 */
                pn = pn.getRepresentative();
                if (!pn.hasPTResult()) {
                    // No geometric result yet: scan the SPARK points-to set of the base
                    pn.getWrappedNode().getP2Set().forall(new P2SetVisitor() {

                        @Override
                        public void visit(Node n) {
                            // Early exit once any o.f is known to be updated
                            if (visitedFlag)
                                return;
                            IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
                            if (padf == null || !padf.reachable())
                                return;
                            visitedFlag |= padf.willUpdate;
                        }
                    });
                } else {
                    // Use the geometric points-to result
                    for (AllocNode o : pn.get_all_points_to_objects()) {
                        IVarAbstraction padf = geomPTA.findInstanceField(o, field);
                        if (padf == null || !padf.reachable())
                            continue;
                        visitedFlag |= padf.willUpdate;
                        if (visitedFlag)
                            break;
                    }
                }
                break;
        }
        cons.isActive = visitedFlag;
    }
}
Also used : PlainConstraint(soot.jimple.spark.geom.dataRep.PlainConstraint) AllocNode(soot.jimple.spark.pag.AllocNode) SparkField(soot.jimple.spark.pag.SparkField) GlobalVarNode(soot.jimple.spark.pag.GlobalVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) Node(soot.jimple.spark.pag.Node) VarNode(soot.jimple.spark.pag.VarNode) AllocNode(soot.jimple.spark.pag.AllocNode) P2SetVisitor(soot.jimple.spark.sets.P2SetVisitor)

Example 4 with SparkField

Use of soot.jimple.spark.pag.SparkField in the project soot by Sable.

From the class DemandCSPointsTo, method findContextsForAllocs.

/**
 * Computes which allocation sites in {@code locs} the given variable-in-context
 * may point to, along with the calling contexts of those allocations.
 * Results are cached in {@code contextsForAllocsCache}; recursive queries are
 * tolerated up to a bounded depth, beyond which a TerminateEarlyException
 * aborts the refinement.
 *
 * @param varAndContext the variable and calling context being queried
 * @param locs the allocation sites of interest
 * @return the allocations (with contexts) from {@code locs} that the variable may point to
 * @throws TerminateEarlyException if the recursion budget is exhausted
 */
protected AllocAndContextSet findContextsForAllocs(final VarAndContext varAndContext, PointsToSetInternal locs) {
    if (contextForAllocsStack.contains(varAndContext)) {
        // recursion; check depth
        // we're fine for x = x.next
        int oldIndex = contextForAllocsStack.indexOf(varAndContext);
        if (oldIndex != contextForAllocsStack.size() - 1) {
            if (recursionDepth == -1) {
                recursionDepth = oldIndex + 1;
                if (DEBUG) {
                    debugPrint("RECURSION depth = " + recursionDepth);
                }
            } else if (contextForAllocsStack.size() - oldIndex > 5) {
                // just give up
                throw new TerminateEarlyException();
            }
        }
    }
    contextForAllocsStack.push(varAndContext);
    final AllocAndContextSet ret = new AllocAndContextSet();
    // realLocs is the subset of locs not answered by the cache; ret already
    // holds any cached answers
    final PointsToSetInternal realLocs = checkContextsForAllocsCache(varAndContext, ret, locs);
    if (realLocs.isEmpty()) {
        if (DEBUG) {
            debugPrint("cached result " + ret);
        }
        contextForAllocsStack.pop();
        return ret;
    }
    nesting++;
    if (DEBUG) {
        debugPrint("finding alloc contexts for " + varAndContext);
    }
    try {
        final Set<VarAndContext> marked = new HashSet<VarAndContext>();
        final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
        final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
        p.prop(varAndContext);
        IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {

            @Override
            public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
                // A matching allocation reached along the traversal is a result
                if (realLocs.contains(allocNode)) {
                    if (DEBUG) {
                        debugPrint("found alloc " + allocNode);
                    }
                    ret.add(new AllocAndContext(allocNode, origVarAndContext.context));
                }
            }

            @Override
            public void handleMatchSrc(final VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
                if (DEBUG) {
                    debugPrint("handling src " + matchSrc);
                    debugPrint("intersection " + intersection);
                }
                // Without refinement, propagate context-insensitively
                if (!refine) {
                    p.prop(new VarAndContext(matchSrc, EMPTY_CALLSTACK));
                    return;
                }
                AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
                if (DEBUG) {
                    debugPrint("alloc contexts " + allocContexts);
                }
                for (AllocAndContext allocAndContext : allocContexts) {
                    if (DEBUG) {
                        debugPrint("alloc and context " + allocAndContext);
                    }
                    CallingContextSet matchSrcContexts;
                    if (fieldCheckHeuristic.validFromBothEnds(field)) {
                        matchSrcContexts = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                    } else {
                        matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
                    }
                    for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
                        p.prop(new VarAndContext(matchSrc, matchSrcContext));
                    }
                }
            }

            @Override
            Object getResult() {
                return ret;
            }

            @Override
            void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
                p.prop(newVarAndContext);
            }

            @Override
            boolean shouldHandleSrc(VarNode src) {
                return realLocs.hasNonEmptyIntersection(src.getP2Set());
            }
        };
        processIncomingEdges(edgeHandler, worklist);
        // update the cache
        if (recursionDepth != -1) {
            // if we're beyond recursion, don't cache anything
            if (contextForAllocsStack.size() > recursionDepth) {
                if (DEBUG) {
                    debugPrint("REMOVING " + varAndContext);
                    debugPrint(contextForAllocsStack.toString());
                }
                contextsForAllocsCache.remove(varAndContext);
            } else {
                assert contextForAllocsStack.size() == recursionDepth : recursionDepth + " " + contextForAllocsStack;
                recursionDepth = -1;
                mergeIntoContextsForAllocsCache(varAndContext, locs, ret);
            }
        } else {
            mergeIntoContextsForAllocsCache(varAndContext, locs, ret);
        }
        nesting--;
        return ret;
    } catch (CallSiteException e) {
        contextsForAllocsCache.remove(varAndContext);
        throw e;
    } finally {
        contextForAllocsStack.pop();
    }
}

/**
 * Merges {@code ret} into the cache entry for {@code varAndContext}; if no
 * entry exists yet, creates one keyed by a snapshot of {@code locs}.
 * Extracted to remove the duplicated cache-update logic in
 * {@link #findContextsForAllocs}.
 */
private void mergeIntoContextsForAllocsCache(VarAndContext varAndContext, PointsToSetInternal locs, AllocAndContextSet ret) {
    if (contextsForAllocsCache.containsKey(varAndContext)) {
        contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
    } else {
        PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
        storedSet.addAll(locs, null);
        contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
    }
}
Also used : GlobalVarNode(soot.jimple.spark.pag.GlobalVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) VarNode(soot.jimple.spark.pag.VarNode) PointsToSetInternal(soot.jimple.spark.sets.PointsToSetInternal) SparkField(soot.jimple.spark.pag.SparkField) AssignEdge(soot.jimple.spark.ondemand.pautil.AssignEdge) HybridPointsToSet(soot.jimple.spark.sets.HybridPointsToSet) Stack(soot.jimple.spark.ondemand.genericutil.Stack) ImmutableStack(soot.jimple.spark.ondemand.genericutil.ImmutableStack) AllocNode(soot.jimple.spark.pag.AllocNode) Propagator(soot.jimple.spark.ondemand.genericutil.Propagator) HashSet(java.util.HashSet)

Example 5 with SparkField

Use of soot.jimple.spark.pag.SparkField in the project soot by Sable.

From the class DemandCSPointsTo, method findUpContextsForVar.

/**
 * Finds the "up contexts" under which the given variable (queried in the
 * context described by {@code varContextAndUp}) can point to the specific
 * allocation-with-context {@code allocAndContext}.
 * Results are cached in {@code upContextCache}.
 *
 * @param allocAndContext the allocation site and its calling context to match
 * @param varContextAndUp the variable, its context, and the up-context accumulated so far
 * @return the set of calling contexts from which the variable reaches the allocation
 * @throws TerminateEarlyException if a global variable is encountered (refinement gives up)
 */
protected CallingContextSet findUpContextsForVar(AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
    final AllocNode alloc = allocAndContext.alloc;
    final ImmutableStack<Integer> allocContext = allocAndContext.context;
    CallingContextSet tmpSet = checkUpContextCache(varContextAndUp, allocAndContext);
    if (tmpSet != null) {
        return tmpSet;
    }
    final CallingContextSet ret = new CallingContextSet();
    // Install the (still empty) result eagerly so recursive queries hit the cache
    upContextCache.get(varContextAndUp).put(allocAndContext, ret);
    nesting++;
    if (DEBUG) {
        debugPrint("finding up context for " + varContextAndUp + " to " + alloc + " " + allocContext);
    }
    try {
        final Set<VarAndContext> marked = new HashSet<VarAndContext>();
        final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
        final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
        p.prop(varContextAndUp);
        class UpContextEdgeHandler extends IncomingEdgeHandler {

            @Override
            public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
                VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
                if (allocNode == alloc) {
                    // The target allocation is reached; reconcile the two contexts
                    if (allocContext.topMatches(contextAndUp.context)) {
                        ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
                        ImmutableStack<Integer> toAdd = allocContext.popAll(contextAndUp.context).pushAll(reverse);
                        if (DEBUG) {
                            debugPrint("found up context " + toAdd);
                        }
                        ret.add(toAdd);
                    } else if (contextAndUp.context.topMatches(allocContext)) {
                        ImmutableStack<Integer> toAdd = contextAndUp.upContext.reverse();
                        if (DEBUG) {
                            debugPrint("found up context " + toAdd);
                        }
                        ret.add(toAdd);
                    }
                }
            }

            @Override
            public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
                VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
                if (DEBUG) {
                    debugPrint("CHECKING " + alloc);
                }
                PointsToSetInternal tmp = new HybridPointsToSet(alloc.getType(), pag);
                tmp.add(alloc);
                AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), tmp);
                if (!refine) {
                    // Without refinement, any reachability at all suffices
                    if (!allocContexts.isEmpty()) {
                        ret.add(contextAndUp.upContext.reverse());
                    }
                } else {
                    if (!allocContexts.isEmpty()) {
                        for (AllocAndContext t : allocContexts) {
                            ImmutableStack<Integer> discoveredAllocContext = t.context;
                            if (!allocContext.topMatches(discoveredAllocContext)) {
                                continue;
                            }
                            ImmutableStack<Integer> trueAllocContext = allocContext.popAll(discoveredAllocContext);
                            AllocAndContextSet allocAndContexts = findContextsForAllocs(new VarAndContext(storeBase, trueAllocContext), intersection);
                            for (AllocAndContext allocAndContext : allocAndContexts) {
                                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                                    ret.addAll(findUpContextsForVar(allocAndContext, new VarContextAndUp(loadBase, contextAndUp.context, contextAndUp.upContext)));
                                } else {
                                    CallingContextSet tmpContexts = findVarContextsFromAlloc(allocAndContext, loadBase);
                                    for (ImmutableStack<Integer> tmpContext : tmpContexts) {
                                        if (tmpContext.topMatches(contextAndUp.context)) {
                                            ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
                                            ImmutableStack<Integer> toAdd = tmpContext.popAll(contextAndUp.context).pushAll(reverse);
                                            ret.add(toAdd);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }

            @Override
            Object getResult() {
                return ret;
            }

            @Override
            void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
                VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
                ImmutableStack<Integer> upContext = contextAndUp.upContext;
                ImmutableStack<Integer> newUpContext = upContext;
                // Only grow the up-context at parameter edges once the down-context is exhausted
                if (assignEdge.isParamEdge() && contextAndUp.context.isEmpty()) {
                    if (upContext.size() < ImmutableStack.getMaxSize()) {
                        newUpContext = pushWithRecursionCheck(upContext, assignEdge);
                    }
                }
                p.prop(new VarContextAndUp(newVarAndContext.var, newVarAndContext.context, newUpContext));
            }

            @Override
            boolean shouldHandleSrc(VarNode src) {
                if (src instanceof GlobalVarNode) {
                    // for now, just give up
                    throw new TerminateEarlyException();
                }
                return src.getP2Set().contains(alloc);
            }
        }
        UpContextEdgeHandler edgeHandler = new UpContextEdgeHandler();
        processIncomingEdges(edgeHandler, worklist);
        nesting--;
        return ret;
    } catch (CallSiteException e) {
        upContextCache.remove(varContextAndUp);
        throw e;
    }
}
Also used : GlobalVarNode(soot.jimple.spark.pag.GlobalVarNode) SparkField(soot.jimple.spark.pag.SparkField) AssignEdge(soot.jimple.spark.ondemand.pautil.AssignEdge) Propagator(soot.jimple.spark.ondemand.genericutil.Propagator) ImmutableStack(soot.jimple.spark.ondemand.genericutil.ImmutableStack) HashSet(java.util.HashSet) GlobalVarNode(soot.jimple.spark.pag.GlobalVarNode) LocalVarNode(soot.jimple.spark.pag.LocalVarNode) VarNode(soot.jimple.spark.pag.VarNode) PointsToSetInternal(soot.jimple.spark.sets.PointsToSetInternal) HybridPointsToSet(soot.jimple.spark.sets.HybridPointsToSet) Stack(soot.jimple.spark.ondemand.genericutil.Stack) ImmutableStack(soot.jimple.spark.ondemand.genericutil.ImmutableStack) AllocNode(soot.jimple.spark.pag.AllocNode)

Aggregations

AllocNode (soot.jimple.spark.pag.AllocNode)19 SparkField (soot.jimple.spark.pag.SparkField)19 VarNode (soot.jimple.spark.pag.VarNode)19 Node (soot.jimple.spark.pag.Node)17 FieldRefNode (soot.jimple.spark.pag.FieldRefNode)14 LocalVarNode (soot.jimple.spark.pag.LocalVarNode)14 GlobalVarNode (soot.jimple.spark.pag.GlobalVarNode)13 PointsToSetInternal (soot.jimple.spark.sets.PointsToSetInternal)12 HashSet (java.util.HashSet)7 AssignEdge (soot.jimple.spark.ondemand.pautil.AssignEdge)7 P2SetVisitor (soot.jimple.spark.sets.P2SetVisitor)7 SootMethod (soot.SootMethod)5 ImmutableStack (soot.jimple.spark.ondemand.genericutil.ImmutableStack)5 Stack (soot.jimple.spark.ondemand.genericutil.Stack)5 AllocDotField (soot.jimple.spark.pag.AllocDotField)5 ClassConstantNode (soot.jimple.spark.pag.ClassConstantNode)5 NewInstanceNode (soot.jimple.spark.pag.NewInstanceNode)5 PlainConstraint (soot.jimple.spark.geom.dataRep.PlainConstraint)4 Propagator (soot.jimple.spark.ondemand.genericutil.Propagator)4 Iterator (java.util.Iterator)3