Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
In the class DemandCSPointsTo, the method findUpContextsForVar:
protected CallingContextSet findUpContextsForVar(AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
  final AllocNode alloc = allocAndContext.alloc;
  final ImmutableStack<Integer> allocContext = allocAndContext.context;
  CallingContextSet tmpSet = checkUpContextCache(varContextAndUp, allocAndContext);
  if (tmpSet != null) {
    return tmpSet;
  }
  final CallingContextSet ret = new CallingContextSet();
  upContextCache.get(varContextAndUp).put(allocAndContext, ret);
  nesting++;
  if (DEBUG) {
    debugPrint("finding up context for " + varContextAndUp + " to " + alloc + " " + allocContext);
  }
  try {
    final Set<VarAndContext> marked = new HashSet<VarAndContext>();
    final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    p.prop(varContextAndUp);
    class UpContextEdgeHandler extends IncomingEdgeHandler {

      @Override
      public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        if (allocNode == alloc) {
          if (allocContext.topMatches(contextAndUp.context)) {
            ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
            ImmutableStack<Integer> toAdd = allocContext.popAll(contextAndUp.context).pushAll(reverse);
            if (DEBUG) {
              debugPrint("found up context " + toAdd);
            }
            ret.add(toAdd);
          } else if (contextAndUp.context.topMatches(allocContext)) {
            ImmutableStack<Integer> toAdd = contextAndUp.upContext.reverse();
            if (DEBUG) {
              debugPrint("found up context " + toAdd);
            }
            ret.add(toAdd);
          }
        }
      }

      @Override
      public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        if (DEBUG) {
          debugPrint("CHECKING " + alloc);
        }
        PointsToSetInternal tmp = new HybridPointsToSet(alloc.getType(), pag);
        tmp.add(alloc);
        AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), tmp);
        // Set allocContexts = Collections.singleton(new Object());
        if (!refine) {
          if (!allocContexts.isEmpty()) {
            ret.add(contextAndUp.upContext.reverse());
          }
        } else {
          if (!allocContexts.isEmpty()) {
            for (AllocAndContext t : allocContexts) {
              ImmutableStack<Integer> discoveredAllocContext = t.context;
              if (!allocContext.topMatches(discoveredAllocContext)) {
                continue;
              }
              ImmutableStack<Integer> trueAllocContext = allocContext.popAll(discoveredAllocContext);
              AllocAndContextSet allocAndContexts = findContextsForAllocs(new VarAndContext(storeBase, trueAllocContext), intersection);
              for (AllocAndContext allocAndContext : allocAndContexts) {
                // CallingContextSet upContexts;
                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                  ret.addAll(findUpContextsForVar(allocAndContext, new VarContextAndUp(loadBase, contextAndUp.context, contextAndUp.upContext)));
                } else {
                  CallingContextSet tmpContexts = findVarContextsFromAlloc(allocAndContext, loadBase);
                  // upContexts = new CallingContextSet();
                  for (ImmutableStack<Integer> tmpContext : tmpContexts) {
                    if (tmpContext.topMatches(contextAndUp.context)) {
                      ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
                      ImmutableStack<Integer> toAdd = tmpContext.popAll(contextAndUp.context).pushAll(reverse);
                      ret.add(toAdd);
                    }
                  }
                }
              }
            }
          }
        }
      }

      @Override
      Object getResult() {
        return ret;
      }

      @Override
      void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        ImmutableStack<Integer> upContext = contextAndUp.upContext;
        ImmutableStack<Integer> newUpContext = upContext;
        if (assignEdge.isParamEdge() && contextAndUp.context.isEmpty()) {
          if (upContext.size() < ImmutableStack.getMaxSize()) {
            newUpContext = pushWithRecursionCheck(upContext, assignEdge);
          }
        }
        p.prop(new VarContextAndUp(newVarAndContext.var, newVarAndContext.context, newUpContext));
      }

      @Override
      boolean shouldHandleSrc(VarNode src) {
        if (src instanceof GlobalVarNode) {
          // for now, just give up
          throw new TerminateEarlyException();
        }
        return src.getP2Set().contains(alloc);
      }
    }
    UpContextEdgeHandler edgeHandler = new UpContextEdgeHandler();
    processIncomingEdges(edgeHandler, worklist);
    nesting--;
    // }
    return ret;
  } catch (CallSiteException e) {
    upContextCache.remove(varContextAndUp);
    throw e;
  }
}
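The handler above tests whether a single allocation site can reach a variable by wrapping the AllocNode in a fresh one-element points-to set and reusing findContextsForAllocs. A minimal sketch of that idiom, assuming it runs inside DemandCSPointsTo where pag, EMPTY_CALLSTACK, and findContextsForAllocs are in scope (alloc and matchSrc are placeholder variables):

  // Build a points-to set containing only this allocation site (sketch;
  // assumes the surrounding DemandCSPointsTo fields are available).
  PointsToSetInternal only = new HybridPointsToSet(alloc.getType(), pag);
  only.add(alloc); // the set now represents {alloc}
  // Ask under which calling contexts matchSrc can point into that set.
  AllocAndContextSet contexts =
      findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), only);
  if (!contexts.isEmpty()) {
    // matchSrc may reach alloc in at least one calling context
  }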
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
In the class DemandCSPointsTo, the method getFlowsToHelper:
protected Set<VarNode> getFlowsToHelper(AllocAndContext allocAndContext) {
  Set<VarNode> ret = new ArraySet<VarNode>();
  try {
    HashSet<VarAndContext> marked = new HashSet<VarAndContext>();
    Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    AllocNode alloc = allocAndContext.alloc;
    ImmutableStack<Integer> allocContext = allocAndContext.context;
    Node[] newBarNodes = pag.allocLookup(alloc);
    for (int i = 0; i < newBarNodes.length; i++) {
      VarNode v = (VarNode) newBarNodes[i];
      ret.add(v);
      p.prop(new VarAndContext(v, allocContext));
    }
    while (!worklist.isEmpty()) {
      incrementNodesTraversed();
      VarAndContext curVarAndContext = worklist.pop();
      if (DEBUG) {
        debugPrint("looking at " + curVarAndContext);
      }
      VarNode curVar = curVarAndContext.var;
      ImmutableStack<Integer> curContext = curVarAndContext.context;
      ret.add(curVar);
      // assign
      Collection<AssignEdge> assignEdges = filterAssigns(curVar, curContext, false, true);
      for (AssignEdge assignEdge : assignEdges) {
        VarNode dst = assignEdge.getDst();
        ImmutableStack<Integer> newContext = curContext;
        if (assignEdge.isReturnEdge()) {
          if (!curContext.isEmpty()) {
            if (!callEdgeInSCC(assignEdge)) {
              assert assignEdge.getCallSite().equals(curContext.peek()) : assignEdge + " " + curContext;
              newContext = curContext.pop();
            } else {
              newContext = popRecursiveCallSites(curContext);
            }
          }
        } else if (assignEdge.isParamEdge()) {
          if (DEBUG)
            debugPrint("entering call site " + assignEdge.getCallSite());
          // if (!isRecursive(curContext, assignEdge)) {
          //   newContext = curContext.push(assignEdge.getCallSite());
          // }
          newContext = pushWithRecursionCheck(curContext, assignEdge);
        }
        if (assignEdge.isReturnEdge() && curContext.isEmpty() && csInfo.isVirtCall(assignEdge.getCallSite())) {
          Set<SootMethod> targets = refineCallSite(assignEdge.getCallSite(), newContext);
          if (!targets.contains(((LocalVarNode) assignEdge.getDst()).getMethod())) {
            continue;
          }
        }
        if (dst instanceof GlobalVarNode) {
          newContext = EMPTY_CALLSTACK;
        }
        p.prop(new VarAndContext(dst, newContext));
      }
      // putfield_bars
      Set<VarNode> matchTargets = vMatches.vMatchLookup(curVar);
      Node[] pfTargets = pag.storeLookup(curVar);
      for (int i = 0; i < pfTargets.length; i++) {
        FieldRefNode frNode = (FieldRefNode) pfTargets[i];
        final VarNode storeBase = frNode.getBase();
        SparkField field = frNode.getField();
        for (Pair<VarNode, VarNode> load : fieldToLoads.get(field)) {
          final VarNode loadBase = load.getO2();
          final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
          final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
          final VarNode matchTgt = load.getO1();
          if (matchTargets.contains(matchTgt)) {
            if (DEBUG) {
              debugPrint("match source " + matchTgt);
            }
            PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
            boolean checkField = fieldCheckHeuristic.validateMatchesForField(field);
            if (checkField) {
              AllocAndContextSet sharedAllocContexts = findContextsForAllocs(new VarAndContext(storeBase, curContext), intersection);
              for (AllocAndContext curAllocAndContext : sharedAllocContexts) {
                CallingContextSet upContexts;
                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                  upContexts = findUpContextsForVar(curAllocAndContext, new VarContextAndUp(loadBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                } else {
                  upContexts = findVarContextsFromAlloc(curAllocAndContext, loadBase);
                }
                for (ImmutableStack<Integer> upContext : upContexts) {
                  p.prop(new VarAndContext(matchTgt, upContext));
                }
              }
            } else {
              p.prop(new VarAndContext(matchTgt, EMPTY_CALLSTACK));
            }
            // h.handleMatchSrc(matchSrc, intersection, storeBase,
            //     loadBase, varAndContext, checkGetfield);
            // if (h.terminate())
            //   return;
          }
        }
      }
    }
    return ret;
  } catch (CallSiteException e) {
    allocAndContextCache.remove(allocAndContext);
    throw e;
  }
}
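The query is seeded from the pointer assignment graph: pag.allocLookup(alloc) yields the variables that are directly assigned the allocation site, and each one is both added to the result and propagated with the allocation's context. A condensed sketch of that seeding step (variable names are placeholders taken from the method above):

  // Seed the flows-to worklist with the direct "x = new T()" targets of alloc.
  AllocNode alloc = allocAndContext.alloc;
  for (Node n : pag.allocLookup(alloc)) {
    VarNode v = (VarNode) n;                      // allocLookup only returns VarNodes
    ret.add(v);                                   // v trivially flows from alloc
    p.prop(new VarAndContext(v, allocAndContext.context));
  }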
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
In the class DemandCSPointsTo, the method dumpPathForLoc:
/*
 * (non-Javadoc)
 *
 * @see AAA.summary.Refiner#dumpPathForBadLoc(soot.jimple.spark.pag.VarNode,
 *      soot.jimple.spark.pag.AllocNode)
 */
protected void dumpPathForLoc(VarNode v, final AllocNode badLoc, String filePrefix) {
  final HashSet<VarNode> visited = new HashSet<VarNode>();
  final DotPointerGraph dotGraph = new DotPointerGraph();
  final class Helper {

    boolean handle(VarNode curNode) {
      assert curNode.getP2Set().contains(badLoc);
      visited.add(curNode);
      Node[] newEdges = pag.allocInvLookup(curNode);
      for (int i = 0; i < newEdges.length; i++) {
        AllocNode alloc = (AllocNode) newEdges[i];
        if (alloc.equals(badLoc)) {
          dotGraph.addNew(alloc, curNode);
          return true;
        }
      }
      for (AssignEdge assignEdge : csInfo.getAssignEdges(curNode)) {
        VarNode other = assignEdge.getSrc();
        if (other.getP2Set().contains(badLoc) && !visited.contains(other) && handle(other)) {
          if (assignEdge.isCallEdge()) {
            dotGraph.addCall(other, curNode, assignEdge.getCallSite());
          } else {
            dotGraph.addAssign(other, curNode);
          }
          return true;
        }
      }
      Node[] loadEdges = pag.loadInvLookup(curNode);
      for (int i = 0; i < loadEdges.length; i++) {
        FieldRefNode frNode = (FieldRefNode) loadEdges[i];
        SparkField field = frNode.getField();
        VarNode base = frNode.getBase();
        PointsToSetInternal baseP2Set = base.getP2Set();
        for (Pair<VarNode, VarNode> store : fieldToStores.get(field)) {
          if (store.getO2().getP2Set().hasNonEmptyIntersection(baseP2Set)) {
            VarNode matchSrc = store.getO1();
            if (matchSrc.getP2Set().contains(badLoc) && !visited.contains(matchSrc) && handle(matchSrc)) {
              dotGraph.addMatch(matchSrc, curNode);
              return true;
            }
          }
        }
      }
      return false;
    }
  }
  Helper h = new Helper();
  h.handle(v);
  // logger.debug("" + dotGraph.numEdges() + " edges on path");
  dotGraph.dump("tmp/" + filePrefix + v.getNumber() + "_" + badLoc.getNumber() + ".dot");
}
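The recursive Helper.handle above stops as soon as it finds a direct allocation edge: pag.allocInvLookup(v) lists the allocation sites assigned into v, and a hit on badLoc closes the path. A stripped-down sketch of that base case (names mirror the method above):

  // Base case of the path search: is badLoc allocated directly into curNode?
  for (Node n : pag.allocInvLookup(curNode)) {
    AllocNode alloc = (AllocNode) n;
    if (alloc.equals(badLoc)) {
      dotGraph.addNew(alloc, curNode);            // record the "new" edge in the .dot output
      return true;                                // path found
    }
  }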
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
In the class FullSensitiveNode, the method heap_sensitive_intersection:
@Override
public boolean heap_sensitive_intersection(IVarAbstraction qv) {
  int i, j;
  FullSensitiveNode qn;
  SegmentNode p, q, pt[], qt[];
  boolean localToSameMethod;
  qn = (FullSensitiveNode) qv;
  localToSameMethod = (enclosingMethod() == qv.enclosingMethod());
  for (Iterator<AllocNode> it = pt_objs.keySet().iterator(); it.hasNext();) {
    AllocNode an = it.next();
    if (an instanceof ClassConstantNode)
      continue;
    if (an instanceof StringConstantNode)
      continue;
    qt = qn.find_points_to(an);
    if (qt == null)
      continue;
    pt = find_points_to(an);
    for (i = 0; i < GeometricManager.Divisions; ++i) {
      p = pt[i];
      while (p != null) {
        for (j = 0; j < GeometricManager.Divisions; ++j) {
          q = qt[j];
          while (q != null) {
            if (localToSameMethod) {
              // We can use a more precise alias test for pointers local to the same method
              if (p.intersect(q))
                return true;
            } else {
              if (p.projYIntersect(q))
                return true;
            }
            q = q.next;
          }
        }
        p = p.next;
      }
    }
  }
  return false;
}
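heap_sensitive_intersection is effectively a may-alias test on the geometric encoding: two pointers may alias if, for some common allocation site, their points-to figures intersect (projected onto the object axis when the pointers live in different methods). A hedged sketch of how a client might issue such a query; the lookup GeomPointsTo.findInternalNode(Node) is an assumption to be checked against your Soot version:

  // Hedged alias-check sketch: findInternalNode(...) is assumed to map a Spark
  // VarNode to its geometric abstraction; verify the name in your Soot version.
  GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
  IVarAbstraction pn = geomPTA.findInternalNode(varNodeP);
  IVarAbstraction qn = geomPTA.findInternalNode(varNodeQ);
  boolean mayAlias = pn != null && qn != null && pn.heap_sensitive_intersection(qn);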
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
In the class FullSensitiveNode, the method propagate:
/**
 * The place where you implement the pointer assignment reasoning.
 */
@Override
public void propagate(GeomPointsTo ptAnalyzer, IWorklist worklist) {
  int i, j;
  AllocNode obj;
  SegmentNode pts, pe, entry_pts[], entry_pe[];
  GeometricManager gm1, gm2;
  FullSensitiveNode qn, objn;
  boolean added, hasNewPointsTo;
  if (pt_objs.size() == 0)
    return;
  // We first build the flow edges that flow in to/out of object fields
  if (complex_cons != null) {
    for (Map.Entry<AllocNode, GeometricManager> entry : new_pts.entrySet()) {
      obj = entry.getKey();
      entry_pts = entry.getValue().getFigures();
      for (PlainConstraint pcons : complex_cons) {
        // For each newly discovered points-to object, construct its instance field
        objn = (FullSensitiveNode) ptAnalyzer.findInstanceField(obj, pcons.f);
        if (objn == null) {
          // This combination of alloc-dot-field must be invalid.
          // The expression p.f also implies that p cannot point to obj, so we remove it.
          // We label this event and sweep the garbage later.
          pt_objs.put(obj, (GeometricManager) deadManager);
          entry.setValue((GeometricManager) deadManager);
          break;
        }
        if (objn.willUpdate == false) {
          // This object field is not needed for computing
          // the points-to information of the seed pointers
          continue;
        }
        qn = (FullSensitiveNode) pcons.otherSide;
        for (i = 0; i < GeometricManager.Divisions; ++i) {
          pts = entry_pts[i];
          while (pts != null && pts.is_new == true) {
            switch (pcons.type) {
              case Constants.STORE_CONS:
                // Store: qv -> pv.field
                if (instantiateStoreConstraint(qn, objn, pts, (pcons.code << 8) | i))
                  worklist.push(qn);
                break;
              case Constants.LOAD_CONS:
                // Load: pv.field -> qv
                if (instantiateLoadConstraint(objn, qn, pts, (pcons.code << 8) | i))
                  worklist.push(objn);
                break;
            }
            pts = pts.next;
          }
        }
      }
    }
  }
  if (flowto.size() == 0)
    return;
  // Next, we process the assignments (e.g. p = q)
  for (Map.Entry<FullSensitiveNode, GeometricManager> entry1 : flowto.entrySet()) {
    added = false;
    qn = entry1.getKey();
    gm1 = entry1.getValue();
    entry_pe = gm1.getFigures();
    // We have new flow-to edges
    if (gm1.isThereUnprocessedFigures()) {
      // Second, we get the points-to shapes
      for (Map.Entry<AllocNode, GeometricManager> entry2 : pt_objs.entrySet()) {
        obj = entry2.getKey();
        gm2 = entry2.getValue();
        // Avoid the garbage
        if (gm2 == deadManager)
          continue;
        // Type filtering and flow-to-this filtering, a simple approach
        if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType()))
          continue;
        entry_pts = gm2.getFigures();
        hasNewPointsTo = gm2.isThereUnprocessedFigures();
        // We pair up all the geometric points-to tuples and flow edges
        for (j = 0; j < GeometricManager.Divisions; ++j) {
          pe = entry_pe[j];
          while (pe != null) {
            if (pe.is_new == false && hasNewPointsTo == false)
              break;
            for (i = 0; i < GeometricManager.Divisions; ++i) {
              pts = entry_pts[i];
              while (pts != null && (pts.is_new || pe.is_new)) {
                // Propagate this object
                if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j))
                  added = true;
                pts = pts.next;
              }
            }
            pe = pe.next;
          }
        }
      }
      gm1.flush();
    } else {
      for (Map.Entry<AllocNode, GeometricManager> entry2 : new_pts.entrySet()) {
        obj = entry2.getKey();
        gm2 = entry2.getValue();
        // Avoid the garbage
        if (gm2 == deadManager)
          continue;
        // Type filtering and flow-to-this filtering, a simple approach
        if (!ptAnalyzer.castNeverFails(obj.getType(), qn.getType()))
          continue;
        entry_pts = gm2.getFigures();
        // We pair up all the geometric points-to tuples and flow edges
        for (i = 0; i < GeometricManager.Divisions; ++i) {
          pts = entry_pts[i];
          while (pts != null && pts.is_new == true) {
            for (j = 0; j < GeometricManager.Divisions; ++j) {
              pe = entry_pe[j];
              while (pe != null) {
                // Propagate this object
                if (reasonAndPropagate(qn, obj, pts, pe, (i << 8) | j))
                  added = true;
                pe = pe.next;
              }
            }
            pts = pts.next;
          }
        }
      }
    }
    if (added)
      worklist.push(qn);
  }
  // System.err.println();
}
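propagate is the per-node step of a worklist solver: it instantiates store/load constraints for newly discovered points-to figures, then pairs new points-to tuples with flow-to edges and pushes any node whose solution changed. A hedged sketch of the driving loop; the IWorklist method names (has_job, next) are assumptions based on the geomPA interfaces and should be checked against your Soot version:

  // Hedged sketch of the solver loop that calls propagate().
  while (worklist.has_job()) {
    IVarAbstraction node = worklist.next();
    node.propagate(geomPTA, worklist);   // may push further nodes onto the worklist
  }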