Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class DemandCSPointsTo, method getFlowsToHelper:
protected Set<VarNode> getFlowsToHelper(AllocAndContext allocAndContext) {
  Set<VarNode> ret = new ArraySet<VarNode>();
  try {
    HashSet<VarAndContext> marked = new HashSet<VarAndContext>();
    Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    AllocNode alloc = allocAndContext.alloc;
    ImmutableStack<Integer> allocContext = allocAndContext.context;
    Node[] newBarNodes = pag.allocLookup(alloc);
    for (int i = 0; i < newBarNodes.length; i++) {
      VarNode v = (VarNode) newBarNodes[i];
      ret.add(v);
      p.prop(new VarAndContext(v, allocContext));
    }
    while (!worklist.isEmpty()) {
      incrementNodesTraversed();
      VarAndContext curVarAndContext = worklist.pop();
      if (DEBUG) {
        debugPrint("looking at " + curVarAndContext);
      }
      VarNode curVar = curVarAndContext.var;
      ImmutableStack<Integer> curContext = curVarAndContext.context;
      ret.add(curVar);
      // assign
      Collection<AssignEdge> assignEdges = filterAssigns(curVar, curContext, false, true);
      for (AssignEdge assignEdge : assignEdges) {
        VarNode dst = assignEdge.getDst();
        ImmutableStack<Integer> newContext = curContext;
        if (assignEdge.isReturnEdge()) {
          if (!curContext.isEmpty()) {
            if (!callEdgeInSCC(assignEdge)) {
              assert assignEdge.getCallSite().equals(curContext.peek()) : assignEdge + " " + curContext;
              newContext = curContext.pop();
            } else {
              newContext = popRecursiveCallSites(curContext);
            }
          }
        } else if (assignEdge.isParamEdge()) {
          if (DEBUG)
            debugPrint("entering call site " + assignEdge.getCallSite());
          // if (!isRecursive(curContext, assignEdge)) {
          // newContext = curContext.push(assignEdge
          // .getCallSite());
          // }
          newContext = pushWithRecursionCheck(curContext, assignEdge);
        }
        if (assignEdge.isReturnEdge() && curContext.isEmpty() && csInfo.isVirtCall(assignEdge.getCallSite())) {
          Set<SootMethod> targets = refineCallSite(assignEdge.getCallSite(), newContext);
          if (!targets.contains(((LocalVarNode) assignEdge.getDst()).getMethod())) {
            continue;
          }
        }
        if (dst instanceof GlobalVarNode) {
          newContext = EMPTY_CALLSTACK;
        }
        p.prop(new VarAndContext(dst, newContext));
      }
      // putfield_bars
      Set<VarNode> matchTargets = vMatches.vMatchLookup(curVar);
      Node[] pfTargets = pag.storeLookup(curVar);
      for (int i = 0; i < pfTargets.length; i++) {
        FieldRefNode frNode = (FieldRefNode) pfTargets[i];
        final VarNode storeBase = frNode.getBase();
        SparkField field = frNode.getField();
        // FieldRefNode>(curVar, frNode);
        for (Pair<VarNode, VarNode> load : fieldToLoads.get(field)) {
          final VarNode loadBase = load.getO2();
          final PointsToSetInternal loadBaseP2Set = loadBase.getP2Set();
          final PointsToSetInternal storeBaseP2Set = storeBase.getP2Set();
          final VarNode matchTgt = load.getO1();
          if (matchTargets.contains(matchTgt)) {
            if (DEBUG) {
              debugPrint("match source " + matchTgt);
            }
            PointsToSetInternal intersection = SootUtil.constructIntersection(storeBaseP2Set, loadBaseP2Set, pag);
            boolean checkField = fieldCheckHeuristic.validateMatchesForField(field);
            if (checkField) {
              AllocAndContextSet sharedAllocContexts = findContextsForAllocs(new VarAndContext(storeBase, curContext), intersection);
              for (AllocAndContext curAllocAndContext : sharedAllocContexts) {
                CallingContextSet upContexts;
                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                  upContexts = findUpContextsForVar(curAllocAndContext, new VarContextAndUp(loadBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
                } else {
                  upContexts = findVarContextsFromAlloc(curAllocAndContext, loadBase);
                }
                for (ImmutableStack<Integer> upContext : upContexts) {
                  p.prop(new VarAndContext(matchTgt, upContext));
                }
              }
            } else {
              p.prop(new VarAndContext(matchTgt, EMPTY_CALLSTACK));
            }
            // h.handleMatchSrc(matchSrc, intersection,
            // storeBase,
            // loadBase, varAndContext, checkGetfield);
            // if (h.terminate())
            // return;
          }
        }
      }
    }
    return ret;
  } catch (CallSiteException e) {
    allocAndContextCache.remove(allocAndContext);
    throw e;
  }
}
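
getFlowsToHelper is the core of the demand-driven flows-to traversal: starting from an allocation site and its calling context, it walks assignment, parameter/return, and putfield/load match edges to collect every VarNode the object may flow to. The sketch below shows how a client might obtain the refinement-based analysis and trigger such traversals through an ordinary points-to query; the factory DemandCSPointsTo.makeWithBudget and its argument order (traversal budget, passes, lazy) are assumptions about the Soot API, and the budget values are illustrative only.

import soot.Local;
import soot.PointsToAnalysis;
import soot.PointsToSet;
import soot.jimple.spark.ondemand.DemandCSPointsTo;

public class FlowsToQueryExample {

  // Assumes Spark has already run (cg.spark phase), so a pointer assignment graph exists.
  public static PointsToSet query(Local l) {
    // Budget values are illustrative; check the factory signature in your Soot version.
    PointsToAnalysis demandPta = DemandCSPointsTo.makeWithBudget(75000, 10, false);
    return demandPta.reachingObjects(l);
  }
}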
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class DemandCSPointsTo, method dumpPathForLoc:
/*
* (non-Javadoc)
*
* @see AAA.summary.Refiner#dumpPathForBadLoc(soot.jimple.spark.pag.VarNode,
* soot.jimple.spark.pag.AllocNode)
*/
protected void dumpPathForLoc(VarNode v, final AllocNode badLoc, String filePrefix) {
  final HashSet<VarNode> visited = new HashSet<VarNode>();
  final DotPointerGraph dotGraph = new DotPointerGraph();
  final class Helper {

    boolean handle(VarNode curNode) {
      assert curNode.getP2Set().contains(badLoc);
      visited.add(curNode);
      Node[] newEdges = pag.allocInvLookup(curNode);
      for (int i = 0; i < newEdges.length; i++) {
        AllocNode alloc = (AllocNode) newEdges[i];
        if (alloc.equals(badLoc)) {
          dotGraph.addNew(alloc, curNode);
          return true;
        }
      }
      for (AssignEdge assignEdge : csInfo.getAssignEdges(curNode)) {
        VarNode other = assignEdge.getSrc();
        if (other.getP2Set().contains(badLoc) && !visited.contains(other) && handle(other)) {
          if (assignEdge.isCallEdge()) {
            dotGraph.addCall(other, curNode, assignEdge.getCallSite());
          } else {
            dotGraph.addAssign(other, curNode);
          }
          return true;
        }
      }
      Node[] loadEdges = pag.loadInvLookup(curNode);
      for (int i = 0; i < loadEdges.length; i++) {
        FieldRefNode frNode = (FieldRefNode) loadEdges[i];
        SparkField field = frNode.getField();
        VarNode base = frNode.getBase();
        PointsToSetInternal baseP2Set = base.getP2Set();
        for (Pair<VarNode, VarNode> store : fieldToStores.get(field)) {
          if (store.getO2().getP2Set().hasNonEmptyIntersection(baseP2Set)) {
            VarNode matchSrc = store.getO1();
            if (matchSrc.getP2Set().contains(badLoc) && !visited.contains(matchSrc) && handle(matchSrc)) {
              dotGraph.addMatch(matchSrc, curNode);
              return true;
            }
          }
        }
      }
      return false;
    }
  }
  Helper h = new Helper();
  h.handle(v);
  // logger.debug(""+dotGraph.numEdges() + " edges on path");
  dotGraph.dump("tmp/" + filePrefix + v.getNumber() + "_" + badLoc.getNumber() + ".dot");
}
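
dumpPathForLoc writes a witness path explaining why badLoc ended up in the points-to set of v, as a Graphviz file under tmp/. The sketch below is not part of Soot: it renders such a file with the Graphviz dot executable (assumed to be on the PATH), using the same file-name pattern as the dump call above.

import java.io.IOException;

public class RenderWitnessGraph {

  public static void render(String filePrefix, int varNumber, int allocNumber)
      throws IOException, InterruptedException {
    String dotFile = "tmp/" + filePrefix + varNumber + "_" + allocNumber + ".dot";
    // Invoke Graphviz: dot -Tpdf <file> -o <file>.pdf
    Process p = new ProcessBuilder("dot", "-Tpdf", dotFile, "-o", dotFile + ".pdf")
        .inheritIO()
        .start();
    p.waitFor();
  }
}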
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class PropAlias, method propagate:
/**
* Actually does the propagation.
*/
public final void propagate() {
  ofcg = pag.getOnFlyCallGraph();
  new TopoSorter(pag, false).sort();
  for (Object object : pag.loadSources()) {
    final FieldRefNode fr = (FieldRefNode) object;
    fieldToBase.put(fr.getField(), fr.getBase());
  }
  for (Object object : pag.storeInvSources()) {
    final FieldRefNode fr = (FieldRefNode) object;
    fieldToBase.put(fr.getField(), fr.getBase());
  }
  for (Object object : pag.allocSources()) {
    handleAllocNode((AllocNode) object);
  }
  boolean verbose = pag.getOpts().verbose();
  do {
    if (verbose) {
      logger.debug("Worklist has " + varNodeWorkList.size() + " nodes.");
    }
    aliasWorkList = new HashSet<VarNode>();
    while (!varNodeWorkList.isEmpty()) {
      VarNode src = varNodeWorkList.iterator().next();
      varNodeWorkList.remove(src);
      aliasWorkList.add(src);
      handleVarNode(src);
    }
    if (verbose) {
      logger.debug("Now handling field references");
    }
    for (VarNode src : aliasWorkList) {
      for (FieldRefNode srcFr : src.getAllFieldRefs()) {
        SparkField field = srcFr.getField();
        for (VarNode dst : fieldToBase.get(field)) {
          if (src.getP2Set().hasNonEmptyIntersection(dst.getP2Set())) {
            FieldRefNode dstFr = dst.dot(field);
            aliasEdges.put(srcFr, dstFr);
            aliasEdges.put(dstFr, srcFr);
            fieldRefWorkList.add(srcFr);
            fieldRefWorkList.add(dstFr);
            if (makeP2Set(dstFr).addAll(srcFr.getP2Set().getOldSet(), null)) {
              outFieldRefWorkList.add(dstFr);
            }
            if (makeP2Set(srcFr).addAll(dstFr.getP2Set().getOldSet(), null)) {
              outFieldRefWorkList.add(srcFr);
            }
          }
        }
      }
    }
    for (FieldRefNode src : fieldRefWorkList) {
      for (FieldRefNode dst : aliasEdges.get(src)) {
        if (makeP2Set(dst).addAll(src.getP2Set().getNewSet(), null)) {
          outFieldRefWorkList.add(dst);
        }
      }
      src.getP2Set().flushNew();
    }
    fieldRefWorkList = new HashSet<FieldRefNode>();
    for (FieldRefNode src : outFieldRefWorkList) {
      PointsToSetInternal set = getP2Set(src).getNewSet();
      if (set.isEmpty())
        continue;
      Node[] targets = pag.loadLookup(src);
      for (Node element0 : targets) {
        VarNode target = (VarNode) element0;
        if (target.makeP2Set().addAll(set, null)) {
          addToWorklist(target);
        }
      }
      getP2Set(src).flushNew();
    }
    outFieldRefWorkList = new HashSet<FieldRefNode>();
  } while (!varNodeWorkList.isEmpty());
}
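
PropAlias is one of Spark's propagation strategies: instead of materializing field points-to sets per allocation site, it links aliased field references and iterates to a fixed point. A minimal sketch of selecting it when running Spark programmatically is shown below; the phase-option names (enabled, propagator, verbose) follow Spark's documented cg.spark options but should be verified against your Soot version, and the Scene is assumed to be set up in whole-program mode beforehand.

import java.util.HashMap;
import java.util.Map;
import soot.jimple.spark.SparkTransformer;

public class RunSparkWithAliasPropagator {

  public static void run() {
    // Assumes Scene.v() already holds the application and library classes
    // and whole-program mode is enabled.
    Map<String, String> opts = new HashMap<String, String>();
    opts.put("enabled", "true");
    // Selects PropAlias, whose propagate() method is shown above.
    opts.put("propagator", "alias");
    opts.put("verbose", "true");
    SparkTransformer.v().transform("cg.spark", opts);
  }
}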
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class PropWorklist, method handleVarNode:
/**
* Propagates new points-to information of node src to all its successors.
*/
protected final boolean handleVarNode(final VarNode src) {
  boolean ret = false;
  boolean flush = true;
  if (src.getReplacement() != src)
    throw new RuntimeException("Got bad node " + src + " with rep " + src.getReplacement());
  final PointsToSetInternal newP2Set = src.getP2Set().getNewSet();
  if (newP2Set.isEmpty())
    return false;
  if (ofcg != null) {
    QueueReader<Node> addedEdges = pag.edgeReader();
    ofcg.updatedNode(src);
    ofcg.build();
    while (addedEdges.hasNext()) {
      Node addedSrc = (Node) addedEdges.next();
      Node addedTgt = (Node) addedEdges.next();
      ret = true;
      if (addedSrc instanceof VarNode) {
        VarNode edgeSrc = (VarNode) addedSrc.getReplacement();
        if (addedTgt instanceof VarNode) {
          VarNode edgeTgt = (VarNode) addedTgt.getReplacement();
          if (edgeTgt.makeP2Set().addAll(edgeSrc.getP2Set(), null)) {
            varNodeWorkList.add(edgeTgt);
            if (edgeTgt == src)
              flush = false;
          }
        } else if (addedTgt instanceof NewInstanceNode) {
          NewInstanceNode edgeTgt = (NewInstanceNode) addedTgt.getReplacement();
          if (edgeTgt.makeP2Set().addAll(edgeSrc.getP2Set(), null)) {
            for (Node element : pag.assignInstanceLookup(edgeTgt)) {
              varNodeWorkList.add((VarNode) element);
              if (element == src)
                flush = false;
            }
          }
        }
      } else if (addedSrc instanceof AllocNode) {
        VarNode edgeTgt = (VarNode) addedTgt.getReplacement();
        if (edgeTgt.makeP2Set().add(addedSrc)) {
          varNodeWorkList.add(edgeTgt);
          if (edgeTgt == src)
            flush = false;
        }
      } else if (addedSrc instanceof NewInstanceNode && addedTgt instanceof VarNode) {
        final NewInstanceNode edgeSrc = (NewInstanceNode) addedSrc.getReplacement();
        final VarNode edgeTgt = (VarNode) addedTgt.getReplacement();
        addedSrc.getP2Set().forall(new P2SetVisitor() {

          @Override
          public void visit(Node n) {
            if (n instanceof ClassConstantNode) {
              ClassConstantNode ccn = (ClassConstantNode) n;
              Type ccnType = ccn.getClassConstant().toSootType();
              // If the referenced class has not been loaded,
              // we do this now
              SootClass targetClass = ((RefType) ccnType).getSootClass();
              if (targetClass.resolvingLevel() == SootClass.DANGLING)
                Scene.v().forceResolve(targetClass.getName(), SootClass.SIGNATURES);
              // We can only create alloc nodes for types that
              // we know
              edgeTgt.makeP2Set().add(pag.makeAllocNode(edgeSrc.getValue(), ccnType, ccn.getMethod()));
              varNodeWorkList.add(edgeTgt);
            }
          }
        });
        if (edgeTgt.makeP2Set().add(addedSrc)) {
          if (edgeTgt == src)
            flush = false;
        }
      }
    }
  }
  Node[] simpleTargets = pag.simpleLookup(src);
  for (Node element : simpleTargets) {
    if (element.makeP2Set().addAll(newP2Set, null)) {
      varNodeWorkList.add((VarNode) element);
      if (element == src)
        flush = false;
      ret = true;
    }
  }
  Node[] storeTargets = pag.storeLookup(src);
  for (Node element : storeTargets) {
    final FieldRefNode fr = (FieldRefNode) element;
    final SparkField f = fr.getField();
    ret = fr.getBase().getP2Set().forall(new P2SetVisitor() {

      public final void visit(Node n) {
        AllocDotField nDotF = pag.makeAllocDotField((AllocNode) n, f);
        if (nDotF.makeP2Set().addAll(newP2Set, null)) {
          returnValue = true;
        }
      }
    }) | ret;
  }
  final HashSet<Node[]> storesToPropagate = new HashSet<Node[]>();
  final HashSet<Node[]> loadsToPropagate = new HashSet<Node[]>();
  for (final FieldRefNode fr : src.getAllFieldRefs()) {
    final SparkField field = fr.getField();
    final Node[] storeSources = pag.storeInvLookup(fr);
    if (storeSources.length > 0) {
      newP2Set.forall(new P2SetVisitor() {

        public final void visit(Node n) {
          AllocDotField nDotF = pag.makeAllocDotField((AllocNode) n, field);
          for (Node element : storeSources) {
            Node[] pair = { element, nDotF.getReplacement() };
            storesToPropagate.add(pair);
          }
        }
      });
    }
    final Node[] loadTargets = pag.loadLookup(fr);
    if (loadTargets.length > 0) {
      newP2Set.forall(new P2SetVisitor() {

        public final void visit(Node n) {
          AllocDotField nDotF = pag.makeAllocDotField((AllocNode) n, field);
          if (nDotF != null) {
            for (Node element : loadTargets) {
              Node[] pair = { nDotF.getReplacement(), element };
              loadsToPropagate.add(pair);
            }
          }
        }
      });
    }
  }
  if (flush)
    src.getP2Set().flushNew();
  for (Node[] p : storesToPropagate) {
    VarNode storeSource = (VarNode) p[0];
    AllocDotField nDotF = (AllocDotField) p[1];
    if (nDotF.makeP2Set().addAll(storeSource.getP2Set(), null)) {
      ret = true;
    }
  }
  for (Node[] p : loadsToPropagate) {
    AllocDotField nDotF = (AllocDotField) p[0];
    VarNode loadTarget = (VarNode) p[1];
    if (loadTarget.makeP2Set().addAll(nDotF.getP2Set(), null)) {
      varNodeWorkList.add(loadTarget);
      ret = true;
    }
  }
  return ret;
}
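
handleVarNode is called from PropWorklist's fixed-point loop, which repeatedly pops variable nodes whose points-to sets gained new elements and processes them until the worklist drains. The sketch below illustrates that generic worklist pattern only; it is not the actual PropWorklist.propagate(), which additionally seeds the worklist from allocation edges and interleaves load/store processing.

import java.util.Set;

class WorklistDriverSketch {

  interface NodeHandler<N> {
    // Returns true if handling the node produced new facts.
    boolean handle(N node);
  }

  // Repeatedly handle nodes until no work remains; handle() is expected to
  // push newly affected nodes back onto the worklist.
  static <N> void runToFixedPoint(Set<N> worklist, NodeHandler<N> handler) {
    while (!worklist.isEmpty()) {
      N next = worklist.iterator().next();
      worklist.remove(next);
      handler.handle(next);
    }
  }
}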
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class SootUtil, method buildStoreMap:
public static FieldAccessMap buildStoreMap(PAG pag) {
  FieldAccessMap ret = new FieldAccessMap();
  Iterator frNodeIter = pag.storeInvSourcesIterator();
  while (frNodeIter.hasNext()) {
    FieldRefNode frNode = (FieldRefNode) frNodeIter.next();
    SparkField field = frNode.getField();
    Node[] targets = pag.storeInvLookup(frNode);
    for (int i = 0; i < targets.length; i++) {
      VarNode target = (VarNode) targets[i];
      if (target instanceof GlobalVarNode)
        continue;
      ret.put(field, new Pair<FieldRefNode, LocalVarNode>(frNode, (LocalVarNode) target));
    }
  }
  return ret;
}
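
buildStoreMap groups the store (putfield) edges of the pointer assignment graph by SparkField, pairing each field-reference node with the local variable being stored. A companion map for load (getfield) edges could be built analogously; the sketch below is illustrative only (not Soot's own implementation), is meant to sit next to buildStoreMap with the same imports, and uses only PAG calls that appear in the snippets above (loadSources, loadLookup).

public static FieldAccessMap buildLoadMapSketch(PAG pag) {
  FieldAccessMap ret = new FieldAccessMap();
  for (Object object : pag.loadSources()) {
    FieldRefNode frNode = (FieldRefNode) object;
    SparkField field = frNode.getField();
    // Targets of a load edge are the local variables the loaded value flows into.
    for (Node target : pag.loadLookup(frNode)) {
      VarNode tgt = (VarNode) target;
      if (tgt instanceof GlobalVarNode)
        continue;
      ret.put(field, new Pair<FieldRefNode, LocalVarNode>(frNode, (LocalVarNode) tgt));
    }
  }
  return ret;
}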