Use of soot.jimple.spark.pag.AllocNode in project soot by Sable: class GeomQueries, method contextsGoBy.
/**
 * Searches the points-to results for a field expression such as p.f.
 *
 * @param sootEdge the call graph edge that fixes the calling context of the query
 * @param l the local variable used as the base pointer (p in p.f)
 * @param field the field being dereferenced (f in p.f)
 * @param visitor the visitor that collects the context-sensitive objects found
 * @return true if at least one object was found for p.f, false otherwise
 */
@SuppressWarnings("rawtypes")
public boolean contextsGoBy(Edge sootEdge, Local l, SparkField field, PtSensVisitor visitor) {
// First collect the context-sensitive points-to set of the base local l
Obj_full_extractor pts_l = new Obj_full_extractor();
if (!contextsGoBy(sootEdge, l, pts_l))
return false;
visitor.prepare();
for (IntervalContextVar icv : pts_l.outList) {
AllocNode obj = (AllocNode) icv.var;
// Look up the node representing obj.field; skip objects that have no such field node
AllocDotField obj_f = geomPTA.findAllocDotField(obj, field);
if (obj_f == null)
continue;
IVarAbstraction objField = geomPTA.findInternalNode(obj_f);
if (objField == null)
continue;
// [L, R) delimits the calling contexts under which obj reaches l
long L = icv.L;
long R = icv.R;
assert L < R;
// Collect everything obj.field may point to within that context interval
objField.get_all_context_sensitive_objects(L, R, visitor);
}
pts_l = null;
visitor.finish();
return visitor.numOfDiffObjects() != 0;
}
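A minimal usage sketch of the query above, assuming Spark's geometric analysis has already run and a GeomQueries instance has been built from it; the Obj_full_extractor visitor and numOfDiffObjects() come from the snippet above, while the geom package paths in the imports are assumptions that may differ across Soot versions.

import soot.Local;
import soot.jimple.spark.geom.dataMgr.Obj_full_extractor; // assumed location
import soot.jimple.spark.geom.geomPA.GeomQueries; // assumed location
import soot.jimple.spark.pag.SparkField;
import soot.jimple.toolkits.callgraph.Edge;

public class FieldQueryExample {

    /**
     * Asks which objects l.field may point to when the query is restricted to
     * the calling context induced by callEdge.
     */
    public static boolean queryField(GeomQueries queries, Edge callEdge, Local l, SparkField field) {
        Obj_full_extractor visitor = new Obj_full_extractor();
        boolean found = queries.contextsGoBy(callEdge, l, field, visitor);
        if (found) {
            System.out.println("Distinct objects found: " + visitor.numOfDiffObjects());
        }
        return found;
    }
}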
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable: class PropAlias, method handleVarNode.
/**
 * Propagates the new points-to information of node src to all of its successors.
 *
 * @param src the variable node whose newly added points-to facts are propagated
 * @return true if any points-to set or call graph edge changed as a result
 */
protected final boolean handleVarNode(final VarNode src) {
boolean ret = false;
if (src.getReplacement() != src)
throw new RuntimeException("Got bad node " + src + " with rep " + src.getReplacement());
final PointsToSetInternal newP2Set = src.getP2Set().getNewSet();
if (newP2Set.isEmpty())
return false;
if (ofcg != null) {
// Notify the on-the-fly call graph of the new facts and process any
// edges it adds to the PAG as a result.
QueueReader<Node> addedEdges = pag.edgeReader();
ofcg.updatedNode(src);
ofcg.build();
while (addedEdges.hasNext()) {
// New PAG edges are queued as (source, target) pairs
Node addedSrc = addedEdges.next();
Node addedTgt = addedEdges.next();
ret = true;
if (addedSrc instanceof VarNode) {
VarNode edgeSrc = (VarNode) addedSrc;
if (addedTgt instanceof VarNode) {
VarNode edgeTgt = (VarNode) addedTgt;
if (edgeTgt.makeP2Set().addAll(edgeSrc.getP2Set(), null))
addToWorklist(edgeTgt);
} else if (addedTgt instanceof NewInstanceNode) {
NewInstanceNode edgeTgt = (NewInstanceNode) addedTgt.getReplacement();
if (edgeTgt.makeP2Set().addAll(edgeSrc.getP2Set(), null)) {
for (Node element : pag.assignInstanceLookup(edgeTgt)) {
addToWorklist((VarNode) element);
}
}
}
} else if (addedSrc instanceof AllocNode) {
AllocNode edgeSrc = (AllocNode) addedSrc;
VarNode edgeTgt = (VarNode) addedTgt;
if (edgeTgt.makeP2Set().add(edgeSrc))
addToWorklist(edgeTgt);
} else if (addedSrc instanceof NewInstanceNode && addedTgt instanceof VarNode) {
final NewInstanceNode edgeSrc = (NewInstanceNode) addedSrc.getReplacement();
final VarNode edgeTgt = (VarNode) addedTgt.getReplacement();
addedSrc.getP2Set().forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
if (n instanceof ClassConstantNode) {
ClassConstantNode ccn = (ClassConstantNode) n;
Type ccnType = ccn.getClassConstant().toSootType();
// If the referenced class has not been loaded,
// we do this now
SootClass targetClass = ((RefType) ccnType).getSootClass();
if (targetClass.resolvingLevel() == SootClass.DANGLING)
Scene.v().forceResolve(targetClass.getName(), SootClass.SIGNATURES);
edgeTgt.makeP2Set().add(pag.makeAllocNode(edgeSrc.getValue(), ccnType, ccn.getMethod()));
addToWorklist(edgeTgt);
}
}
});
}
// If either endpoint of the new edge is a field reference p.f, record its
// base so that alias pairs involving p.f get recomputed.
FieldRefNode frn = null;
if (addedSrc instanceof FieldRefNode)
frn = (FieldRefNode) addedSrc;
if (addedTgt instanceof FieldRefNode)
frn = (FieldRefNode) addedTgt;
if (frn != null) {
VarNode base = frn.getBase();
if (fieldToBase.put(frn.getField(), base)) {
aliasWorkList.add(base);
}
}
}
}
// Propagate the new objects along simple assignment edges out of src (y = src)
Node[] simpleTargets = pag.simpleLookup(src);
for (Node element : simpleTargets) {
if (element.makeP2Set().addAll(newP2Set, null)) {
addToWorklist((VarNode) element);
ret = true;
}
}
// Propagate the new objects into every field reference that src is stored into (x.f = src)
Node[] storeTargets = pag.storeLookup(src);
for (Node element : storeTargets) {
final FieldRefNode fr = (FieldRefNode) element;
if (fr.makeP2Set().addAll(newP2Set, null)) {
fieldRefWorkList.add(fr);
ret = true;
}
}
src.getP2Set().flushNew();
return ret;
}
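The method above enumerates points-to sets with the P2SetVisitor callback rather than an iterator. A minimal sketch of the same pattern in client code, assuming the VarNode comes from an already-built PAG:

import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.Node;
import soot.jimple.spark.pag.VarNode;
import soot.jimple.spark.sets.P2SetVisitor;

public class P2SetDumpExample {

    /** Prints every allocation site that the given variable node may point to. */
    public static void dumpAllocSites(final VarNode v) {
        v.getP2Set().forall(new P2SetVisitor() {
            @Override
            public void visit(Node n) {
                if (n instanceof AllocNode) {
                    System.out.println(v + " may point to " + n);
                }
            }
        });
    }
}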
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable: class PropAlias, method propagate.
/**
* Actually does the propagation.
*/
public final void propagate() {
ofcg = pag.getOnFlyCallGraph();
new TopoSorter(pag, false).sort();
// Record, for every field, the base variables of all loads and stores on that field
for (Object object : pag.loadSources()) {
final FieldRefNode fr = (FieldRefNode) object;
fieldToBase.put(fr.getField(), fr.getBase());
}
for (Object object : pag.storeInvSources()) {
final FieldRefNode fr = (FieldRefNode) object;
fieldToBase.put(fr.getField(), fr.getBase());
}
for (Object object : pag.allocSources()) {
handleAllocNode((AllocNode) object);
}
boolean verbose = pag.getOpts().verbose();
do {
if (verbose) {
logger.debug("Worklist has " + varNodeWorkList.size() + " nodes.");
}
aliasWorkList = new HashSet<VarNode>();
while (!varNodeWorkList.isEmpty()) {
VarNode src = varNodeWorkList.iterator().next();
varNodeWorkList.remove(src);
aliasWorkList.add(src);
handleVarNode(src);
}
if (verbose) {
logger.debug("Now handling field references");
}
// Pair up field references p.f and q.f whose bases may point to a common object
for (VarNode src : aliasWorkList) {
for (FieldRefNode srcFr : src.getAllFieldRefs()) {
SparkField field = srcFr.getField();
for (VarNode dst : fieldToBase.get(field)) {
if (src.getP2Set().hasNonEmptyIntersection(dst.getP2Set())) {
FieldRefNode dstFr = dst.dot(field);
aliasEdges.put(srcFr, dstFr);
aliasEdges.put(dstFr, srcFr);
fieldRefWorkList.add(srcFr);
fieldRefWorkList.add(dstFr);
if (makeP2Set(dstFr).addAll(srcFr.getP2Set().getOldSet(), null)) {
outFieldRefWorkList.add(dstFr);
}
if (makeP2Set(srcFr).addAll(dstFr.getP2Set().getOldSet(), null)) {
outFieldRefWorkList.add(srcFr);
}
}
}
}
}
// Propagate newly discovered objects across the recorded alias edges
for (FieldRefNode src : fieldRefWorkList) {
for (FieldRefNode dst : aliasEdges.get(src)) {
if (makeP2Set(dst).addAll(src.getP2Set().getNewSet(), null)) {
outFieldRefWorkList.add(dst);
}
}
src.getP2Set().flushNew();
}
fieldRefWorkList = new HashSet<FieldRefNode>();
// Flow the new objects out through load edges (x = p.f)
for (FieldRefNode src : outFieldRefWorkList) {
PointsToSetInternal set = getP2Set(src).getNewSet();
if (set.isEmpty())
continue;
Node[] targets = pag.loadLookup(src);
for (Node element0 : targets) {
VarNode target = (VarNode) element0;
if (target.makeP2Set().addAll(set, null)) {
addToWorklist(target);
}
}
getP2Set(src).flushNew();
}
outFieldRefWorkList = new HashSet<FieldRefNode>();
} while (!varNodeWorkList.isEmpty());
}
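PropAlias is one of several interchangeable Spark propagators (alongside PropIter and PropWorklist below). A minimal sketch of selecting it when driving Soot programmatically; the "propagator:alias" phase option is assumed to map to PropAlias, and "path/to/classes" is a hypothetical input directory.

import java.util.Collections;
import soot.PackManager;
import soot.Scene;
import soot.options.Options;

public class SparkAliasPropagatorExample {

    public static void main(String[] args) {
        Options.v().set_whole_program(true); // Spark requires whole-program mode
        Options.v().setPhaseOption("cg.spark", "on");
        Options.v().setPhaseOption("cg.spark", "propagator:alias"); // assumed to select PropAlias
        Options.v().set_process_dir(Collections.singletonList("path/to/classes")); // hypothetical input
        Scene.v().loadNecessaryClasses();
        PackManager.v().runPacks(); // runs cg.spark, which ends up calling propagate()
    }
}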
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable: class PropIter, method propagate.
/**
* Actually does the propagation.
*/
public final void propagate() {
final OnFlyCallGraph ofcg = pag.getOnFlyCallGraph();
new TopoSorter(pag, false).sort();
for (Object object : pag.allocSources()) {
handleAllocNode((AllocNode) object);
}
int iteration = 1;
boolean change;
do {
change = false;
TreeSet<VarNode> simpleSources = new TreeSet<VarNode>(pag.simpleSources());
if (pag.getOpts().verbose()) {
logger.debug("Iteration " + (iteration++));
}
for (VarNode object : simpleSources) {
change = handleSimples(object) | change;
}
if (ofcg != null) {
QueueReader<Node> addedEdges = pag.edgeReader();
for (VarNode src : pag.getVarNodeNumberer()) {
ofcg.updatedNode(src);
}
ofcg.build();
while (addedEdges.hasNext()) {
Node addedSrc = (Node) addedEdges.next();
Node addedTgt = (Node) addedEdges.next();
change = true;
if (addedSrc instanceof VarNode) {
PointsToSetInternal p2set = ((VarNode) addedSrc).getP2Set();
if (p2set != null)
// Mark the whole set as new again so it is re-propagated along the added edge
p2set.unFlushNew();
} else if (addedSrc instanceof AllocNode) {
((VarNode) addedTgt).makeP2Set().add(addedSrc);
}
}
if (change) {
new TopoSorter(pag, false).sort();
}
}
for (FieldRefNode object : pag.loadSources()) {
change = handleLoads(object) | change;
}
for (VarNode object : pag.storeSources()) {
change = handleStores(object) | change;
}
for (NewInstanceNode object : pag.assignInstanceSources()) {
change = handleNewInstances(object) | change;
}
} while (change);
}
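Once any of these propagators has run to a fixed point, clients normally read the results through the generic PointsToAnalysis interface rather than the PAG nodes directly. A minimal sketch, assuming Spark has already been run so that Scene.v().getPointsToAnalysis() returns its result:

import soot.Local;
import soot.PointsToAnalysis;
import soot.PointsToSet;
import soot.Scene;

public class PointsToQueryExample {

    /** Returns true if the two locals may alias according to the computed points-to sets. */
    public static boolean mayAlias(Local a, Local b) {
        PointsToAnalysis pta = Scene.v().getPointsToAnalysis();
        PointsToSet ptsA = pta.reachingObjects(a);
        PointsToSet ptsB = pta.reachingObjects(b);
        return ptsA.hasNonEmptyIntersection(ptsB);
    }
}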
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable: class PropWorklist, method propagate.
/**
* Actually does the propagation.
*/
public final void propagate() {
ofcg = pag.getOnFlyCallGraph();
new TopoSorter(pag, false).sort();
for (AllocNode object : pag.allocSources()) {
handleAllocNode(object);
}
boolean verbose = pag.getOpts().verbose();
do {
if (verbose) {
logger.debug("Worklist has " + varNodeWorkList.size() + " nodes.");
}
while (!varNodeWorkList.isEmpty()) {
VarNode src = varNodeWorkList.iterator().next();
varNodeWorkList.remove(src);
handleVarNode(src);
}
if (verbose) {
logger.debug("Now handling field references");
}
// For each store x.f = src, add src's points-to set to the f field of every
// object that x may point to.
for (Object object : pag.storeSources()) {
final VarNode src = (VarNode) object;
Node[] targets = pag.storeLookup(src);
for (Node element0 : targets) {
final FieldRefNode target = (FieldRefNode) element0;
target.getBase().makeP2Set().forall(new P2SetVisitor() {
public final void visit(Node n) {
AllocDotField nDotF = pag.makeAllocDotField((AllocNode) n, target.getField());
if (ofcg != null) {
ofcg.updatedFieldRef(nDotF, src.getP2Set());
}
nDotF.makeP2Set().addAll(src.getP2Set(), null);
}
});
}
}
// Collect (field points-to set, load target) pairs that still need propagation
HashSet<Object[]> edgesToPropagate = new HashSet<Object[]>();
for (Object object : pag.loadSources()) {
handleFieldRefNode((FieldRefNode) object, edgesToPropagate);
}
Set<PointsToSetInternal> nodesToFlush = Collections.newSetFromMap(new IdentityHashMap<PointsToSetInternal, Boolean>());
for (Object[] pair : edgesToPropagate) {
PointsToSetInternal nDotF = (PointsToSetInternal) pair[0];
PointsToSetInternal newP2Set = nDotF.getNewSet();
VarNode loadTarget = (VarNode) pair[1];
if (loadTarget.makeP2Set().addAll(newP2Set, null)) {
varNodeWorkList.add(loadTarget);
}
nodesToFlush.add(nDotF);
}
for (PointsToSetInternal nDotF : nodesToFlush) {
nDotF.flushNew();
}
} while (!varNodeWorkList.isEmpty());
}
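The store handling above relies on AllocDotField nodes: each (allocation site, field) pair o.f has its own node in the PAG whose points-to set records what o.f may point to. A minimal lookup sketch, assuming the PAG and nodes come from a completed Spark run:

import soot.jimple.spark.pag.AllocDotField;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.spark.pag.PAG;
import soot.jimple.spark.pag.SparkField;
import soot.jimple.spark.sets.PointsToSetInternal;

public class AllocDotFieldExample {

    /** Returns the points-to set of alloc.field, or null if no such node has been created. */
    public static PointsToSetInternal fieldPointsTo(PAG pag, AllocNode alloc, SparkField field) {
        AllocDotField nDotF = pag.findAllocDotField(alloc, field);
        return nDotF == null ? null : nDotF.getP2Set();
    }
}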