Use of soot.jimple.spark.pag.VarNode in project soot by Sable.
The class MethodNodeFactory, method caseStringConstant.
@Override
public final void caseStringConstant(StringConstant sc) {
  AllocNode stringConstant;
  if (pag.getOpts().string_constants() || Scene.v().containsClass(sc.value)
      || (sc.value.length() > 0 && sc.value.charAt(0) == '[')) {
    stringConstant = pag.makeStringConstantNode(sc.value);
  } else {
    stringConstant = pag.makeAllocNode(PointsToAnalysis.STRING_NODE, RefType.v("java.lang.String"), null);
  }
  VarNode stringConstantLocal = pag.makeGlobalVarNode(stringConstant, RefType.v("java.lang.String"));
  pag.addEdge(stringConstant, stringConstantLocal);
  setResult(stringConstantLocal);
}
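How a literal is modeled depends on Spark's string-constants option: when it is enabled (or the literal names a class in the Scene, or looks like an array descriptor), each distinct literal gets its own StringConstantNode; otherwise all literals collapse into the single PointsToAnalysis.STRING_NODE allocation site. A minimal sketch of a driver that turns the option on (the class name and argument pass-through are assumptions, not part of the snippet above):

import soot.PhaseOptions;

public class StringConstantsDemo { // hypothetical driver class
  public static void main(String[] args) {
    // Run Spark with one node per string literal instead of one merged STRING_NODE.
    PhaseOptions.v().setPhaseOption("cg.spark", "enabled:true");
    PhaseOptions.v().setPhaseOption("cg.spark", "string-constants:true");
    soot.Main.main(args); // remaining Soot options (classpath, entry class, ...) passed through
  }
}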
Use of soot.jimple.spark.pag.VarNode in project soot by Sable.
The class MethodNodeFactory, method handleStmt.
/**
* Adds the edges required for this statement to the graph.
*/
public final void handleStmt(Stmt s) {
  // We only consider reflective class creation when it is enabled
  if (s.containsInvokeExpr()) {
    if (!pag.getCGOpts().types_for_invoke()) {
      return;
    }
    InvokeExpr iexpr = s.getInvokeExpr();
    if (iexpr instanceof VirtualInvokeExpr) {
      if (!isReflectionNewInstance(iexpr)) {
        return;
      }
    } else if (!(iexpr instanceof StaticInvokeExpr)) {
      return;
    }
  }
  s.apply(new AbstractStmtSwitch() {
    @Override
    public final void caseAssignStmt(AssignStmt as) {
      Value l = as.getLeftOp();
      Value r = as.getRightOp();
      if (!(l.getType() instanceof RefLikeType)) {
        return;
      }
      assert r.getType() instanceof RefLikeType : "Type mismatch in assignment " + as + " in method " + method.getSignature();
      l.apply(MethodNodeFactory.this);
      Node dest = getNode();
      r.apply(MethodNodeFactory.this);
      Node src = getNode();
      if (l instanceof InstanceFieldRef) {
        ((InstanceFieldRef) l).getBase().apply(MethodNodeFactory.this);
        pag.addDereference((VarNode) getNode());
      }
      if (r instanceof InstanceFieldRef) {
        ((InstanceFieldRef) r).getBase().apply(MethodNodeFactory.this);
        pag.addDereference((VarNode) getNode());
      } else if (r instanceof StaticFieldRef) {
        StaticFieldRef sfr = (StaticFieldRef) r;
        SootFieldRef s = sfr.getFieldRef();
        if (pag.getOpts().empties_as_allocs()) {
          if (s.declaringClass().getName().equals("java.util.Collections")) {
            if (s.name().equals("EMPTY_SET")) {
              src = pag.makeAllocNode(RefType.v("java.util.HashSet"), RefType.v("java.util.HashSet"), method);
            } else if (s.name().equals("EMPTY_MAP")) {
              src = pag.makeAllocNode(RefType.v("java.util.HashMap"), RefType.v("java.util.HashMap"), method);
            } else if (s.name().equals("EMPTY_LIST")) {
              src = pag.makeAllocNode(RefType.v("java.util.LinkedList"), RefType.v("java.util.LinkedList"), method);
            }
          } else if (s.declaringClass().getName().equals("java.util.Hashtable")) {
            if (s.name().equals("emptyIterator")) {
              src = pag.makeAllocNode(RefType.v("java.util.Hashtable$EmptyIterator"), RefType.v("java.util.Hashtable$EmptyIterator"), method);
            } else if (s.name().equals("emptyEnumerator")) {
              src = pag.makeAllocNode(RefType.v("java.util.Hashtable$EmptyEnumerator"), RefType.v("java.util.Hashtable$EmptyEnumerator"), method);
            }
          }
        }
      }
      mpag.addInternalEdge(src, dest);
    }

    @Override
    public final void caseReturnStmt(ReturnStmt rs) {
      if (!(rs.getOp().getType() instanceof RefLikeType)) {
        return;
      }
      rs.getOp().apply(MethodNodeFactory.this);
      Node retNode = getNode();
      mpag.addInternalEdge(retNode, caseRet());
    }

    @Override
    public final void caseIdentityStmt(IdentityStmt is) {
      if (!(is.getLeftOp().getType() instanceof RefLikeType)) {
        return;
      }
      Value leftOp = is.getLeftOp();
      Value rightOp = is.getRightOp();
      leftOp.apply(MethodNodeFactory.this);
      Node dest = getNode();
      rightOp.apply(MethodNodeFactory.this);
      Node src = getNode();
      mpag.addInternalEdge(src, dest);
      // If library mode is active, add allocations for every possible type of
      // this local and for the parameters of accessible methods.
      int libOption = pag.getCGOpts().library();
      if (libOption != CGOptions.library_disabled && accessibilityOracle.isAccessible(method)) {
        if (rightOp instanceof IdentityRef) {
          Type rt = rightOp.getType();
          rt.apply(new SparkLibraryHelper(pag, src, method));
        }
      }
    }

    @Override
    public final void caseThrowStmt(ThrowStmt ts) {
      ts.getOp().apply(MethodNodeFactory.this);
      mpag.addOutEdge(getNode(), pag.nodeFactory().caseThrow());
    }
  });
}
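handleStmt is invoked once per Jimple statement when a method's fragment of the pointer assignment graph is built. A minimal sketch of that driving loop, modeled on what MethodPAG does when it processes a method body (the helper name is made up):

import soot.Body;
import soot.SootMethod;
import soot.Unit;
import soot.jimple.Stmt;
import soot.jimple.spark.builder.MethodNodeFactory;

// Hypothetical helper: translate every statement of a concrete method into PAG edges.
void buildIntraProcEdges(SootMethod m, MethodNodeFactory nodeFactory) {
  if (!m.isConcrete()) {
    return; // abstract and native methods have no body to translate
  }
  Body body = m.retrieveActiveBody();
  for (Unit unit : body.getUnits()) {
    nodeFactory.handleStmt((Stmt) unit); // adds assign/load/store/return/throw edges
  }
}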
Use of soot.jimple.spark.pag.VarNode in project soot by Sable.
The class MethodNodeFactory, method caseThis.
public final Node caseThis() {
  VarNode ret = pag.makeLocalVarNode(new Pair<SootMethod, String>(method, PointsToAnalysis.THIS_NODE),
      method.getDeclaringClass().getType(), method);
  ret.setInterProcTarget();
  return ret;
}
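Because the this node is keyed by the pair (method, PointsToAnalysis.THIS_NODE), a client can retrieve it again through the PAG's lookup API without going via the factory. A minimal sketch (the helper name is made up; it assumes the node was already created as above):

import soot.PointsToAnalysis;
import soot.SootMethod;
import soot.jimple.spark.pag.LocalVarNode;
import soot.jimple.spark.pag.PAG;
import soot.toolkits.scalar.Pair;

// Hypothetical helper: fetch the existing this node of a method, or null if absent.
LocalVarNode thisNodeOf(PAG pag, SootMethod method) {
  return pag.findLocalVarNode(new Pair<SootMethod, String>(method, PointsToAnalysis.THIS_NODE));
}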
Use of soot.jimple.spark.pag.VarNode in project soot by Sable.
The class DemandCSPointsTo, method findContextsForAllocs.
protected AllocAndContextSet findContextsForAllocs(final VarAndContext varAndContext, PointsToSetInternal locs) {
  if (contextForAllocsStack.contains(varAndContext)) {
    // Recursion: check the depth. A self-cycle like x = x.next is fine.
    int oldIndex = contextForAllocsStack.indexOf(varAndContext);
    if (oldIndex != contextForAllocsStack.size() - 1) {
      if (recursionDepth == -1) {
        recursionDepth = oldIndex + 1;
        if (DEBUG) {
          debugPrint("RECURSION depth = " + recursionDepth);
        }
      } else if (contextForAllocsStack.size() - oldIndex > 5) {
        // just give up
        throw new TerminateEarlyException();
      }
    }
  }
  contextForAllocsStack.push(varAndContext);
  final AllocAndContextSet ret = new AllocAndContextSet();
  final PointsToSetInternal realLocs = checkContextsForAllocsCache(varAndContext, ret, locs);
  if (realLocs.isEmpty()) {
    if (DEBUG) {
      debugPrint("cached result " + ret);
    }
    contextForAllocsStack.pop();
    return ret;
  }
  nesting++;
  if (DEBUG) {
    debugPrint("finding alloc contexts for " + varAndContext);
  }
  try {
    final Set<VarAndContext> marked = new HashSet<VarAndContext>();
    final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    p.prop(varAndContext);
    IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {
      @Override
      public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
        if (realLocs.contains(allocNode)) {
          if (DEBUG) {
            debugPrint("found alloc " + allocNode);
          }
          ret.add(new AllocAndContext(allocNode, origVarAndContext.context));
        }
      }

      @Override
      public void handleMatchSrc(final VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase,
          VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
        if (DEBUG) {
          debugPrint("handling src " + matchSrc);
          debugPrint("intersection " + intersection);
        }
        if (!refine) {
          p.prop(new VarAndContext(matchSrc, EMPTY_CALLSTACK));
          return;
        }
        AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
        if (DEBUG) {
          debugPrint("alloc contexts " + allocContexts);
        }
        for (AllocAndContext allocAndContext : allocContexts) {
          if (DEBUG) {
            debugPrint("alloc and context " + allocAndContext);
          }
          CallingContextSet matchSrcContexts;
          if (fieldCheckHeuristic.validFromBothEnds(field)) {
            matchSrcContexts = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
          } else {
            matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
          }
          for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
            p.prop(new VarAndContext(matchSrc, matchSrcContext));
          }
        }
      }

      @Override
      Object getResult() {
        return ret;
      }

      @Override
      void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
        p.prop(newVarAndContext);
      }

      @Override
      boolean shouldHandleSrc(VarNode src) {
        return realLocs.hasNonEmptyIntersection(src.getP2Set());
      }
    };
    processIncomingEdges(edgeHandler, worklist);
    // Update the cache.
    if (recursionDepth != -1) {
      // Beyond the recursion depth, don't cache anything.
      if (contextForAllocsStack.size() > recursionDepth) {
        if (DEBUG) {
          debugPrint("REMOVING " + varAndContext);
          debugPrint(contextForAllocsStack.toString());
        }
        contextsForAllocsCache.remove(varAndContext);
      } else {
        assert contextForAllocsStack.size() == recursionDepth : recursionDepth + " " + contextForAllocsStack;
        recursionDepth = -1;
        if (contextsForAllocsCache.containsKey(varAndContext)) {
          contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
        } else {
          PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
          storedSet.addAll(locs, null);
          contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
        }
      }
    } else {
      if (contextsForAllocsCache.containsKey(varAndContext)) {
        contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
      } else {
        PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
        storedSet.addAll(locs, null);
        contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
      }
    }
    nesting--;
    return ret;
  } catch (CallSiteException e) {
    contextsForAllocsCache.remove(varAndContext);
    throw e;
  } finally {
    contextForAllocsStack.pop();
  }
}
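findContextsForAllocs is internal to the refinement loop; clients normally reach it indirectly by issuing a query against a DemandCSPointsTo instance built over the current Spark PAG. A minimal sketch (the helper name is made up, and the budget values are treated as assumptions):

import soot.Local;
import soot.PointsToSet;
import soot.jimple.spark.ondemand.DemandCSPointsTo;

// Hypothetical helper: answer one refined points-to query for a local.
PointsToSet refinedPointsTo(Local l) {
  // makeWithBudget reads the Spark PAG from Scene.v().getPointsToAnalysis(),
  // so Spark must have run already.
  DemandCSPointsTo demandPta = DemandCSPointsTo.makeWithBudget(75000, 10, true); // traversal budget, passes, lazy
  return demandPta.reachingObjects(l); // refinement may call findContextsForAllocs internally
}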
Use of soot.jimple.spark.pag.VarNode in project soot by Sable.
The class DemandCSPointsTo, method findUpContextsForVar.
protected CallingContextSet findUpContextsForVar(AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
  final AllocNode alloc = allocAndContext.alloc;
  final ImmutableStack<Integer> allocContext = allocAndContext.context;
  CallingContextSet tmpSet = checkUpContextCache(varContextAndUp, allocAndContext);
  if (tmpSet != null) {
    return tmpSet;
  }
  final CallingContextSet ret = new CallingContextSet();
  upContextCache.get(varContextAndUp).put(allocAndContext, ret);
  nesting++;
  if (DEBUG) {
    debugPrint("finding up context for " + varContextAndUp + " to " + alloc + " " + allocContext);
  }
  try {
    final Set<VarAndContext> marked = new HashSet<VarAndContext>();
    final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
    final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
    p.prop(varContextAndUp);
    class UpContextEdgeHandler extends IncomingEdgeHandler {
      @Override
      public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        if (allocNode == alloc) {
          if (allocContext.topMatches(contextAndUp.context)) {
            ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
            ImmutableStack<Integer> toAdd = allocContext.popAll(contextAndUp.context).pushAll(reverse);
            if (DEBUG) {
              debugPrint("found up context " + toAdd);
            }
            ret.add(toAdd);
          } else if (contextAndUp.context.topMatches(allocContext)) {
            ImmutableStack<Integer> toAdd = contextAndUp.upContext.reverse();
            if (DEBUG) {
              debugPrint("found up context " + toAdd);
            }
            ret.add(toAdd);
          }
        }
      }

      @Override
      public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase,
          VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        if (DEBUG) {
          debugPrint("CHECKING " + alloc);
        }
        PointsToSetInternal tmp = new HybridPointsToSet(alloc.getType(), pag);
        tmp.add(alloc);
        AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), tmp);
        if (!refine) {
          if (!allocContexts.isEmpty()) {
            ret.add(contextAndUp.upContext.reverse());
          }
        } else {
          if (!allocContexts.isEmpty()) {
            for (AllocAndContext t : allocContexts) {
              ImmutableStack<Integer> discoveredAllocContext = t.context;
              if (!allocContext.topMatches(discoveredAllocContext)) {
                continue;
              }
              ImmutableStack<Integer> trueAllocContext = allocContext.popAll(discoveredAllocContext);
              AllocAndContextSet allocAndContexts = findContextsForAllocs(new VarAndContext(storeBase, trueAllocContext), intersection);
              for (AllocAndContext allocAndContext : allocAndContexts) {
                if (fieldCheckHeuristic.validFromBothEnds(field)) {
                  ret.addAll(findUpContextsForVar(allocAndContext, new VarContextAndUp(loadBase, contextAndUp.context, contextAndUp.upContext)));
                } else {
                  CallingContextSet tmpContexts = findVarContextsFromAlloc(allocAndContext, loadBase);
                  for (ImmutableStack<Integer> tmpContext : tmpContexts) {
                    if (tmpContext.topMatches(contextAndUp.context)) {
                      ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
                      ImmutableStack<Integer> toAdd = tmpContext.popAll(contextAndUp.context).pushAll(reverse);
                      ret.add(toAdd);
                    }
                  }
                }
              }
            }
          }
        }
      }

      @Override
      Object getResult() {
        return ret;
      }

      @Override
      void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
        VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
        ImmutableStack<Integer> upContext = contextAndUp.upContext;
        ImmutableStack<Integer> newUpContext = upContext;
        if (assignEdge.isParamEdge() && contextAndUp.context.isEmpty()) {
          if (upContext.size() < ImmutableStack.getMaxSize()) {
            newUpContext = pushWithRecursionCheck(upContext, assignEdge);
          }
        }
        p.prop(new VarContextAndUp(newVarAndContext.var, newVarAndContext.context, newUpContext));
      }

      @Override
      boolean shouldHandleSrc(VarNode src) {
        if (src instanceof GlobalVarNode) {
          // For now, just give up on globals.
          throw new TerminateEarlyException();
        }
        return src.getP2Set().contains(alloc);
      }
    }
    UpContextEdgeHandler edgeHandler = new UpContextEdgeHandler();
    processIncomingEdges(edgeHandler, worklist);
    nesting--;
    return ret;
  } catch (CallSiteException e) {
    upContextCache.remove(varContextAndUp);
    throw e;
  }
}
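This machinery can also be enabled declaratively: Spark's cs-demand option wraps the PAG in a DemandCSPointsTo, with the traversal and passes options controlling the per-query budgets used by the methods above. A minimal sketch of the phase configuration (a setup fragment, assumed to run before Soot's packs execute):

import soot.PhaseOptions;

// Configure Spark to answer queries with demand-driven, context-sensitive refinement.
PhaseOptions.v().setPhaseOption("cg.spark", "enabled:true");
PhaseOptions.v().setPhaseOption("cg.spark", "cs-demand:true");
PhaseOptions.v().setPhaseOption("cg.spark", "traversal:75000"); // max nodes visited per query
PhaseOptions.v().setPhaseOption("cg.spark", "passes:10");       // max refinement passes per query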