Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class GeomPointsTo, the method preprocess:
/**
* Read in the program facts generated by SPARK.
* We also construct our own call graph and pointer variables.
*/
private void preprocess() {
int id;
int s, t;
// Build the call graph
n_func = Scene.v().getReachableMethods().size() + 1;
call_graph = new CgEdge[n_func];
n_calls = 0;
n_reach_spark_user_methods = 0;
id = 1;
QueueReader<MethodOrMethodContext> smList = Scene.v().getReachableMethods().listener();
CallGraph soot_callgraph = Scene.v().getCallGraph();
while (smList.hasNext()) {
final SootMethod func = smList.next().method();
func2int.put(func, id);
int2func.put(id, func);
/*
* We cannot identify all entry methods since some entry methods call themselves.
* In that case, the Soot CallGraph.isEntryMethod() function returns false.
*/
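// Constants.SUPER_MAIN is a virtual root method (the extra slot reserved by
// the "+ 1" above); giving every entry method a call edge from it provides
// the call graph with a single source node.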
if (soot_callgraph.isEntryMethod(func) || func.isEntryMethod()) {
CgEdge p = new CgEdge(Constants.SUPER_MAIN, id, null, call_graph[Constants.SUPER_MAIN]);
call_graph[Constants.SUPER_MAIN] = p;
n_calls++;
}
if (!func.isJavaLibraryMethod())
++n_reach_spark_user_methods;
id++;
}
// Next, we scan all the call edges and rebuild the call graph in our own vocabulary
QueueReader<Edge> edgeList = Scene.v().getCallGraph().listener();
while (edgeList.hasNext()) {
Edge edge = edgeList.next();
if (edge.isClinit()) {
continue;
}
SootMethod src_func = edge.src();
SootMethod tgt_func = edge.tgt();
s = func2int.get(src_func);
t = func2int.get(tgt_func);
// Create a new call edge in our own format
CgEdge p = new CgEdge(s, t, edge, call_graph[s]);
call_graph[s] = p;
edgeMapping.put(edge, p);
// We collect callsite information
Stmt callsite = edge.srcStmt();
if (edge.isThreadRunCall() || edge.kind().isExecutor() || edge.kind().isAsyncTask()) {
// We do not change the treatment of the thread run() calls
thread_run_callsites.add(callsite);
} else if (edge.isInstance() && !edge.isSpecial()) {
// We try to refine the virtual callsites (virtual + interface) with multiple call targets
InstanceInvokeExpr expr = (InstanceInvokeExpr) callsite.getInvokeExpr();
if (expr.getMethodRef().getSignature().contains("<java.lang.Thread: void start()>")) {
// It is a thread start function
thread_run_callsites.add(callsite);
} else {
p.base_var = findLocalVarNode(expr.getBase());
if (SootInfo.countCallEdgesForCallsite(callsite, true) > 1 && p.base_var != null) {
multiCallsites.add(callsite);
}
}
}
++n_calls;
}
// We build the wrappers for all the pointers built by SPARK
for (Iterator<VarNode> it = getVarNodeNumberer().iterator(); it.hasNext(); ) {
VarNode vn = it.next();
IVarAbstraction pn = makeInternalNode(vn);
pointers.add(pn);
}
for (Iterator<AllocDotField> it = getAllocDotFieldNodeNumberer().iterator(); it.hasNext(); ) {
AllocDotField adf = it.next();
// Some AllocDotField nodes are invalid; we check for and skip them
SparkField field = adf.getField();
if (field instanceof SootField) {
// This is an instance field of a class
Type decType = ((SootField) field).getDeclaringClass().getType();
Type baseType = adf.getBase().getType();
// baseType must be a subtype of decType
if (!castNeverFails(baseType, decType))
continue;
}
IVarAbstraction pn = makeInternalNode(adf);
pointers.add(pn);
}
for (Iterator<AllocNode> it = getAllocNodeNumberer().iterator(); it.hasNext(); ) {
AllocNode obj = it.next();
IVarAbstraction pn = makeInternalNode(obj);
allocations.add(pn);
}
// The address constraints, new obj -> p
for (Object object : allocSources()) {
IVarAbstraction obj = makeInternalNode((AllocNode) object);
Node[] succs = allocLookup((AllocNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction p = makeInternalNode(element0);
cons.expr.setPair(obj, p);
cons.type = Constants.NEW_CONS;
constraints.add(cons);
}
}
// The assign constraints, p -> q
Pair<Node, Node> intercall = new Pair<Node, Node>();
for (Object object : simpleSources()) {
IVarAbstraction p = makeInternalNode((VarNode) object);
Node[] succs = simpleLookup((VarNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction q = makeInternalNode(element0);
cons.expr.setPair(p, q);
cons.type = Constants.ASSIGN_CONS;
intercall.setPair((VarNode) object, element0);
cons.interCallEdges = lookupEdgesForAssignment(intercall);
constraints.add(cons);
}
}
intercall = null;
assign2edges.clear();
// The load constraints, p.f -> q
for (Object object : loadSources()) {
FieldRefNode frn = (FieldRefNode) object;
IVarAbstraction p = makeInternalNode(frn.getBase());
Node[] succs = loadLookup(frn);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction q = makeInternalNode(element0);
cons.f = frn.getField();
cons.expr.setPair(p, q);
cons.type = Constants.LOAD_CONS;
constraints.add(cons);
}
}
// The store constraints, p -> q.f
for (Object object : storeSources()) {
IVarAbstraction p = makeInternalNode((VarNode) object);
Node[] succs = storeLookup((VarNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
FieldRefNode frn = (FieldRefNode) element0;
IVarAbstraction q = makeInternalNode(frn.getBase());
cons.f = frn.getField();
cons.expr.setPair(p, q);
cons.type = Constants.STORE_CONS;
constraints.add(cons);
}
}
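// To summarize, the four initial constraint kinds built above are (the Jimple
// statements on the left are illustrative examples, not statements taken from
// the surrounding code):
//   p = new A()  ==>  NEW_CONS     obj -> p    (address-of)
//   q = p        ==>  ASSIGN_CONS  p -> q      (copy)
//   q = p.f      ==>  LOAD_CONS    p.f -> q    (field read)
//   q.f = p      ==>  STORE_CONS   p -> q.f    (field write)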
n_init_constraints = constraints.size();
// Initialize the auxiliary call-graph data structures
low_cg = new int[n_func];
vis_cg = new int[n_func];
rep_cg = new int[n_func];
indeg_cg = new int[n_func];
scc_size = new int[n_func];
block_num = new int[n_func];
context_size = new long[n_func];
max_context_size_block = new long[n_func];
}
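preprocess() is invoked when the geometric solver starts, which is in turn driven by Spark. For orientation, a minimal sketch of a driver that enables it (the class name GeomPtaDriver is hypothetical and the usual classpath/target-class setup is omitted; geom-pta is the Spark phase option that activates GeomPointsTo):
import soot.PackManager;
import soot.Scene;
import soot.options.Options;
public class GeomPtaDriver {
public static void main(String[] args) {
// Whole-program mode is required for call-graph construction.
Options.v().set_whole_program(true);
// Enable Spark, then ask it to run the geometric points-to analysis.
Options.v().setPhaseOption("cg.spark", "enabled:true");
Options.v().setPhaseOption("cg.spark", "geom-pta:true");
Scene.v().loadNecessaryClasses();
PackManager.v().runPacks();
// Scene.v().getPointsToAnalysis() should now be the GeomPointsTo instance.
}
}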
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class OfflineProcessor, the method buildDependenceGraph:
/**
* The dependence graph reverses the assignment relations, e.g., p = q becomes p -> q.
* Note that the assignments eliminated by local variable merging must still be used here.
* Otherwise, the graph would be erroneously disconnected.
*/
protected void buildDependenceGraph() {
for (PlainConstraint cons : geomPTA.constraints) {
// In our constraint representation, lhs -> rhs means rhs = lhs.
final IVarAbstraction lhs = cons.getLHS();
final IVarAbstraction rhs = cons.getRHS();
final SparkField field = cons.f;
IVarAbstraction rep;
// Now we use this constraint for graph construction
switch(cons.type) {
// rhs = lhs
case Constants.ASSIGN_CONS:
add_graph_edge(rhs.id, lhs.id);
break;
// rhs = lhs.f
case Constants.LOAD_CONS:
{
rep = lhs.getRepresentative();
if (rep.hasPTResult() == false) {
lhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
if (padf == null || padf.reachable() == false)
return;
off_graph_edge e = add_graph_edge(rhs.id, padf.id);
e.base_var = lhs;
}
});
} else {
// Use the geometric points-to result
for (AllocNode o : rep.get_all_points_to_objects()) {
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
if (padf == null || padf.reachable() == false)
continue;
off_graph_edge e = add_graph_edge(rhs.id, padf.id);
e.base_var = lhs;
}
}
}
break;
// rhs.f = lhs
case Constants.STORE_CONS:
{
rep = rhs.getRepresentative();
if (rep.hasPTResult() == false) {
rhs.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
if (padf == null || padf.reachable() == false)
return;
off_graph_edge e = add_graph_edge(padf.id, lhs.id);
e.base_var = rhs;
}
});
} else {
// Use the geometric points-to result
for (AllocNode o : rep.get_all_points_to_objects()) {
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
if (padf == null || padf.reachable() == false)
continue;
off_graph_edge e = add_graph_edge(padf.id, lhs.id);
e.base_var = rhs;
}
}
}
break;
}
}
}
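To make the edge directions concrete, a small worked sketch (hypothetical locals a and b, with pts(a) = { o }):
// Jimple statement    constraint (lhs -> rhs)    dependence edge(s) added
// b = a;              ASSIGN: a -> b             b -> a
// b = a.f;            LOAD:   a.f -> b           b -> o.f   (e.base_var = a)
// a.f = b;            STORE:  b -> a.f           o.f -> b   (e.base_var = a)
Reversing the flow lets a backward traversal from a seed pointer discover exactly the constraints that can feed it.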
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class OfflineProcessor, the method distillConstraints:
/**
* Eliminate the constraints that do not contribute points-to information to the seed pointers.
* Prerequisite: the dependence graph has been built.
*/
protected void distillConstraints() {
IVarAbstraction pn;
// Mark the pointers
computeReachablePts();
// Mark the constraints
for (PlainConstraint cons : geomPTA.constraints) {
// We only look at the receiver pointers
pn = cons.getRHS();
final SparkField field = cons.f;
visitedFlag = false;
switch(cons.type) {
case Constants.NEW_CONS:
case Constants.ASSIGN_CONS:
case Constants.LOAD_CONS:
visitedFlag = pn.willUpdate;
break;
case Constants.STORE_CONS:
/**
* A subtle point about the store constraint p.f = q:
* Suppose pts(p) = { o1, o2 }.
* If either o1.f or o2.f (say o1.f) will be updated, this constraint must be kept.
* However, during the points-to analysis, we then only assign to o1.f.
*/
pn = pn.getRepresentative();
if (pn.hasPTResult() == false) {
pn.getWrappedNode().getP2Set().forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
if (visitedFlag)
return;
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) n, field);
if (padf == null || padf.reachable() == false)
return;
visitedFlag |= padf.willUpdate;
}
});
} else {
// Use the geometric points-to result
for (AllocNode o : pn.get_all_points_to_objects()) {
IVarAbstraction padf = geomPTA.findInstanceField((AllocNode) o, field);
if (padf == null || padf.reachable() == false)
continue;
visitedFlag |= padf.willUpdate;
if (visitedFlag)
break;
}
}
break;
}
cons.isActive = visitedFlag;
}
}
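A minimal sketch of how the computed flag might be consumed downstream (the loop is hypothetical; the real solver consults isActive internally when it propagates points-to facts):
for (PlainConstraint cons : geomPTA.constraints) {
if (!cons.isActive) {
// Pruned: this constraint cannot contribute points-to facts to any seed pointer.
continue;
}
// ... propagate cons as usual ...
}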
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class DemandCSPointsTo, the method findContextsForAllocs:
protected AllocAndContextSet findContextsForAllocs(final VarAndContext varAndContext, PointsToSetInternal locs) {
if (contextForAllocsStack.contains(varAndContext)) {
// recursion detected; check the depth
// direct self-recursion such as x = x.next is fine
int oldIndex = contextForAllocsStack.indexOf(varAndContext);
if (oldIndex != contextForAllocsStack.size() - 1) {
if (recursionDepth == -1) {
recursionDepth = oldIndex + 1;
if (DEBUG) {
debugPrint("RECURSION depth = " + recursionDepth);
}
} else if (contextForAllocsStack.size() - oldIndex > 5) {
// just give up
throw new TerminateEarlyException();
}
}
}
contextForAllocsStack.push(varAndContext);
final AllocAndContextSet ret = new AllocAndContextSet();
final PointsToSetInternal realLocs = checkContextsForAllocsCache(varAndContext, ret, locs);
if (realLocs.isEmpty()) {
if (DEBUG) {
debugPrint("cached result " + ret);
}
contextForAllocsStack.pop();
return ret;
}
nesting++;
if (DEBUG) {
debugPrint("finding alloc contexts for " + varAndContext);
}
try {
final Set<VarAndContext> marked = new HashSet<VarAndContext>();
final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
p.prop(varAndContext);
IncomingEdgeHandler edgeHandler = new IncomingEdgeHandler() {
@Override
public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
if (realLocs.contains(allocNode)) {
if (DEBUG) {
debugPrint("found alloc " + allocNode);
}
ret.add(new AllocAndContext(allocNode, origVarAndContext.context));
}
}
@Override
public void handleMatchSrc(final VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
if (DEBUG) {
debugPrint("handling src " + matchSrc);
debugPrint("intersection " + intersection);
}
if (!refine) {
p.prop(new VarAndContext(matchSrc, EMPTY_CALLSTACK));
return;
}
AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(loadBase, origVarAndContext.context), intersection);
if (DEBUG) {
debugPrint("alloc contexts " + allocContexts);
}
for (AllocAndContext allocAndContext : allocContexts) {
if (DEBUG) {
debugPrint("alloc and context " + allocAndContext);
}
CallingContextSet matchSrcContexts;
if (fieldCheckHeuristic.validFromBothEnds(field)) {
matchSrcContexts = findUpContextsForVar(allocAndContext, new VarContextAndUp(storeBase, EMPTY_CALLSTACK, EMPTY_CALLSTACK));
} else {
matchSrcContexts = findVarContextsFromAlloc(allocAndContext, storeBase);
}
for (ImmutableStack<Integer> matchSrcContext : matchSrcContexts) {
// ret
// .add(new Pair<AllocNode,
// ImmutableStack<Integer>>(
// (AllocNode) n,
// matchSrcContext));
// ret.addAll(findContextsForAllocs(matchSrc,
// matchSrcContext, locs));
p.prop(new VarAndContext(matchSrc, matchSrcContext));
}
}
}
@Override
Object getResult() {
return ret;
}
@Override
void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
p.prop(newVarAndContext);
}
@Override
boolean shouldHandleSrc(VarNode src) {
return realLocs.hasNonEmptyIntersection(src.getP2Set());
}
};
processIncomingEdges(edgeHandler, worklist);
// update the cache
if (recursionDepth != -1) {
// if we're beyond recursion, don't cache anything
if (contextForAllocsStack.size() > recursionDepth) {
if (DEBUG) {
debugPrint("REMOVING " + varAndContext);
debugPrint(contextForAllocsStack.toString());
}
contextsForAllocsCache.remove(varAndContext);
} else {
assert contextForAllocsStack.size() == recursionDepth : recursionDepth + " " + contextForAllocsStack;
recursionDepth = -1;
if (contextsForAllocsCache.containsKey(varAndContext)) {
contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
} else {
PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
storedSet.addAll(locs, null);
contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
}
}
} else {
if (contextsForAllocsCache.containsKey(varAndContext)) {
contextsForAllocsCache.get(varAndContext).getO2().addAll(ret);
} else {
PointsToSetInternal storedSet = new HybridPointsToSet(locs.getType(), pag);
storedSet.addAll(locs, null);
contextsForAllocsCache.put(varAndContext, new Pair<PointsToSetInternal, AllocAndContextSet>(storedSet, ret));
}
}
nesting--;
return ret;
} catch (CallSiteException e) {
contextsForAllocsCache.remove(varAndContext);
throw e;
} finally {
contextForAllocsStack.pop();
}
}
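For orientation, a minimal sketch of how this demand-driven analysis is typically created and queried (it assumes Spark has already run so that the factory can read the Spark PAG from the Scene; the helper class name and budget values are illustrative):
import soot.Local;
import soot.PointsToSet;
import soot.jimple.spark.ondemand.DemandCSPointsTo;
class DemandQueryExample {
static PointsToSet refinedPointsTo(Local l) {
// maxTraversal = 75000 visited nodes and maxPasses = 10 refinement passes are
// illustrative budgets; lazy = true defers refinement until a query needs it.
DemandCSPointsTo demandPta = DemandCSPointsTo.makeWithBudget(75000, 10, true);
return demandPta.reachingObjects(l);
}
}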
Use of soot.jimple.spark.pag.SparkField in project soot by Sable.
In class DemandCSPointsTo, the method findUpContextsForVar:
protected CallingContextSet findUpContextsForVar(AllocAndContext allocAndContext, VarContextAndUp varContextAndUp) {
final AllocNode alloc = allocAndContext.alloc;
final ImmutableStack<Integer> allocContext = allocAndContext.context;
CallingContextSet tmpSet = checkUpContextCache(varContextAndUp, allocAndContext);
if (tmpSet != null) {
return tmpSet;
}
final CallingContextSet ret = new CallingContextSet();
upContextCache.get(varContextAndUp).put(allocAndContext, ret);
nesting++;
if (DEBUG) {
debugPrint("finding up context for " + varContextAndUp + " to " + alloc + " " + allocContext);
}
try {
final Set<VarAndContext> marked = new HashSet<VarAndContext>();
final Stack<VarAndContext> worklist = new Stack<VarAndContext>();
final Propagator<VarAndContext> p = new Propagator<VarAndContext>(marked, worklist);
p.prop(varContextAndUp);
class UpContextEdgeHandler extends IncomingEdgeHandler {
@Override
public void handleAlloc(AllocNode allocNode, VarAndContext origVarAndContext) {
VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
if (allocNode == alloc) {
if (allocContext.topMatches(contextAndUp.context)) {
ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
ImmutableStack<Integer> toAdd = allocContext.popAll(contextAndUp.context).pushAll(reverse);
if (DEBUG) {
debugPrint("found up context " + toAdd);
}
ret.add(toAdd);
} else if (contextAndUp.context.topMatches(allocContext)) {
ImmutableStack<Integer> toAdd = contextAndUp.upContext.reverse();
if (DEBUG) {
debugPrint("found up context " + toAdd);
}
ret.add(toAdd);
}
}
}
@Override
public void handleMatchSrc(VarNode matchSrc, PointsToSetInternal intersection, VarNode loadBase, VarNode storeBase, VarAndContext origVarAndContext, SparkField field, boolean refine) {
VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
if (DEBUG) {
debugPrint("CHECKING " + alloc);
}
PointsToSetInternal tmp = new HybridPointsToSet(alloc.getType(), pag);
tmp.add(alloc);
AllocAndContextSet allocContexts = findContextsForAllocs(new VarAndContext(matchSrc, EMPTY_CALLSTACK), tmp);
// Set allocContexts = Collections.singleton(new Object());
if (!refine) {
if (!allocContexts.isEmpty()) {
ret.add(contextAndUp.upContext.reverse());
}
} else {
if (!allocContexts.isEmpty()) {
for (AllocAndContext t : allocContexts) {
ImmutableStack<Integer> discoveredAllocContext = t.context;
if (!allocContext.topMatches(discoveredAllocContext)) {
continue;
}
ImmutableStack<Integer> trueAllocContext = allocContext.popAll(discoveredAllocContext);
AllocAndContextSet allocAndContexts = findContextsForAllocs(new VarAndContext(storeBase, trueAllocContext), intersection);
for (AllocAndContext allocAndContext : allocAndContexts) {
// CallingContextSet upContexts;
if (fieldCheckHeuristic.validFromBothEnds(field)) {
ret.addAll(findUpContextsForVar(allocAndContext, new VarContextAndUp(loadBase, contextAndUp.context, contextAndUp.upContext)));
} else {
CallingContextSet tmpContexts = findVarContextsFromAlloc(allocAndContext, loadBase);
// upContexts = new CallingContextSet();
for (ImmutableStack<Integer> tmpContext : tmpContexts) {
if (tmpContext.topMatches(contextAndUp.context)) {
ImmutableStack<Integer> reverse = contextAndUp.upContext.reverse();
ImmutableStack<Integer> toAdd = tmpContext.popAll(contextAndUp.context).pushAll(reverse);
ret.add(toAdd);
}
}
}
}
}
}
}
}
@Override
Object getResult() {
return ret;
}
@Override
void handleAssignSrc(VarAndContext newVarAndContext, VarAndContext origVarAndContext, AssignEdge assignEdge) {
VarContextAndUp contextAndUp = (VarContextAndUp) origVarAndContext;
ImmutableStack<Integer> upContext = contextAndUp.upContext;
ImmutableStack<Integer> newUpContext = upContext;
if (assignEdge.isParamEdge() && contextAndUp.context.isEmpty()) {
if (upContext.size() < ImmutableStack.getMaxSize()) {
newUpContext = pushWithRecursionCheck(upContext, assignEdge);
}
}
p.prop(new VarContextAndUp(newVarAndContext.var, newVarAndContext.context, newUpContext));
}
@Override
boolean shouldHandleSrc(VarNode src) {
if (src instanceof GlobalVarNode) {
// for now, just give up
throw new TerminateEarlyException();
}
return src.getP2Set().contains(alloc);
}
}
UpContextEdgeHandler edgeHandler = new UpContextEdgeHandler();
processIncomingEdges(edgeHandler, worklist);
nesting--;
return ret;
} catch (CallSiteException e) {
upContextCache.remove(varContextAndUp);
throw e;
}
}
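Both this method and findContextsForAllocs abort with TerminateEarlyException once the recursion bound or the refinement budget is exhausted, and the enclosing query is then expected to fall back to Spark's context-insensitive answer. A minimal sketch of that fallback pattern (computeRefinedReachingObjects and the pag/local variables are hypothetical stand-ins; TerminateEarlyException is the class thrown above):
PointsToSet result;
try {
result = computeRefinedReachingObjects(local); // hypothetical refinement driver
} catch (TerminateEarlyException e) {
// Refinement gave up; return Spark's context-insensitive points-to set instead.
result = pag.reachingObjects(local);
}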