Usage example of soot.jimple.spark.pag.AllocDotField in the Soot project (by Sable): class GeomPointsTo, method finalizeInternalData.
/**
 * 1. Update the call graph;
 * 2. Eliminate the pointers, objects, and constraints related to the unreachable code.
 */
private void finalizeInternalData() {
// Compute the set of reachable functions after the points-to analysis
markReachableMethods();
// Clean the unreachable objects: drop every allocation whose containing
// method did not survive reachability (func2int only maps reachable methods).
for (Iterator<IVarAbstraction> it = allocations.iterator(); it.hasNext(); ) {
IVarAbstraction po = it.next();
AllocNode obj = (AllocNode) po.getWrappedNode();
SootMethod sm = obj.getMethod();
// sm == null: object not tied to any method, keep it unconditionally
if (sm != null && func2int.containsKey(sm) == false)
it.remove();
}
// Clean the unreachable pointers
final Vector<AllocNode> removeSet = new Vector<AllocNode>();
for (Iterator<IVarAbstraction> it = pointers.iterator(); it.hasNext(); ) {
IVarAbstraction pn = it.next();
// Is this pointer obsoleted?
// Determine the method that owns this pointer: locals carry it directly,
// alloc-dot-fields inherit it from their base allocation site.
Node vn = pn.getWrappedNode();
SootMethod sm = null;
if (vn instanceof LocalVarNode) {
sm = ((LocalVarNode) vn).getMethod();
} else if (vn instanceof AllocDotField) {
sm = ((AllocDotField) vn).getBase().getMethod();
}
if (sm != null) {
if (func2int.containsKey(sm) == false) {
// Owning method is unreachable: discard the pointer and its results
pn.deleteAll();
vn.discardP2Set();
it.remove();
continue;
}
}
// Only union representatives carry points-to results; skip merged members
if (pn.getRepresentative() != pn)
continue;
removeSet.clear();
if (pn.hasPTResult()) {
// We remove the useless shapes or objects
// (objects whose abstraction is unreachable, or dead for this pointer)
Set<AllocNode> objSet = pn.get_all_points_to_objects();
for (Iterator<AllocNode> oit = objSet.iterator(); oit.hasNext(); ) {
AllocNode obj = oit.next();
IVarAbstraction po = consG.get(obj);
if (!po.reachable() || pn.isDeadObject(obj)) {
removeSet.add(obj);
}
}
for (AllocNode obj : removeSet) pn.remove_points_to(obj);
pn.drop_duplicates();
} else {
// We also remove unreachable objects for SPARK nodes
PointsToSetInternal pts = vn.getP2Set();
pts.forall(new P2SetVisitor() {
@Override
public void visit(Node n) {
IVarAbstraction pan = findInternalNode(n);
// The removeSet is misused as a contains set:
// here it accumulates the objects to KEEP (the reachable ones),
// and the points-to set is rebuilt from it below.
if (pan.reachable())
removeSet.add((AllocNode) n);
}
});
// Replace the old set with a fresh one holding only the survivors
pts = vn.makeP2Set();
for (AllocNode an : removeSet) pts.add(an);
}
}
// Clean the useless constraints: a constraint is dead if either endpoint is
// unreachable or can no longer be attributed to a known method.
for (Iterator<PlainConstraint> cIt = constraints.iterator(); cIt.hasNext(); ) {
PlainConstraint cons = cIt.next();
IVarAbstraction lhs = cons.getLHS();
IVarAbstraction rhs = cons.getRHS();
if (!lhs.reachable() || !rhs.reachable() || getMethodIDFromPtr(lhs) == Constants.UNKNOWN_FUNCTION || getMethodIDFromPtr(rhs) == Constants.UNKNOWN_FUNCTION) {
cIt.remove();
}
}
// We reassign the IDs to the pointers, objects and constraints
// (compacts the numbering after the removals above)
pointers.reassign();
allocations.reassign();
constraints.reassign();
}
Usage example of soot.jimple.spark.pag.AllocDotField in the Soot project (by Sable): class GeomPointsTo, method preprocess.
/**
 * Read in the program facts generated by SPARK.
 * We also construct our own call graph and pointer variables.
 */
private void preprocess() {
int id;
int s, t;
// Build the call graph
// +1 reserves a slot for the synthetic root Constants.SUPER_MAIN;
// real methods receive ids starting at 1.
n_func = Scene.v().getReachableMethods().size() + 1;
call_graph = new CgEdge[n_func];
n_calls = 0;
n_reach_spark_user_methods = 0;
id = 1;
QueueReader<MethodOrMethodContext> smList = Scene.v().getReachableMethods().listener();
CallGraph soot_callgraph = Scene.v().getCallGraph();
while (smList.hasNext()) {
final SootMethod func = smList.next().method();
// Bidirectional method <-> id mapping used throughout the analysis
func2int.put(func, id);
int2func.put(id, func);
/*
 * We cannot identify all entry methods since some entry methods call themselves.
 * In that case, the Soot CallGraph.isEntryMethod() function returns false.
 */
if (soot_callgraph.isEntryMethod(func) || func.isEntryMethod()) {
// Link every entry method under the synthetic SUPER_MAIN root
CgEdge p = new CgEdge(Constants.SUPER_MAIN, id, null, call_graph[Constants.SUPER_MAIN]);
call_graph[Constants.SUPER_MAIN] = p;
n_calls++;
}
if (!func.isJavaLibraryMethod())
++n_reach_spark_user_methods;
id++;
}
// Next, we scan all the call edges and rebuild the call graph in our own vocabulary
QueueReader<Edge> edgeList = Scene.v().getCallGraph().listener();
while (edgeList.hasNext()) {
Edge edge = edgeList.next();
if (edge.isClinit()) {
continue;
}
SootMethod src_func = edge.src();
SootMethod tgt_func = edge.tgt();
// NOTE(review): auto-unboxing here throws NPE if an edge endpoint was not
// registered in func2int above — presumably every non-clinit edge endpoint
// is a reachable method; confirm against Soot's CallGraph invariants.
s = func2int.get(src_func);
t = func2int.get(tgt_func);
// Create a new call edge in our own format
// (edges from the same source are chained via call_graph[s])
CgEdge p = new CgEdge(s, t, edge, call_graph[s]);
call_graph[s] = p;
edgeMapping.put(edge, p);
// We collect callsite information
Stmt callsite = edge.srcStmt();
if (edge.isThreadRunCall() || edge.kind().isExecutor() || edge.kind().isAsyncTask()) {
// We don't modify the treatment to the thread run() calls
thread_run_callsites.add(callsite);
} else if (edge.isInstance() && !edge.isSpecial()) {
// We try to refine the virtual callsites (virtual + interface) with multiple call targets
InstanceInvokeExpr expr = (InstanceInvokeExpr) callsite.getInvokeExpr();
if (expr.getMethodRef().getSignature().contains("<java.lang.Thread: void start()>")) {
// It is a thread start function
thread_run_callsites.add(callsite);
} else {
// Remember the receiver so the callsite can be refined later;
// only polymorphic sites (>1 target) are queued for refinement.
p.base_var = findLocalVarNode(expr.getBase());
if (SootInfo.countCallEdgesForCallsite(callsite, true) > 1 && p.base_var != null) {
multiCallsites.add(callsite);
}
}
}
++n_calls;
}
// We build the wrappers for all the pointers built by SPARK
for (Iterator<VarNode> it = getVarNodeNumberer().iterator(); it.hasNext(); ) {
VarNode vn = it.next();
IVarAbstraction pn = makeInternalNode(vn);
pointers.add(pn);
}
for (Iterator<AllocDotField> it = getAllocDotFieldNodeNumberer().iterator(); it.hasNext(); ) {
AllocDotField adf = it.next();
// Some allocdotfield is invalid, we check and remove them
SparkField field = adf.getField();
if (field instanceof SootField) {
// This is an instance field of a class
Type decType = ((SootField) field).getDeclaringClass().getType();
Type baseType = adf.getBase().getType();
// baseType must be a sub type of decType
if (!castNeverFails(baseType, decType))
continue;
}
IVarAbstraction pn = makeInternalNode(adf);
pointers.add(pn);
}
// Wrap every allocation site as an internal object node
for (Iterator<AllocNode> it = getAllocNodeNumberer().iterator(); it.hasNext(); ) {
AllocNode obj = it.next();
IVarAbstraction pn = makeInternalNode(obj);
allocations.add(pn);
}
// The address constraints, new obj -> p
for (Object object : allocSources()) {
IVarAbstraction obj = makeInternalNode((AllocNode) object);
Node[] succs = allocLookup((AllocNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction p = makeInternalNode(element0);
cons.expr.setPair(obj, p);
cons.type = Constants.NEW_CONS;
constraints.add(cons);
}
}
// The assign constraints, p -> q
// intercall is a reused scratch pair for looking up the call edges that
// induced each assignment (parameter passing / return values).
Pair<Node, Node> intercall = new Pair<Node, Node>();
for (Object object : simpleSources()) {
IVarAbstraction p = makeInternalNode((VarNode) object);
Node[] succs = simpleLookup((VarNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction q = makeInternalNode(element0);
cons.expr.setPair(p, q);
cons.type = Constants.ASSIGN_CONS;
intercall.setPair((VarNode) object, element0);
cons.interCallEdges = lookupEdgesForAssignment(intercall);
constraints.add(cons);
}
}
// The assignment-to-edge map is no longer needed after this point
intercall = null;
assign2edges.clear();
// The load constraints, p.f -> q
for (Object object : loadSources()) {
FieldRefNode frn = (FieldRefNode) object;
IVarAbstraction p = makeInternalNode(frn.getBase());
Node[] succs = loadLookup(frn);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
IVarAbstraction q = makeInternalNode(element0);
cons.f = frn.getField();
cons.expr.setPair(p, q);
cons.type = Constants.LOAD_CONS;
constraints.add(cons);
}
}
// The store constraints, p -> q.f
for (Object object : storeSources()) {
IVarAbstraction p = makeInternalNode((VarNode) object);
Node[] succs = storeLookup((VarNode) object);
for (Node element0 : succs) {
PlainConstraint cons = new PlainConstraint();
FieldRefNode frn = (FieldRefNode) element0;
IVarAbstraction q = makeInternalNode(frn.getBase());
cons.f = frn.getField();
cons.expr.setPair(p, q);
cons.type = Constants.STORE_CONS;
constraints.add(cons);
}
}
n_init_constraints = constraints.size();
// Initialize other stuff
// (per-function working arrays for SCC detection and context sizing)
low_cg = new int[n_func];
vis_cg = new int[n_func];
rep_cg = new int[n_func];
indeg_cg = new int[n_func];
scc_size = new int[n_func];
block_num = new int[n_func];
context_size = new long[n_func];
max_context_size_block = new long[n_func];
}
Usage example of soot.jimple.spark.pag.AllocDotField in the Soot project (by Sable): class SparkTransformer, method findSetMass.
/**
 * Collects and logs statistics about the points-to sets computed by SPARK:
 * the total set mass, the mass contributed by variable nodes, the number of
 * scalar variables, the number of alloc-dot-field nodes with non-empty sets,
 * and a histogram of points-to set sizes at dereference sites (measured
 * BEFORE any trimming by declared type).
 *
 * @param pag the pointer assignment graph whose points-to sets are measured
 */
protected void findSetMass(PAG pag) {
    int mass = 0;
    int varMass = 0;
    int adfs = 0;
    int scalars = 0;
    // Mass contributed by scalar variable nodes
    for (final VarNode v : pag.getVarNodeNumberer()) {
        scalars++;
        PointsToSetInternal set = v.getP2Set();
        if (set != null) {
            // Hoist size() and fold both accumulations into one null check
            int size = set.size();
            mass += size;
            varMass += size;
        }
    }
    // Mass contributed by alloc-dot-field nodes (obj.field abstractions)
    for (final AllocNode an : pag.allocSources()) {
        for (final AllocDotField adf : an.getFields()) {
            PointsToSetInternal set = adf.getP2Set();
            if (set != null) {
                int size = set.size();
                mass += size;
                if (size > 0) {
                    // Only fields with a non-empty set count as "adfs"
                    adfs++;
                }
            }
        }
    }
    logger.debug("Set mass: " + mass);
    logger.debug("Variable mass: " + varMass);
    logger.debug("Scalars: " + scalars);
    logger.debug("adfs: " + adfs);
    // Compute points-to set sizes of dereference sites BEFORE
    // trimming sets by declared type
    int[] deRefCounts = new int[30001];
    for (VarNode v : pag.getDereferences()) {
        PointsToSetInternal set = v.getP2Set();
        int size = 0;
        if (set != null)
            size = set.size();
        // Clamp into the last bucket so an unusually large set (> 30000)
        // cannot index past the end of the histogram array.
        deRefCounts[Math.min(size, deRefCounts.length - 1)]++;
    }
    int total = 0;
    for (int element : deRefCounts) total += element;
    logger.debug("Dereference counts BEFORE trimming (total = " + total + "):");
    for (int i = 0; i < deRefCounts.length; i++) {
        if (deRefCounts[i] > 0) {
            logger.debug("" + i + " " + deRefCounts[i] + " " + (deRefCounts[i] * 100.0 / total) + "%");
        }
    }
}
Usage example of soot.jimple.spark.pag.AllocDotField in the Soot project (by Sable): class GeomQueries, method contextsGoBy.
/**
 * Searches the points-to results for a field expression such as p.f, under
 * the calling contexts selected by {@code sootEdge}.
 *
 * @param sootEdge the call edge that fixes the contexts of interest
 * @param l the local variable p whose field is dereferenced
 * @param field the field f looked up on each object that l may point to
 * @param visitor receives the context-sensitive objects found for p.f
 * @return true if at least one distinct object was reported to the visitor
 */
@SuppressWarnings("rawtypes")
public boolean contextsGoBy(Edge sootEdge, Local l, SparkField field, PtSensVisitor visitor) {
    // First obtain the context-sensitive objects that l itself points to
    Obj_full_extractor pts_l = new Obj_full_extractor();
    if (contextsGoBy(sootEdge, l, pts_l) == false)
        return false;
    visitor.prepare();
    for (IntervalContextVar icv : pts_l.outList) {
        AllocNode obj = (AllocNode) icv.var;
        // Resolve the obj.field abstraction; it may not exist if the field
        // was never tracked for this allocation site
        AllocDotField obj_f = geomPTA.findAllocDotField(obj, field);
        if (obj_f == null)
            continue;
        IVarAbstraction objField = geomPTA.findInternalNode(obj_f);
        if (objField == null)
            continue;
        // [L, R) is the context interval under which l points to obj
        long L = icv.L;
        long R = icv.R;
        assert L < R;
        objField.get_all_context_sensitive_objects(L, R, visitor);
    }
    // (removed dead assignment "pts_l = null;" — the local goes out of scope
    // immediately, so nulling it has no effect)
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
}
Usage example of soot.jimple.spark.pag.AllocDotField in the Soot project (by Sable): class PropWorklist, method propagate.
/**
 * Actually does the propagation.
 */
public final void propagate() {
ofcg = pag.getOnFlyCallGraph();
new TopoSorter(pag, false).sort();
// Seed the worklist: process every allocation site once up front
for (AllocNode object : pag.allocSources()) {
handleAllocNode(object);
}
boolean verbose = pag.getOpts().verbose();
// Fixpoint loop: alternate between draining the variable worklist and
// propagating through field references, until no variable changes.
do {
if (verbose) {
logger.debug("Worklist has " + varNodeWorkList.size() + " nodes.");
}
while (!varNodeWorkList.isEmpty()) {
VarNode src = varNodeWorkList.iterator().next();
varNodeWorkList.remove(src);
handleVarNode(src);
}
if (verbose) {
logger.debug("Now handling field references");
}
// Stores p -> q.f: for every object n that q's base may point to,
// merge p's points-to set into the n.f abstraction.
for (Object object : pag.storeSources()) {
final VarNode src = (VarNode) object;
Node[] targets = pag.storeLookup(src);
for (Node element0 : targets) {
final FieldRefNode target = (FieldRefNode) element0;
target.getBase().makeP2Set().forall(new P2SetVisitor() {
public final void visit(Node n) {
AllocDotField nDotF = pag.makeAllocDotField((AllocNode) n, target.getField());
if (ofcg != null) {
// Keep the on-the-fly call graph informed of field updates
ofcg.updatedFieldRef(nDotF, src.getP2Set());
}
nDotF.makeP2Set().addAll(src.getP2Set(), null);
}
});
}
}
// Loads q.f -> p: collected as (field points-to set, load target) pairs
// so that all loads are processed before any "new" sets are flushed.
HashSet<Object[]> edgesToPropagate = new HashSet<Object[]>();
for (Object object : pag.loadSources()) {
handleFieldRefNode((FieldRefNode) object, edgesToPropagate);
}
// Identity set: the same PointsToSetInternal may appear in several pairs
// and must be flushed exactly once, after all load targets have seen it.
Set<PointsToSetInternal> nodesToFlush = Collections.newSetFromMap(new IdentityHashMap<PointsToSetInternal, Boolean>());
for (Object[] pair : edgesToPropagate) {
PointsToSetInternal nDotF = (PointsToSetInternal) pair[0];
PointsToSetInternal newP2Set = nDotF.getNewSet();
VarNode loadTarget = (VarNode) pair[1];
// Re-queue the load target only if its set actually grew
if (loadTarget.makeP2Set().addAll(newP2Set, null)) {
varNodeWorkList.add(loadTarget);
}
nodesToFlush.add(nDotF);
}
for (PointsToSetInternal nDotF : nodesToFlush) {
nDotF.flushNew();
}
} while (!varNodeWorkList.isEmpty());
}
Aggregations