Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
The class EvalResults, method estimateHeapDefuseGraph.
/**
* Estimate the size of the def-use graph for the heap memory. The heap
* graph is estimated without context information.
*/
public void estimateHeapDefuseGraph() {
    final Map<IVarAbstraction, int[]> defUseCounterForGeom = new HashMap<IVarAbstraction, int[]>();
    final Map<AllocDotField, int[]> defUseCounterForSpark = new HashMap<AllocDotField, int[]>();
    Date begin = new Date();
    for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
        if (sm.isJavaLibraryMethod())
            continue;
        if (!sm.isConcrete())
            continue;
        if (!sm.hasActiveBody()) {
            sm.retrieveActiveBody();
        }
        if (!ptsProvider.isValidMethod(sm))
            continue;
        // We first gather all the memory access expressions
        for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext();) {
            Stmt st = (Stmt) stmts.next();
            if (!(st instanceof AssignStmt))
                continue;
            AssignStmt a = (AssignStmt) st;
            final Value lValue = a.getLeftOp();
            final Value rValue = a.getRightOp();
            InstanceFieldRef ifr = null;
            if (lValue instanceof InstanceFieldRef) {
                // Def statement
                ifr = (InstanceFieldRef) lValue;
            } else if (rValue instanceof InstanceFieldRef) {
                // Use statement
                ifr = (InstanceFieldRef) rValue;
            }
            if (ifr != null) {
                final SootField field = ifr.getField();
                LocalVarNode vn = ptsProvider.findLocalVarNode((Local) ifr.getBase());
                if (vn == null)
                    continue;
                IVarAbstraction pn = ptsProvider.findInternalNode(vn);
                if (pn == null)
                    continue;
                pn = pn.getRepresentative();
                if (!pn.hasPTResult())
                    continue;
                // Spark
                vn.getP2Set().forall(new P2SetVisitor() {

                    @Override
                    public void visit(Node n) {
                        IVarAbstraction padf = ptsProvider.findAndInsertInstanceField((AllocNode) n, field);
                        AllocDotField adf = (AllocDotField) padf.getWrappedNode();
                        int[] defUseUnit = defUseCounterForSpark.get(adf);
                        if (defUseUnit == null) {
                            defUseUnit = new int[2];
                            defUseCounterForSpark.put(adf, defUseUnit);
                        }
                        if (lValue instanceof InstanceFieldRef) {
                            defUseUnit[0]++;
                        } else {
                            defUseUnit[1]++;
                        }
                    }
                });
                // Geom
                Set<AllocNode> objsSet = pn.get_all_points_to_objects();
                for (AllocNode obj : objsSet) {
                    /*
                     * This creates many instance field nodes: the points-to analysis
                     * tracks only reference-type fields, but here we count reads and
                     * writes of all fields, including primitive-type ones.
                     */
                    IVarAbstraction padf = ptsProvider.findAndInsertInstanceField(obj, field);
                    int[] defUseUnit = defUseCounterForGeom.get(padf);
                    if (defUseUnit == null) {
                        defUseUnit = new int[2];
                        defUseCounterForGeom.put(padf, defUseUnit);
                    }
                    if (lValue instanceof InstanceFieldRef) {
                        defUseUnit[0]++;
                    } else {
                        defUseUnit[1]++;
                    }
                }
            }
        }
    }
    for (int[] defUseUnit : defUseCounterForSpark.values()) {
        evalRes.n_spark_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
    }
    for (int[] defUseUnit : defUseCounterForGeom.values()) {
        evalRes.n_geom_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
    }
    Date end = new Date();
    ptsProvider.ps.println();
    ptsProvider.ps.println("-----------> Heap Def Use Graph Evaluation <------------");
    ptsProvider.ps.println("The edges in the heap def-use graph is (by Geom): " + evalRes.n_geom_du_pairs);
    ptsProvider.ps.println("The edges in the heap def-use graph is (by Spark): " + evalRes.n_spark_du_pairs);
    ptsProvider.ps.printf("Using time: %dms \n", end.getTime() - begin.getTime());
    ptsProvider.ps.println();
}
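The final two loops implement a simple product formula: an abstract heap field with d recorded defs and u recorded uses contributes d * u edges to the heap def-use graph. A minimal standalone sketch of that counting step, using hypothetical counters instead of real Spark/Geom data:

import java.util.HashMap;
import java.util.Map;

public class DefUseCountSketch {
    public static void main(String[] args) {
        // Hypothetical counters: abstract field -> {number of defs, number of uses}
        Map<String, int[]> counter = new HashMap<String, int[]>();
        counter.put("o.f", new int[] { 3, 5 });
        counter.put("o.g", new int[] { 1, 0 });
        long pairs = 0;
        for (int[] defUseUnit : counter.values()) {
            // Same product as in estimateHeapDefuseGraph: defs * uses per abstract field
            pairs += ((long) defUseUnit[0]) * defUseUnit[1];
        }
        System.out.println(pairs); // 3 * 5 + 1 * 0 = 15
    }
}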
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
The class EvalResults, method test_1cfa_call_graph.
/**
* We assess the quality of building the 1-cfa call graph with the geometric
* points-to result.
*/
private void test_1cfa_call_graph(LocalVarNode vn, SootMethod caller, SootMethod callee_signature, Histogram ce_range) {
    long l, r;
    IVarAbstraction pn = ptsProvider.findInternalNode(vn);
    if (pn == null)
        return;
    pn = pn.getRepresentative();
    Set<SootMethod> tgts = new HashSet<SootMethod>();
    Set<AllocNode> set = pn.get_all_points_to_objects();
    LinkedList<CgEdge> list = ptsProvider.getCallEdgesInto(ptsProvider.getIDFromSootMethod(caller));
    FastHierarchy hierarchy = Scene.v().getOrMakeFastHierarchy();
    for (Iterator<CgEdge> it = list.iterator(); it.hasNext();) {
        CgEdge p = it.next();
        l = p.map_offset;
        r = l + ptsProvider.max_context_size_block[p.s];
        tgts.clear();
        for (AllocNode obj : set) {
            if (!pn.pointer_interval_points_to(l, r, obj))
                continue;
            Type t = obj.getType();
            if (t == null)
                continue;
            else if (t instanceof AnySubType)
                t = ((AnySubType) t).getBase();
            else if (t instanceof ArrayType)
                t = RefType.v("java.lang.Object");
            try {
                tgts.add(hierarchy.resolveConcreteDispatch(((RefType) t).getSootClass(), callee_signature));
            } catch (Exception e) {
                // Dispatch cannot be resolved for this receiver type; ignore it.
            }
        }
        tgts.remove(null);
        ce_range.addNumber(tgts.size());
    }
}
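In effect, for each call edge into the caller the method collects the distinct dispatch targets reachable under that edge's context range [l, r) and records the set size in the histogram. A rough standalone sketch of that recording step, with made-up target sets and a plain map standing in for Histogram:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DispatchHistogramSketch {
    public static void main(String[] args) {
        // Hypothetical dispatch targets resolved per call edge (stand-ins for SootMethods)
        List<Set<String>> targetsPerEdge = Arrays.asList(
                new HashSet<>(Arrays.asList("A.foo()")),             // monomorphic edge
                new HashSet<>(Arrays.asList("A.foo()", "B.foo()"))); // two possible targets
        Map<Integer, Integer> histogram = new HashMap<>();
        for (Set<String> tgts : targetsPerEdge) {
            // Plays the role of ce_range.addNumber(tgts.size())
            histogram.merge(tgts.size(), 1, Integer::sum);
        }
        System.out.println(histogram); // {1=1, 2=1}
    }
}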
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
The class EvalResults, method checkCastsSafety.
/**
* Count how many static casts can be determined safe.
*/
public void checkCastsSafety() {
    for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
        if (sm.isJavaLibraryMethod())
            continue;
        if (!sm.isConcrete())
            continue;
        if (!sm.hasActiveBody()) {
            sm.retrieveActiveBody();
        }
        if (!ptsProvider.isValidMethod(sm))
            continue;
        // All the statements in the method
        for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext();) {
            Stmt st = (Stmt) stmts.next();
            if (st instanceof AssignStmt) {
                Value rhs = ((AssignStmt) st).getRightOp();
                Value lhs = ((AssignStmt) st).getLeftOp();
                if (rhs instanceof CastExpr && lhs.getType() instanceof RefLikeType) {
                    Value v = ((CastExpr) rhs).getOp();
                    VarNode node = ptsProvider.findLocalVarNode(v);
                    if (node == null)
                        continue;
                    IVarAbstraction pn = ptsProvider.findInternalNode(node);
                    if (pn == null)
                        continue;
                    pn = pn.getRepresentative();
                    if (!pn.hasPTResult())
                        continue;
                    evalRes.total_casts++;
                    final Type targetType = (RefLikeType) ((CastExpr) rhs).getCastType();
                    // First evaluate the cast with the geometric points-to result
                    solved = true;
                    Set<AllocNode> set = pn.get_all_points_to_objects();
                    for (AllocNode obj : set) {
                        solved = ptsProvider.castNeverFails(obj.getType(), targetType);
                        if (solved == false)
                            break;
                    }
                    if (solved)
                        evalRes.geom_solved_casts++;
                    // Second is the SPARK result
                    solved = true;
                    node.getP2Set().forall(new P2SetVisitor() {

                        public void visit(Node arg0) {
                            if (solved == false)
                                return;
                            solved = ptsProvider.castNeverFails(arg0.getType(), targetType);
                        }
                    });
                    if (solved)
                        evalRes.spark_solved_casts++;
                }
            }
        }
    }
    ptsProvider.ps.println();
    ptsProvider.ps.println("-----------> Static Casts Safety Evaluation <------------");
    ptsProvider.ps.println("Total casts (app code): " + evalRes.total_casts);
    ptsProvider.ps.println("Safe casts: Geom = " + evalRes.geom_solved_casts + ", SPARK = " + evalRes.spark_solved_casts);
}
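The safety check itself is a universal quantification: a cast counts as safe only if castNeverFails holds for every type in the points-to set of the cast operand. A minimal sketch of that check outside Soot, with java.lang.Class and isAssignableFrom standing in for the analysis types and for castNeverFails:

import java.util.Arrays;
import java.util.List;

public class CastSafetySketch {
    public static void main(String[] args) {
        // Hypothetical points-to set of the cast operand
        List<Class<?>> pointsToTypes = Arrays.asList(java.util.ArrayList.class, java.util.LinkedList.class);
        Class<?> targetType = java.util.List.class;
        boolean solved = true;
        for (Class<?> t : pointsToTypes) {
            // isAssignableFrom plays the role of ptsProvider.castNeverFails(t, targetType)
            if (!targetType.isAssignableFrom(t)) {
                solved = false;
                break;
            }
        }
        System.out.println(solved); // true: both possible run-time types conform to List
    }
}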
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
The class MethodNodeFactory, method caseClassConstant.
@Override
public final void caseClassConstant(ClassConstant cc) {
    AllocNode classConstant = pag.makeClassConstantNode(cc);
    VarNode classConstantLocal = pag.makeGlobalVarNode(classConstant, RefType.v("java.lang.Class"));
    pag.addEdge(classConstant, classConstantLocal);
    setResult(classConstantLocal);
}
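A ClassConstant corresponds to a source-level class literal; the factory models it as an allocation node flowing into a global variable node of type java.lang.Class. The kind of Java code that ends up as such a constant in Jimple is simply:

// A class literal like this becomes a ClassConstant and is handled by caseClassConstant above.
Class<?> clazz = String.class;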
Use of soot.jimple.spark.pag.AllocNode in project soot by Sable.
The class MethodNodeFactory, method caseNewMultiArrayExpr.
@Override
public final void caseNewMultiArrayExpr(NewMultiArrayExpr nmae) {
    ArrayType type = (ArrayType) nmae.getType();
    AllocNode prevAn = pag.makeAllocNode(new Pair<Expr, Integer>(nmae, new Integer(type.numDimensions)), type, method);
    VarNode prevVn = pag.makeLocalVarNode(prevAn, prevAn.getType(), method);
    mpag.addInternalEdge(prevAn, prevVn);
    setResult(prevAn);
    while (true) {
        Type t = type.getElementType();
        if (!(t instanceof ArrayType))
            break;
        type = (ArrayType) t;
        AllocNode an = pag.makeAllocNode(new Pair<Expr, Integer>(nmae, new Integer(type.numDimensions)), type, method);
        VarNode vn = pag.makeLocalVarNode(an, an.getType(), method);
        mpag.addInternalEdge(an, vn);
        mpag.addInternalEdge(vn, pag.makeFieldRefNode(prevVn, ArrayElement.v()));
        prevAn = an;
        prevVn = vn;
    }
}
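To make the loop concrete: for an n-dimensional array allocation it creates one allocation node per dimension and wires each inner array into the array-element field of the enclosing one. A small standalone sketch (hypothetical string labels instead of real PAG nodes) that prints the resulting chain for a three-dimensional allocation:

public class MultiArrayChainSketch {
    public static void main(String[] args) {
        int numDimensions = 3; // e.g. new String[2][3][4]
        String prev = "alloc(nmae, dim=" + numDimensions + ")";
        for (int d = numDimensions - 1; d >= 1; d--) {
            String cur = "alloc(nmae, dim=" + d + ")";
            // Mirrors mpag.addInternalEdge(vn, pag.makeFieldRefNode(prevVn, ArrayElement.v()))
            System.out.println(cur + " --> " + prev + ".ARRAY_ELEMENT");
            prev = cur;
        }
    }
}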