Use of soot.jimple.spark.pag.AllocDotField in project soot by Sable.
Class GeomPointsTo, method findAndInsertInstanceField.
/**
 * Obtain or create an internal representation of an object field.
 */
public IVarAbstraction findAndInsertInstanceField(AllocNode obj, SparkField field) {
    AllocDotField af = findAllocDotField(obj, field);
    IVarAbstraction pn = null;
    if (af == null) {
        // We create a new instance field node w.r.t. type compatibility
        Type decType = ((SootField) field).getDeclaringClass().getType();
        Type baseType = obj.getType();
        // baseType must be a subtype of decType
        if (typeManager.castNeverFails(baseType, decType)) {
            af = makeAllocDotField(obj, field);
            pn = makeInternalNode(af);
            pointers.add(pn);
        }
    } else {
        pn = consG.get(af);
    }
    return pn;
}
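As a quick illustration, a minimal usage sketch of the method above; the names geomPTA, obj, and field are placeholders assumed to be in scope (a GeomPointsTo instance, an AllocNode, and a SparkField):

// Hypothetical usage sketch; geomPTA, obj and field are assumed to exist.
IVarAbstraction fieldNode = geomPTA.findAndInsertInstanceField(obj, field);
if (fieldNode != null) {
    // null is possible when obj's type is not compatible with the
    // declaring class of the field; otherwise the node was found or created.
    fieldNode = fieldNode.getRepresentative();
}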
Use of soot.jimple.spark.pag.AllocDotField in project soot by Sable.
Class GeomPointsTo, method getMethodIDFromPtr.
/**
 * Get the index of the enclosing function of the specified node.
 */
public int getMethodIDFromPtr(IVarAbstraction pn) {
    SootMethod sm = null;
    int ret = Constants.SUPER_MAIN;
    Node node = pn.getWrappedNode();
    if (node instanceof AllocNode) {
        sm = ((AllocNode) node).getMethod();
    } else if (node instanceof LocalVarNode) {
        sm = ((LocalVarNode) node).getMethod();
    } else if (node instanceof AllocDotField) {
        sm = ((AllocDotField) node).getBase().getMethod();
    }
    if (sm != null && func2int.containsKey(sm)) {
        int id = func2int.get(sm);
        if (vis_cg[id] == 0)
            ret = Constants.UNKNOWN_FUNCTION;
        else
            ret = id;
    }
    return ret;
}
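A hedged sketch of how a caller might interpret the result, assuming geomPTA is the enclosing GeomPointsTo instance and pn one of its IVarAbstraction nodes; the constants are the ones used in the method above:

// Hypothetical usage sketch; geomPTA and pn are assumed to exist.
int methodId = geomPTA.getMethodIDFromPtr(pn);
if (methodId == Constants.UNKNOWN_FUNCTION) {
    // The enclosing method was not visited in the geomPTA call graph.
} else if (methodId == Constants.SUPER_MAIN) {
    // No enclosing method was resolved (e.g. a global node).
} else {
    // methodId indexes the enclosing method in the internal func2int mapping.
}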
Use of soot.jimple.spark.pag.AllocDotField in project soot by Sable.
Class EvalResults, method estimateHeapDefuseGraph.
/**
 * Estimate the size of the def-use graph for the heap memory. The heap
 * graph is estimated without context information.
 */
public void estimateHeapDefuseGraph() {
    final Map<IVarAbstraction, int[]> defUseCounterForGeom = new HashMap<IVarAbstraction, int[]>();
    final Map<AllocDotField, int[]> defUseCounterForSpark = new HashMap<AllocDotField, int[]>();
    Date begin = new Date();
    for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
        if (sm.isJavaLibraryMethod())
            continue;
        if (!sm.isConcrete())
            continue;
        if (!sm.hasActiveBody()) {
            sm.retrieveActiveBody();
        }
        if (!ptsProvider.isValidMethod(sm))
            continue;
        // We first gather all the memory access expressions
        for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext(); ) {
            Stmt st = (Stmt) stmts.next();
            if (!(st instanceof AssignStmt))
                continue;
            AssignStmt a = (AssignStmt) st;
            final Value lValue = a.getLeftOp();
            final Value rValue = a.getRightOp();
            InstanceFieldRef ifr = null;
            if (lValue instanceof InstanceFieldRef) {
                // Def statement
                ifr = (InstanceFieldRef) lValue;
            } else if (rValue instanceof InstanceFieldRef) {
                // Use statement
                ifr = (InstanceFieldRef) rValue;
            }
            if (ifr != null) {
                final SootField field = ifr.getField();
                LocalVarNode vn = ptsProvider.findLocalVarNode((Local) ifr.getBase());
                if (vn == null)
                    continue;
                IVarAbstraction pn = ptsProvider.findInternalNode(vn);
                if (pn == null)
                    continue;
                pn = pn.getRepresentative();
                if (!pn.hasPTResult())
                    continue;
                // Spark
                vn.getP2Set().forall(new P2SetVisitor() {

                    @Override
                    public void visit(Node n) {
                        IVarAbstraction padf = ptsProvider.findAndInsertInstanceField((AllocNode) n, field);
                        AllocDotField adf = (AllocDotField) padf.getWrappedNode();
                        int[] defUseUnit = defUseCounterForSpark.get(adf);
                        if (defUseUnit == null) {
                            defUseUnit = new int[2];
                            defUseCounterForSpark.put(adf, defUseUnit);
                        }
                        if (lValue instanceof InstanceFieldRef) {
                            defUseUnit[0]++;
                        } else {
                            defUseUnit[1]++;
                        }
                    }
                });
                // Geom
                Set<AllocNode> objsSet = pn.get_all_points_to_objects();
                for (AllocNode obj : objsSet) {
                    /*
                     * This may create many instance fields: the points-to
                     * analysis tracks only reference-type fields, but here
                     * we count reads and writes of all fields, including
                     * those of primitive type.
                     */
                    IVarAbstraction padf = ptsProvider.findAndInsertInstanceField(obj, field);
                    int[] defUseUnit = defUseCounterForGeom.get(padf);
                    if (defUseUnit == null) {
                        defUseUnit = new int[2];
                        defUseCounterForGeom.put(padf, defUseUnit);
                    }
                    if (lValue instanceof InstanceFieldRef) {
                        defUseUnit[0]++;
                    } else {
                        defUseUnit[1]++;
                    }
                }
            }
        }
    }
    for (int[] defUseUnit : defUseCounterForSpark.values()) {
        evalRes.n_spark_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
    }
    for (int[] defUseUnit : defUseCounterForGeom.values()) {
        evalRes.n_geom_du_pairs += ((long) defUseUnit[0]) * defUseUnit[1];
    }
    Date end = new Date();
    ptsProvider.ps.println();
    ptsProvider.ps.println("-----------> Heap Def Use Graph Evaluation <------------");
    ptsProvider.ps.println("Number of edges in the heap def-use graph (by Geom): " + evalRes.n_geom_du_pairs);
    ptsProvider.ps.println("Number of edges in the heap def-use graph (by Spark): " + evalRes.n_spark_du_pairs);
    ptsProvider.ps.printf("Using time: %dms \n", end.getTime() - begin.getTime());
    ptsProvider.ps.println();
}
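The pair counting above multiplies the number of defs by the number of uses recorded for each abstract field instance; a tiny sketch of that rule with made-up counts:

// Illustration only: a field instance written by 3 statements and read by 4.
int[] defUseUnit = new int[] { 3, 4 };
// It contributes 3 * 4 = 12 edges to the heap def-use graph.
long duPairs = ((long) defUseUnit[0]) * defUseUnit[1];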
Use of soot.jimple.spark.pag.AllocDotField in project soot by Sable.
Class EvalResults, method profileGeomBasicMetrics.
/**
 * Summarize the geometric points-to analysis and report the basic metrics.
 */
public void profileGeomBasicMetrics(boolean testSpark) {
    int n_legal_var = 0, n_alloc_dot_fields = 0;
    int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 };
    evalRes.pts_size_bar_geom = new Histogram(limits);
    if (testSpark) {
        evalRes.total_spark_pts = 0;
        evalRes.max_pts_spark = 0;
        evalRes.pts_size_bar_spark = new Histogram(limits);
    }
    // We first count the LOC
    for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
        if (!sm.isConcrete())
            continue;
        if (!sm.hasActiveBody()) {
            sm.retrieveActiveBody();
        }
        evalRes.loc += sm.getActiveBody().getUnits().size();
    }
    for (IVarAbstraction pn : ptsProvider.pointers) {
        // Skip unprocessed pointers: their points-to information is equivalent to SPARK's
        if (!pn.hasPTResult())
            continue;
        pn = pn.getRepresentative();
        Node var = pn.getWrappedNode();
        if (ptsProvider.isExceptionPointer(var))
            continue;
        if (var instanceof AllocDotField) {
            ++n_alloc_dot_fields;
        }
        ++n_legal_var;
        // SPARK points-to set size
        int size;
        if (testSpark) {
            size = var.getP2Set().size();
            evalRes.pts_size_bar_spark.addNumber(size);
            evalRes.total_spark_pts += size;
            if (size > evalRes.max_pts_spark)
                evalRes.max_pts_spark = size;
        }
        // geomPTA points-to set size
        size = pn.num_of_diff_objs();
        evalRes.pts_size_bar_geom.addNumber(size);
        evalRes.total_geom_ins_pts += size;
        if (size > evalRes.max_pts_geom)
            evalRes.max_pts_geom = size;
    }
    evalRes.avg_geom_ins_pts = (double) evalRes.total_geom_ins_pts / n_legal_var;
    if (testSpark)
        evalRes.avg_spark_pts = (double) evalRes.total_spark_pts / n_legal_var;
    outputer.println("");
    outputer.println("----------Statistical Result of geomPTA <Data Format: geomPTA (SPARK)>----------");
    outputer.printf("Lines of code (jimple): %.1fK\n", (double) evalRes.loc / 1000);
    outputer.printf("Reachable Methods: %d (%d)\n", ptsProvider.getNumberOfMethods(), ptsProvider.getNumberOfSparkMethods());
    outputer.printf("Reachable User Methods: %d (%d)\n", ptsProvider.n_reach_user_methods, ptsProvider.n_reach_spark_user_methods);
    outputer.println("#All Pointers: " + ptsProvider.getNumberOfPointers());
    outputer.println("#Core Pointers: " + n_legal_var + ", in which #AllocDot Fields: " + n_alloc_dot_fields);
    outputer.printf("Total/Average Projected Points-to Tuples [core pointers]: %d (%d) / %.3f (%.3f) \n", evalRes.total_geom_ins_pts, evalRes.total_spark_pts, evalRes.avg_geom_ins_pts, evalRes.avg_spark_pts);
    outputer.println("The largest points-to set size [core pointers]: " + evalRes.max_pts_geom + " (" + evalRes.max_pts_spark + ")");
    outputer.println();
    evalRes.pts_size_bar_geom.printResult(outputer, "Points-to Set Sizes Distribution [core pointers]:", evalRes.pts_size_bar_spark);
}
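For reference, a small standalone sketch of the Histogram bookkeeping used above; only the constructor and addNumber calls that appear in the method are assumed, and the recorded sizes are made up:

// Illustration only: collect points-to set sizes into the same buckets.
int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 };
Histogram geomSizes = new Histogram(limits);
geomSizes.addNumber(3);   // one pointer whose points-to set holds 3 objects
geomSizes.addNumber(42);  // another pointer with 42 objects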
Use of soot.jimple.spark.pag.AllocDotField in project soot by Sable.
Class GeomQueries, method kCFA.
/**
 * Standard k-CFA query for a field expression l.field.
 *
 * @param callEdgeChain the querying context; callEdgeChain[0] is the farthest call edge in the chain
 * @param l the local variable whose field is dereferenced
 * @param field the field accessed through l
 * @param visitor collects the context-sensitive objects pointed to by l.field
 * @return true if the query produced at least one object
 */
@SuppressWarnings("rawtypes")
public boolean kCFA(Edge[] callEdgeChain, Local l, SparkField field, PtSensVisitor visitor) {
    // We first obtain the points-to information for l
    Obj_full_extractor pts_l = new Obj_full_extractor();
    if (!kCFA(callEdgeChain, l, pts_l))
        return false;
    // We then compute the points-to information for l.field
    visitor.prepare();
    for (IntervalContextVar icv : pts_l.outList) {
        AllocNode obj = (AllocNode) icv.var;
        AllocDotField obj_f = geomPTA.findAllocDotField(obj, field);
        if (obj_f == null)
            continue;
        IVarAbstraction objField = geomPTA.findInternalNode(obj_f);
        if (objField == null)
            continue;
        long L = icv.L;
        long R = icv.R;
        assert L < R;
        objField.get_all_context_sensitive_objects(L, R, visitor);
    }
    pts_l = null;
    visitor.finish();
    return visitor.numOfDiffObjects() != 0;
}
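A hedged client-side sketch of this query, assuming a GeomQueries instance named queries, an Edge[] context chain, and the Local/SparkField pair of interest; Obj_full_extractor is reused only because the method above shows it is a PtSensVisitor whose outList holds IntervalContextVar entries:

// Hypothetical client code; queries, callEdgeChain, l and f are assumed to exist.
Obj_full_extractor results = new Obj_full_extractor();
if (queries.kCFA(callEdgeChain, l, f, results)) {
    for (IntervalContextVar icv : results.outList) {
        AllocNode heapObject = (AllocNode) icv.var;
        // icv.L and icv.R delimit the context interval under which
        // heapObject may be pointed to by l.f.
    }
}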