Use of soot.jimple.spark.geom.geomPA.IVarAbstraction in project soot by Sable.
The class EvalResults, method checkCastsSafety.
/**
* Count how many static casts can be determined safe.
*/
public void checkCastsSafety() {
for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
if (sm.isJavaLibraryMethod())
continue;
if (!sm.isConcrete())
continue;
if (!sm.hasActiveBody()) {
sm.retrieveActiveBody();
}
if (!ptsProvider.isValidMethod(sm))
continue;
// All the statements in the method
for (Iterator<Unit> stmts = sm.getActiveBody().getUnits().iterator(); stmts.hasNext(); ) {
Stmt st = (Stmt) stmts.next();
if (st instanceof AssignStmt) {
Value rhs = ((AssignStmt) st).getRightOp();
Value lhs = ((AssignStmt) st).getLeftOp();
if (rhs instanceof CastExpr && lhs.getType() instanceof RefLikeType) {
Value v = ((CastExpr) rhs).getOp();
VarNode node = ptsProvider.findLocalVarNode(v);
if (node == null)
continue;
IVarAbstraction pn = ptsProvider.findInternalNode(node);
if (pn == null)
continue;
pn = pn.getRepresentative();
if (!pn.hasPTResult())
continue;
evalRes.total_casts++;
final Type targetType = (RefLikeType) ((CastExpr) rhs).getCastType();
// First, evaluate the cast with the geometric points-to result
solved = true;
Set<AllocNode> set = pn.get_all_points_to_objects();
for (AllocNode obj : set) {
solved = ptsProvider.castNeverFails(obj.getType(), targetType);
if (!solved)
break;
}
if (solved)
evalRes.geom_solved_casts++;
// Then, evaluate the cast with the SPARK result
solved = true;
node.getP2Set().forall(new P2SetVisitor() {
public void visit(Node arg0) {
if (!solved)
return;
solved = ptsProvider.castNeverFails(arg0.getType(), targetType);
}
});
if (solved)
evalRes.spark_solved_casts++;
}
}
}
}
ptsProvider.ps.println();
ptsProvider.ps.println("-----------> Static Casts Safety Evaluation <------------");
ptsProvider.ps.println("Total casts (app code): " + evalRes.total_casts);
ptsProvider.ps.println("Safe casts: Geom = " + evalRes.geom_solved_casts + ", SPARK = " + evalRes.spark_solved_casts);
}
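The per-object test above delegates to ptsProvider.castNeverFails, which asks whether an object of the given runtime type can always be stored into the cast's target type. A minimal, hypothetical sketch of such a check in terms of Soot's FastHierarchy (the class and method names below are illustrative and not part of geomPTA):

import soot.FastHierarchy;
import soot.Scene;
import soot.Type;

public class CastSafetySketch {

    /**
     * Hypothetical helper: a cast to targetType can never fail for an object
     * whose runtime type is objType if objType can be stored into targetType.
     */
    public static boolean castIsAlwaysSafe(Type objType, Type targetType) {
        FastHierarchy fh = Scene.v().getOrMakeFastHierarchy();
        return fh.canStoreType(objType, targetType);
    }
}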
Use of soot.jimple.spark.geom.geomPA.IVarAbstraction in project soot by Sable.
The class EvalResults, method profileGeomBasicMetrics.
/**
* Summarize the geometric points-to analysis and report the basic metrics.
*/
public void profileGeomBasicMetrics(boolean testSpark) {
int n_legal_var = 0, n_alloc_dot_fields = 0;
int[] limits = new int[] { 1, 5, 10, 25, 50, 75, 100 };
evalRes.pts_size_bar_geom = new Histogram(limits);
if (testSpark) {
evalRes.total_spark_pts = 0;
evalRes.max_pts_spark = 0;
evalRes.pts_size_bar_spark = new Histogram(limits);
}
// We first count the LOC
for (SootMethod sm : ptsProvider.getAllReachableMethods()) {
if (!sm.isConcrete())
continue;
if (!sm.hasActiveBody()) {
sm.retrieveActiveBody();
}
evalRes.loc += sm.getActiveBody().getUnits().size();
}
for (IVarAbstraction pn : ptsProvider.pointers) {
// Skip pointers not processed by geomPTA; their points-to information is equivalent to SPARK
if (!pn.hasPTResult())
continue;
pn = pn.getRepresentative();
Node var = pn.getWrappedNode();
if (ptsProvider.isExceptionPointer(var))
continue;
if (var instanceof AllocDotField) {
++n_alloc_dot_fields;
}
++n_legal_var;
// SPARK points-to set size
int size;
if (testSpark) {
size = var.getP2Set().size();
evalRes.pts_size_bar_spark.addNumber(size);
evalRes.total_spark_pts += size;
if (size > evalRes.max_pts_spark)
evalRes.max_pts_spark = size;
}
// geomPTA points-to set size
size = pn.num_of_diff_objs();
evalRes.pts_size_bar_geom.addNumber(size);
evalRes.total_geom_ins_pts += size;
if (size > evalRes.max_pts_geom)
evalRes.max_pts_geom = size;
}
evalRes.avg_geom_ins_pts = (double) evalRes.total_geom_ins_pts / n_legal_var;
if (testSpark)
evalRes.avg_spark_pts = (double) evalRes.total_spark_pts / n_legal_var;
outputer.println("");
outputer.println("----------Statistical Result of geomPTA <Data Format: geomPTA (SPARK)>----------");
outputer.printf("Lines of code (jimple): %.1fK\n", (double) evalRes.loc / 1000);
outputer.printf("Reachable Methods: %d (%d)\n", ptsProvider.getNumberOfMethods(), ptsProvider.getNumberOfSparkMethods());
outputer.printf("Reachable User Methods: %d (%d)\n", ptsProvider.n_reach_user_methods, ptsProvider.n_reach_spark_user_methods);
outputer.println("#All Pointers: " + ptsProvider.getNumberOfPointers());
outputer.println("#Core Pointers: " + n_legal_var + ", in which #AllocDot Fields: " + n_alloc_dot_fields);
outputer.printf("Total/Average Projected Points-to Tuples [core pointers]: %d (%d) / %.3f (%.3f) \n", evalRes.total_geom_ins_pts, evalRes.total_spark_pts, evalRes.avg_geom_ins_pts, evalRes.avg_spark_pts);
outputer.println("The largest points-to set size [core pointers]: " + evalRes.max_pts_geom + " (" + evalRes.max_pts_spark + ")");
outputer.println();
evalRes.pts_size_bar_geom.printResult(outputer, "Points-to Set Sizes Distribution [core pointers]:", evalRes.pts_size_bar_spark);
}
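The Histogram used above buckets points-to set sizes against the limits {1, 5, 10, 25, 50, 75, 100}. The class below is not soot.jimple.spark.geom.utils.Histogram; it is only a self-contained sketch of the same bucketing idea, written from scratch for illustration:

import java.io.PrintStream;

public class SizeHistogramSketch {
    private final int[] limits;   // upper bounds of the buckets, e.g. {1, 5, 10, 25, 50, 75, 100}
    private final int[] counts;   // one extra bucket for values above the last limit

    public SizeHistogramSketch(int[] limits) {
        this.limits = limits;
        this.counts = new int[limits.length + 1];
    }

    public void addNumber(int size) {
        for (int i = 0; i < limits.length; ++i) {
            if (size <= limits[i]) {
                ++counts[i];
                return;
            }
        }
        ++counts[limits.length];  // larger than every limit
    }

    public void printResult(PrintStream ps, String title) {
        ps.println(title);
        int lower = 0;
        for (int i = 0; i < limits.length; ++i) {
            ps.printf("  (%d, %d]: %d%n", lower, limits[i], counts[i]);
            lower = limits[i];
        }
        ps.printf("  > %d: %d%n", limits[limits.length - 1], counts[limits.length]);
    }
}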
Use of soot.jimple.spark.geom.geomPA.IVarAbstraction in project soot by Sable.
The class HeapInsNodeGenerator, method initFlowGraph.
@Override
public void initFlowGraph(GeomPointsTo ptAnalyzer) {
int k;
int n_legal_cons;
int nf1, nf2;
int code;
IVarAbstraction my_lhs, my_rhs;
// Visit all the simple constraints
n_legal_cons = 0;
for (PlainConstraint cons : ptAnalyzer.constraints) {
if (!cons.isActive)
continue;
my_lhs = cons.getLHS().getRepresentative();
my_rhs = cons.getRHS().getRepresentative();
nf1 = ptAnalyzer.getMethodIDFromPtr(my_lhs);
nf2 = ptAnalyzer.getMethodIDFromPtr(my_rhs);
// Test how many globals are in this constraint
code = ((nf1 == Constants.SUPER_MAIN ? 1 : 0) << 1) | (nf2 == Constants.SUPER_MAIN ? 1 : 0);
switch(cons.type) {
case Constants.NEW_CONS:
// We directly add the objects to the points-to set
// Second argument: 0 if the receiver (rhs) is a global, 1 otherwise;
// third argument: 0 if the object (lhs) is a global, 1 otherwise
my_rhs.add_points_to_3((AllocNode) my_lhs.getWrappedNode(),
(code & 1) == 1 ? 0 : 1,
(code >> 1) == 1 ? 0 : 1,
(code & 1) == 1 ? ptAnalyzer.context_size[nf1] : ptAnalyzer.context_size[nf2]);
// Enqueue to the worklist
ptAnalyzer.getWorklist().push(my_rhs);
break;
case Constants.ASSIGN_CONS:
// The core part of any context-sensitive algorithm
if (cons.interCallEdges != null) {
// Inter-procedural assignment
for (Iterator<Edge> it = cons.interCallEdges.iterator(); it.hasNext(); ) {
Edge sEdge = it.next();
CgEdge q = ptAnalyzer.getInternalEdgeFromSootEdge(sEdge);
if (q.is_obsoleted) {
continue;
}
// Parameter passing
if (nf2 == q.t) {
if (nf1 == Constants.SUPER_MAIN) {
my_lhs.add_simple_constraint_3(my_rhs, 0, q.map_offset, ptAnalyzer.max_context_size_block[q.s]);
} else {
// Self-recursive calls need special treatment
if (q.s == q.t) {
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf1]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, k * ptAnalyzer.max_context_size_block[nf1] + 1, q.map_offset, ptAnalyzer.max_context_size_block[nf1]);
}
}
}
} else {
if (q.s == q.t) {
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf2]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf2]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, q.map_offset, k * ptAnalyzer.max_context_size_block[nf2] + 1, ptAnalyzer.max_context_size_block[nf2]);
}
}
}
}
} else {
// Intra-procedural assignment. Assignments involving a global variable
// also go here; by definition, global variables belong to SUPER_MAIN.
// In the Jimple IR, at most one side of an assignment can be a global.
my_lhs.add_simple_constraint_3(my_rhs, nf1 == Constants.SUPER_MAIN ? 0 : 1, nf2 == Constants.SUPER_MAIN ? 0 : 1, nf1 == Constants.SUPER_MAIN ? ptAnalyzer.context_size[nf2] : ptAnalyzer.context_size[nf1]);
}
break;
case Constants.LOAD_CONS:
// lhs is always a local
// rhs = lhs.f
cons.code = full_convertor[code];
cons.otherSide = my_rhs;
my_lhs.put_complex_constraint(cons);
break;
case Constants.STORE_CONS:
// rhs is always a local
// rhs.f = lhs
cons.code = full_convertor[code];
cons.otherSide = my_lhs;
my_rhs.put_complex_constraint(cons);
break;
default:
throw new RuntimeException("Invalid node type");
}
++n_legal_cons;
}
ptAnalyzer.ps.printf("Only %d (%.1f%%) constraints are needed for this run.\n", n_legal_cons, ((double) n_legal_cons / ptAnalyzer.n_init_constraints) * 100);
}
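The two-bit value code computed above records which sides of a constraint are globals: bit 1 is set when the LHS belongs to SUPER_MAIN and bit 0 when the RHS does. A self-contained sketch of this encoding (the SUPER_MAIN value, class name, and method names are made up for illustration):

public class GlobalSideCodeSketch {
    // Hypothetical id of the pseudo-method that owns all globals;
    // the real value is whatever Constants.SUPER_MAIN is in Soot.
    static final int SUPER_MAIN = 0;

    /** Bit 1 marks a global LHS, bit 0 marks a global RHS. */
    static int encode(int lhsMethodId, int rhsMethodId) {
        return ((lhsMethodId == SUPER_MAIN ? 1 : 0) << 1)
             | (rhsMethodId == SUPER_MAIN ? 1 : 0);
    }

    public static void main(String[] args) {
        System.out.println(encode(3, 7));           // 0: both sides are locals
        System.out.println(encode(3, SUPER_MAIN));  // 1: only the RHS is a global
        System.out.println(encode(SUPER_MAIN, 7));  // 2: only the LHS is a global
    }
}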
Use of soot.jimple.spark.geom.geomPA.IVarAbstraction in project soot by Sable.
The class FullSensitiveNodeGenerator, method initFlowGraph.
@Override
public void initFlowGraph(GeomPointsTo ptAnalyzer) {
int k;
int n_legal_cons;
int nf1, nf2;
int code;
IVarAbstraction my_lhs, my_rhs;
// Visit all the simple constraints
n_legal_cons = 0;
for (PlainConstraint cons : ptAnalyzer.constraints) {
if (!cons.isActive)
continue;
my_lhs = cons.getLHS().getRepresentative();
my_rhs = cons.getRHS().getRepresentative();
nf1 = ptAnalyzer.getMethodIDFromPtr(my_lhs);
nf2 = ptAnalyzer.getMethodIDFromPtr(my_rhs);
// Test how many globals are in this constraint
code = ((nf1 == Constants.SUPER_MAIN ? 1 : 0) << 1) | (nf2 == Constants.SUPER_MAIN ? 1 : 0);
switch(cons.type) {
case Constants.NEW_CONS:
if (code == 0) {
// the allocation result is assigned to a local variable
my_rhs.add_points_to_3((AllocNode) my_lhs.getWrappedNode(), 1, 1, ptAnalyzer.context_size[nf1]);
} else {
// Assigned to a global or the object itself is a global
my_rhs.add_points_to_4((AllocNode) my_lhs.getWrappedNode(), 1, 1, ptAnalyzer.context_size[nf2], ptAnalyzer.context_size[nf1]);
}
// Enqueue to the worklist
ptAnalyzer.getWorklist().push(my_rhs);
break;
case Constants.ASSIGN_CONS:
if (cons.interCallEdges != null) {
// Inter-procedural assignment (parameter passing, function return)
for (Iterator<Edge> it = cons.interCallEdges.iterator(); it.hasNext(); ) {
Edge sEdge = it.next();
CgEdge q = ptAnalyzer.getInternalEdgeFromSootEdge(sEdge);
if (q.is_obsoleted) {
continue;
}
// Parameter passing or not
if (nf2 == q.t) {
// The LHS pointer may belong to SUPER_MAIN, i.e., it is a global
if (nf1 == Constants.SUPER_MAIN) {
my_lhs.add_simple_constraint_4(my_rhs, 1, q.map_offset, 1, ptAnalyzer.max_context_size_block[q.s]);
} else {
// Self-recursive calls need special treatment
if (q.s == q.t) {
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf1]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, k * ptAnalyzer.max_context_size_block[nf1] + 1, q.map_offset, ptAnalyzer.max_context_size_block[nf1]);
}
}
}
} else {
if (q.s == q.t) {
// Self-recursive calls may fall here; we handle them properly
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf2]);
} else {
for (k = 0; k < ptAnalyzer.block_num[nf2]; ++k) {
my_lhs.add_simple_constraint_3(my_rhs, q.map_offset, k * ptAnalyzer.max_context_size_block[nf2] + 1, ptAnalyzer.max_context_size_block[nf2]);
}
}
}
}
} else {
if (code == 0) {
// local to local assignment
my_lhs.add_simple_constraint_3(my_rhs, 1, 1, ptAnalyzer.context_size[nf1]);
} else {
my_lhs.add_simple_constraint_4(my_rhs, 1, 1, ptAnalyzer.context_size[nf1], ptAnalyzer.context_size[nf2]);
}
}
break;
case Constants.LOAD_CONS:
// lhs is always a local
// rhs = lhs.f
cons.code = full_convertor[code];
cons.otherSide = my_rhs;
my_lhs.put_complex_constraint(cons);
break;
case Constants.STORE_CONS:
// rhs is always a local
// rhs.f = lhs
cons.code = full_convertor[code];
cons.otherSide = my_lhs;
my_rhs.put_complex_constraint(cons);
break;
default:
throw new RuntimeException("Invalid type");
}
++n_legal_cons;
}
ptAnalyzer.ps.printf("Only %d (%.1f%%) constraints are needed for this run.\n", n_legal_cons, ((double) n_legal_cons / ptAnalyzer.n_init_constraints) * 100);
}
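In both generators, the non-recursive inter-procedural case replicates the assignment once per context block, with block k starting at offset k * max_context_size_block + 1. A small standalone sketch of that enumeration, using hypothetical field values that mirror block_num and max_context_size_block:

public class ContextBlockSketch {
    public static void main(String[] args) {
        int block_num = 3;                  // hypothetical number of context blocks for a method
        int max_context_size_block = 1000;  // hypothetical number of contexts per block
        // Each inter-procedural assignment is added once per block,
        // starting at offset k * max_context_size_block + 1.
        for (int k = 0; k < block_num; ++k) {
            int start = k * max_context_size_block + 1;
            System.out.println("block " + k + " starts at context " + start);
        }
    }
}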