Use of soot.jimple.spark.geom.dataRep.SimpleInterval in project soot by Sable.
The class ContextsCollector, method insert.
public boolean insert(long L, long R) {
  backupList.clear();

  // Scan the list and merge the intervals that intersect [L, R)
  tmp_si.L = L;
  tmp_si.R = R;
  long minL = L;
  long maxR = R;

  for (SimpleInterval old_si : bars) {
    if (old_si.contains(tmp_si)) {
      // The new interval is already covered; we keep the context intervals disjoint
      return false;
    }
    if (!tmp_si.merge(old_si)) {
      // old_si does not overlap [L, R); keep it and remember the overall bounds
      if (old_si.L < minL)
        minL = old_si.L;
      if (old_si.R > maxR)
        maxR = old_si.R;
      backupList.add(old_si);
    }
  }

  // Swap the backup list with the original list
  List<SimpleInterval> tmpList = backupList;
  backupList = bars;
  bars = tmpList;

  SimpleInterval new_si = new SimpleInterval(tmp_si);
  bars.add(new_si);

  // If the budget is exceeded, collapse all intervals into a single covering interval
  if (nBudget != -1 && bars.size() > nBudget) {
    bars.clear();
    new_si.L = minL;
    new_si.R = maxR;
    bars.add(new_si);
  }

  return true;
}
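For intuition, here is a short walk-through with hypothetical values (not taken from Soot): suppose the collector is empty and nBudget is 2. insert(10, 20) adds [10, 20) and returns true. insert(12, 18) returns false because the new interval is already covered by [10, 20). insert(15, 30) overlaps [10, 20), so the two merge into a single interval [10, 30). insert(40, 50) adds a second, disjoint interval. A further insert(60, 70) would leave three intervals, exceeding the budget of 2, so the list collapses to the single covering interval [10, 70).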
Use of soot.jimple.spark.geom.dataRep.SimpleInterval in project soot by Sable.
The class GeomQueries, method contextsGoBy.
/**
 * Answer a contexts-go-by query.
 *
 * Usually, users specify the last K call edges as the context, the so-called k-CFA context.
 * However, k-CFA is too restrictive.
 * In a contexts-go-by query, the user specifies an arbitrary call edge in the call graph.
 * The query searches for all contexts induced by the specified call edge and collects the points-to results under these contexts.
 *
 * @param sootEdge the specified context edge, in Soot edge format
 * @param l the querying pointer
 * @param visitor container for the query result
 * @return false if l has no points-to information under the contexts induced by the given call edge
 */
@SuppressWarnings("rawtypes")
public boolean contextsGoBy(Edge sootEdge, Local l, PtSensVisitor visitor) {
  // Obtain the internal representation of the specified context edge
  CgEdge ctxt = geomPTA.getInternalEdgeFromSootEdge(sootEdge);
  if (ctxt == null || ctxt.is_obsoleted) {
    return false;
  }

  // Obtain the internal representation of the querying pointer
  LocalVarNode vn = geomPTA.findLocalVarNode(l);
  if (vn == null) {
    // Normally this cannot happen; if it does, it is probably a bug
    return false;
  }

  IVarAbstraction pn = geomPTA.findInternalNode(vn);
  if (pn == null) {
    // This pointer is no longer reachable
    return false;
  }

  pn = pn.getRepresentative();
  if (!pn.hasPTResult()) {
    return false;
  }

  // Obtain the internal representation of the method that encloses the querying pointer
  SootMethod sm = vn.getMethod();
  int target = geomPTA.getIDFromSootMethod(sm);
  if (target == -1) {
    return false;
  }

  // Start the call graph traversal from the context edge's source method
  long L = ctxt.map_offset;
  long R = L + max_context_size_block[rep_cg[ctxt.s]];
  assert L < R;

  visitor.prepare();
  prepareIntervalPropagations();

  if (propagateIntervals(ctxt.t, L, R, target)) {
    // Collect the points-to results under every context interval that reaches target
    ContextsCollector targetContexts = contextsForMethods[target];
    for (SimpleInterval si : targetContexts.bars) {
      assert si.L < si.R;
      pn.get_all_context_sensitive_objects(si.L, si.R, visitor);
    }
    // Reset for the next query
    targetContexts.clear();
  }

  visitor.finish();
  return visitor.numOfDiffObjects() != 0;
}
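For orientation, here is a minimal sketch of how such a query might be issued from client code. It assumes the geometric points-to analysis has already been run, that GeomQueries is constructed from the GeomPointsTo instance, and that Obj_full_extractor (from soot.jimple.spark.geom.helper) serves as the PtSensVisitor; the result-list field outList and the var field of IntervalContextVar follow the geomPA examples and should be checked against the Soot version in use.

import soot.Local;
import soot.Scene;
import soot.jimple.spark.geom.dataRep.IntervalContextVar;
import soot.jimple.spark.geom.geomPA.GeomPointsTo;
import soot.jimple.spark.geom.geomPA.GeomQueries;
import soot.jimple.spark.geom.helper.Obj_full_extractor;
import soot.jimple.spark.pag.AllocNode;
import soot.jimple.toolkits.callgraph.Edge;

public class ContextsGoByDemo {
  // Prints the objects that 'p' may point to under all contexts induced by 'callEdge'.
  // Assumes the geometric points-to analysis (geomPTA) has already been run.
  public static void query(Edge callEdge, Local p) {
    GeomPointsTo geomPTA = (GeomPointsTo) Scene.v().getPointsToAnalysis();
    GeomQueries queries = new GeomQueries(geomPTA);

    // Obj_full_extractor is one PtSensVisitor implementation shipped with geomPA;
    // the name of its result list (outList) is assumed from the geomPA examples.
    Obj_full_extractor extractor = new Obj_full_extractor();
    if (queries.contextsGoBy(callEdge, p, extractor)) {
      for (IntervalContextVar icv : extractor.outList) {
        AllocNode obj = (AllocNode) icv.var;
        System.out.println("may point to: " + obj);
      }
    }
  }
}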
Use of soot.jimple.spark.geom.dataRep.SimpleInterval in project soot by Sable.
The class GeomQueries, method propagateIntervals.
/**
 * Compute the mapping from the context interval [L, R) of method start to the corresponding intervals of method target.
 * Return true if the mapping is feasible.
 *
 * @param start the internal ID of the source method
 * @param L the left (inclusive) bound of the context interval of start
 * @param R the right (exclusive) bound of the context interval of start
 * @param target the internal ID of the target method
 * @return true if target is reachable from start and the intervals have been transferred
 */
protected boolean propagateIntervals(int start, long L, long R, int target) {
  // First identify the subgraph in which all edges lead to target
  if (!dfsScanSubgraph(start, target)) {
    return false;
  }

  // Prepare for the iteration
  int rep_start = rep_cg[start];
  int rep_target = rep_cg[target];
  ContextsCollector targetContexts = contextsForMethods[target];

  if (rep_start == rep_target) {
    // Fast path: start and target are in the same SCC
    transferInSCC(start, target, L, R, targetContexts);
  } else {
    // Start the traversal from the representative method of start's SCC
    transferInSCC(start, rep_start, L, R, contextsForMethods[rep_start]);

    // Propagate in topological order over the SCC-condensed call graph
    topQ.clear();
    topQ.add(rep_start);

    while (!topQ.isEmpty()) {
      // Every function in the queue is a representative function
      int s = topQ.poll();
      ContextsCollector sContexts = contextsForMethods[s];

      // Loop over the outgoing call edges of s
      CgEdge p = call_graph[s];
      while (p != null) {
        int t = p.t;
        int rep_t = rep_cg[t];

        if (in_degree[rep_t] != 0) {
          // This callee has a path to target
          ContextsCollector reptContexts = contextsForMethods[rep_t];
          long block_size = max_context_size_block[s];

          for (SimpleInterval si : sContexts.bars) {
            // Compute the offset of si within the context block of s
            long in_block_offset = (si.L - 1) % block_size;
            long newL = p.map_offset + in_block_offset;
            long newR = si.R - si.L + newL;

            if (rep_t == rep_target) {
              // t and target are in the same SCC:
              // transfer this context interval directly to target
              transferInSCC(t, target, newL, newR, targetContexts);
            } else {
              // Transfer this interval to the SCC representative of t
              // (possibly t == rep_t)
              transferInSCC(t, rep_t, newL, newR, reptContexts);
            }
          }

          // Enqueue rep_t once all of its incoming edges have been processed
          if (--in_degree[rep_t] == 0 && rep_t != rep_target) {
            topQ.add(rep_t);
          }
        }

        p = p.next;
      }

      sContexts.clear();
    }
  }

  return true;
}
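To make the offset arithmetic concrete, the small self-contained sketch below replays the mapping of one caller interval onto a callee's context block with example numbers; the class name and all values (block size, map_offset, interval bounds) are made up for illustration only.

public class IntervalMappingDemo {
  public static void main(String[] args) {
    // Made-up example: the caller's context block has size 100, the call edge
    // writes its contexts starting at offset 1001 in the callee's block,
    // and the caller interval is [41, 61), i.e. 20 contexts.
    long blockSize = 100;
    long mapOffset = 1001;
    long siL = 41, siR = 61;

    // Same arithmetic as in propagateIntervals above
    long inBlockOffset = (siL - 1) % blockSize;  // 40: position of siL inside the caller's block
    long newL = mapOffset + inBlockOffset;       // 1041
    long newR = siR - siL + newL;                // 1061: the interval keeps its length

    System.out.println("[" + siL + ", " + siR + ") maps to [" + newL + ", " + newR + ")");
  }
}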