Use of org.checkerframework.dataflow.cfg.block.Block in project checker-framework by typetools.
The class BackwardAnalysisImpl, method runAnalysisFor.
@Override
public S runAnalysisFor(
    @FindDistinct Node node,
    Analysis.BeforeOrAfter preOrPost,
    TransferInput<V, S> blockTransferInput,
    IdentityHashMap<Node, V> nodeValues,
    Map<TransferInput<V, S>, IdentityHashMap<Node, TransferResult<V, S>>> analysisCaches) {
  Block block = node.getBlock();
  assert block != null : "@AssumeAssertion(nullness): invariant";
  Node oldCurrentNode = currentNode;
  if (isRunning) {
    assert currentInput != null : "@AssumeAssertion(nullness): invariant";
    return currentInput.getRegularStore();
  }
  isRunning = true;
  try {
    switch (block.getType()) {
      case REGULAR_BLOCK:
        {
          RegularBlock rBlock = (RegularBlock) block;
          // Apply the transfer function to the block's contents, in reverse order,
          // until we find the node we are looking for.
          TransferInput<V, S> store = blockTransferInput;
          List<Node> nodeList = rBlock.getNodes();
          ListIterator<Node> reverseIter = nodeList.listIterator(nodeList.size());
          while (reverseIter.hasPrevious()) {
            Node n = reverseIter.previous();
            setCurrentNode(n);
            if (n == node && preOrPost == Analysis.BeforeOrAfter.AFTER) {
              return store.getRegularStore();
            }
            // Copy the store to avoid changing other blocks' transfer inputs in {@link #inputs}.
            TransferResult<V, S> transferResult = callTransferFunction(n, store.copy());
            if (n == node) {
              return transferResult.getRegularStore();
            }
            store = new TransferInput<>(n, this, transferResult);
          }
          throw new BugInCF("node %s is not in node.getBlock()=%s", node, block);
        }
      case EXCEPTION_BLOCK:
        {
          ExceptionBlock eb = (ExceptionBlock) block;
          if (eb.getNode() != node) {
            throw new BugInCF(
                "Node should be equal to eb.getNode(), but got: node: "
                    + node
                    + "\teb.getNode(): "
                    + eb.getNode());
          }
          if (preOrPost == Analysis.BeforeOrAfter.AFTER) {
            return blockTransferInput.getRegularStore();
          }
          setCurrentNode(node);
          // Copy the store to avoid changing other blocks' transfer inputs in {@link #inputs}.
          TransferResult<V, S> transferResult =
              callTransferFunction(node, blockTransferInput.copy());
          // Merge the transfer result with the exception store of this exceptional block.
          S exceptionStore = exceptionStores.get(eb);
          return exceptionStore == null
              ? transferResult.getRegularStore()
              : transferResult.getRegularStore().leastUpperBound(exceptionStore);
        }
      default:
        // Only regular blocks and exceptional blocks can hold nodes.
        throw new BugInCF("Unexpected block type: " + block.getType());
    }
  } finally {
    setCurrentNode(oldCurrentNode);
    isRunning = false;
  }
}
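Callers rarely invoke runAnalysisFor directly; once an analysis has finished, the same per-node stores are normally obtained through its AnalysisResult. The following is a minimal sketch of such a query, assuming a completed analysis and a node of interest. The class StoreQueryExample and method printStores are made up for illustration; getResult(), getStoreBefore(Node), and getStoreAfter(Node) are the result-side accessors of the dataflow API.

import org.checkerframework.dataflow.analysis.AbstractValue;
import org.checkerframework.dataflow.analysis.Analysis;
import org.checkerframework.dataflow.analysis.AnalysisResult;
import org.checkerframework.dataflow.analysis.Store;
import org.checkerframework.dataflow.cfg.node.Node;

class StoreQueryExample {
  // Look up the stores just before and just after a node once the analysis is done.
  // V and S are the analysis's value and store types.
  static <V extends AbstractValue<V>, S extends Store<S>> void printStores(
      Analysis<V, S, ?> analysis, Node node) {
    AnalysisResult<V, S> result = analysis.getResult();
    S before = result.getStoreBefore(node); // store just before `node` in program order
    S after = result.getStoreAfter(node);   // store just after `node`
    System.out.println("before: " + before);
    System.out.println("after: " + after);
  }
}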
Use of org.checkerframework.dataflow.cfg.block.Block in project checker-framework by typetools.
The class CFGTranslationPhaseThree, method computeNeighborhoodOfEmptyBlockBackwards.
/**
* Compute the set of empty regular basic blocks {@code emptyBlocks}, starting at {@code start}
* and looking only backwards in the control flow graph. Furthermore, compute the predecessors of
* these empty blocks ({@code predecessors}).
*
* @param start the starting point of the search (an empty, regular basic block)
* @param emptyBlocks a set to be filled by this method with all empty basic blocks found
* (including {@code start}).
* @param predecessors a set to be filled by this method with all predecessors of the empty blocks
*/
protected static void computeNeighborhoodOfEmptyBlockBackwards(
    RegularBlockImpl start, Set<RegularBlockImpl> emptyBlocks, Set<PredecessorHolder> predecessors) {
  RegularBlockImpl cur = start;
  emptyBlocks.add(cur);
  for (final Block p : cur.getPredecessors()) {
    BlockImpl pred = (BlockImpl) p;
    switch (pred.getType()) {
      case SPECIAL_BLOCK:
        // add pred correctly to predecessor list
        predecessors.add(getPredecessorHolder(pred, cur));
        break;
      case CONDITIONAL_BLOCK:
        // add pred correctly to predecessor list
        predecessors.add(getPredecessorHolder(pred, cur));
        break;
      case EXCEPTION_BLOCK:
        // add pred correctly to predecessor list
        predecessors.add(getPredecessorHolder(pred, cur));
        break;
      case REGULAR_BLOCK:
        RegularBlockImpl r = (RegularBlockImpl) pred;
        if (r.isEmpty()) {
          // recursively look backwards
          if (!emptyBlocks.contains(r)) {
            computeNeighborhoodOfEmptyBlockBackwards(r, emptyBlocks, predecessors);
          }
        } else {
          // add pred correctly to predecessor list
          predecessors.add(getPredecessorHolder(pred, cur));
        }
        break;
    }
  }
}
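The method above is essentially a backwards flood fill: it grows the set of empty regular blocks and records the first non-empty predecessor found on each path, so that the caller can later reroute edges around the empty region. Below is a self-contained toy sketch of the same idea on a hypothetical graph type; ToyNode and every other name in it are illustrative only and not part of the CFG builder.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ToyNode {
  final boolean empty;
  final List<ToyNode> predecessors = new ArrayList<>();
  ToyNode(boolean empty) { this.empty = empty; }
}

class EmptyRegionExample {
  // Collect every empty node reachable backwards from `start`, plus the
  // non-empty frontier nodes where rerouting would have to happen.
  static void collectBackwards(ToyNode start, Set<ToyNode> emptyNodes, Set<ToyNode> frontier) {
    emptyNodes.add(start);
    for (ToyNode pred : start.predecessors) {
      if (pred.empty) {
        if (!emptyNodes.contains(pred)) {
          collectBackwards(pred, emptyNodes, frontier); // recurse through the empty region
        }
      } else {
        frontier.add(pred); // first non-empty predecessor on this path
      }
    }
  }

  public static void main(String[] args) {
    ToyNode a = new ToyNode(false);
    ToyNode b = new ToyNode(true);
    ToyNode c = new ToyNode(true);
    b.predecessors.add(a);
    c.predecessors.add(b);
    Set<ToyNode> empties = new HashSet<>();
    Set<ToyNode> frontier = new HashSet<>();
    collectBackwards(c, empties, frontier);
    // expected: 2 empty nodes (b, c), frontier size 1 (a)
    System.out.println(empties.size() + " empty nodes, frontier size " + frontier.size());
  }
}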
Use of org.checkerframework.dataflow.cfg.block.Block in project checker-framework by typetools.
The class ForwardAnalysisImpl, method performAnalysis.
@Override
public void performAnalysis(ControlFlowGraph cfg) {
  if (isRunning) {
    throw new BugInCF(
        "ForwardAnalysisImpl::performAnalysis() shouldn't be called when the analysis is"
            + " running.");
  }
  isRunning = true;
  try {
    init(cfg);
    while (!worklist.isEmpty()) {
      Block b = worklist.poll();
      performAnalysisBlock(b);
    }
  } finally {
    assert isRunning;
    // In case performAnalysisBlock crashed, reset isRunning to false.
    isRunning = false;
  }
}
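performAnalysis is a classic worklist fixpoint loop: blocks are taken from the queue, processed, and (inside performAnalysisBlock) successors whose inputs changed are re-enqueued until the queue drains. The sketch below shows the same pattern in isolation on a tiny integer graph; it is purely illustrative and uses no checker-framework API.

import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;

class WorklistExample {
  public static void main(String[] args) {
    // successor edges of a tiny graph: 0 -> 1, 0 -> 2, 1 -> 2
    Map<Integer, List<Integer>> successors = new HashMap<>();
    successors.put(0, List.of(1, 2));
    successors.put(1, List.of(2));
    successors.put(2, List.of());
    // initial dataflow facts per node
    Map<Integer, Integer> fact = new HashMap<>(Map.of(0, 5, 1, 0, 2, 0));

    Queue<Integer> worklist = new ArrayDeque<>();
    worklist.add(0);
    while (!worklist.isEmpty()) {
      int b = worklist.poll();
      for (int succ : successors.get(b)) {
        int joined = Math.max(fact.get(succ), fact.get(b)); // join the incoming fact
        if (joined != fact.get(succ)) { // only re-process when something changed
          fact.put(succ, joined);
          worklist.add(succ);
        }
      }
    }
    System.out.println(fact); // prints {0=5, 1=5, 2=5} once the fixpoint is reached
  }
}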
Use of org.checkerframework.dataflow.cfg.block.Block in project checker-framework by typetools.
The class ControlFlowGraph, method getAllBlocks.
/**
* Returns the set of all basic blocks in this control flow graph.
*
* @return the set of all basic blocks in this control flow graph
*/
public Set<Block> getAllBlocks(
    @UnknownInitialization(ControlFlowGraph.class) ControlFlowGraph this) {
  Set<Block> visited = new HashSet<>();
  // worklist is always a subset of visited; any block in worklist is also in visited.
  Queue<Block> worklist = new ArrayDeque<>();
  Block cur = entryBlock;
  visited.add(entryBlock);
  // traverse the whole control flow graph
  while (true) {
    if (cur == null) {
      break;
    }
    for (Block b : cur.getSuccessors()) {
      if (visited.add(b)) {
        worklist.add(b);
      }
    }
    cur = worklist.poll();
  }
  return visited;
}
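A hedged usage sketch: given a ControlFlowGraph obtained elsewhere, getAllBlocks() yields every reachable block, which can then be grouped by kind via Block.getType(). The helper class and method names below are made up for illustration.

import java.util.EnumMap;
import java.util.Map;
import org.checkerframework.dataflow.cfg.ControlFlowGraph;
import org.checkerframework.dataflow.cfg.block.Block;

class BlockStatsExample {
  // Count how many blocks of each kind the graph contains.
  static Map<Block.BlockType, Integer> countBlocksByType(ControlFlowGraph cfg) {
    Map<Block.BlockType, Integer> counts = new EnumMap<>(Block.BlockType.class);
    for (Block b : cfg.getAllBlocks()) {
      counts.merge(b.getType(), 1, Integer::sum);
    }
    return counts;
  }
}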
Use of org.checkerframework.dataflow.cfg.block.Block in project bazel by bazelbuild.
The class CFGDOTVisualizer, method getProcessOrder.
private static IdentityHashMap<Block, List<Integer>> getProcessOrder(ControlFlowGraph cfg) {
  IdentityHashMap<Block, List<Integer>> depthFirstOrder = new IdentityHashMap<>();
  int count = 1;
  for (Block b : cfg.getDepthFirstOrderedBlocks()) {
    if (depthFirstOrder.get(b) == null) {
      depthFirstOrder.put(b, new ArrayList<Integer>());
    }
    depthFirstOrder.get(b).add(count++);
  }
  return depthFirstOrder;
}
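The returned map associates each block with the positions (possibly more than one) at which it appears in the depth-first ordering. As an illustration, one might render those positions in a DOT node label as sketched below; the helper and its joining format are assumptions, not the visualizer's actual output.

import java.util.List;
import java.util.stream.Collectors;

class ProcessOrderLabelExample {
  // Render a block's visit positions, e.g. [2, 7] becomes "Process order: 2,7".
  static String processOrderLabel(List<Integer> processOrder) {
    return "Process order: "
        + processOrder.stream().map(String::valueOf).collect(Collectors.joining(","));
  }
}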