Use of org.batfish.datamodel.questions.InvalidReachabilitySettingsException in project batfish by batfish.
Class Batfish, method reducedReachability:
@Override
public AnswerElement reducedReachability(ReachabilitySettings reachabilitySettings) {
  Settings settings = getSettings();
  checkDifferentialDataPlaneQuestionDependencies();
  String tag = getDifferentialFlowTag();
  // load base configurations and generate base data plane
  pushBaseEnvironment();
  Map<String, Configuration> baseConfigurations = loadConfigurations();
  Synthesizer baseDataPlaneSynthesizer = synthesizeDataPlane();
  popEnvironment();
  // load diff configurations and generate diff data plane
  pushDeltaEnvironment();
  Map<String, Configuration> diffConfigurations = loadConfigurations();
  Synthesizer diffDataPlaneSynthesizer = synthesizeDataPlane();
  popEnvironment();
  // ingress nodes must be present and active in both snapshots; invalid settings
  // are returned to the user as the answer rather than rethrown
  Set<String> ingressNodes;
  try {
    ingressNodes =
        ImmutableSet.copyOf(
            Sets.intersection(
                reachabilitySettings.computeActiveIngressNodes(baseConfigurations),
                reachabilitySettings.computeActiveIngressNodes(diffConfigurations)));
  } catch (InvalidReachabilitySettingsException e) {
    return e.getInvalidSettingsAnswer();
  }
  // destinations blacklisted in the delta environment are excluded from the query
  pushDeltaEnvironment();
  SortedSet<String> blacklistNodes = getNodeBlacklist();
  Set<NodeInterfacePair> blacklistInterfaces = getInterfaceBlacklist();
  SortedSet<Edge> blacklistEdges = getEdgeBlacklist();
  popEnvironment();
  BlacklistDstIpQuerySynthesizer blacklistQuery =
      new BlacklistDstIpQuerySynthesizer(
          null, blacklistNodes, blacklistInterfaces, blacklistEdges, baseConfigurations);
  // compute composite program and flows
  List<Synthesizer> synthesizers =
      ImmutableList.of(
          baseDataPlaneSynthesizer, diffDataPlaneSynthesizer, baseDataPlaneSynthesizer);
  // generate base reachability and diff blackhole and blacklist queries:
  // one job per (ingress node, VRF), looking for flows accepted in the base
  // snapshot but not accepted in the delta snapshot
  List<CompositeNodJob> jobs =
      ingressNodes.stream()
          .flatMap(
              node ->
                  baseConfigurations.get(node).getVrfs().keySet().stream()
                      .map(
                          vrf -> {
                            Map<String, Set<String>> ingressNodeVrfs =
                                ImmutableMap.of(node, ImmutableSet.of(vrf));
                            // accepted in the base snapshot
                            StandardReachabilityQuerySynthesizer acceptQuery =
                                StandardReachabilityQuerySynthesizer.builder()
                                    .setActions(
                                        ImmutableSet.of(
                                            ForwardingAction.ACCEPT,
                                            ForwardingAction.NEIGHBOR_UNREACHABLE_OR_EXITS_NETWORK))
                                    .setHeaderSpace(reachabilitySettings.getHeaderSpace())
                                    .setIngressNodeVrfs(ingressNodeVrfs)
                                    .setFinalNodes(ImmutableSet.of())
                                    .setTransitNodes(ImmutableSet.of())
                                    .setNonTransitNodes(ImmutableSet.of())
                                    .setSrcNatted(reachabilitySettings.getSrcNatted())
                                    .build();
                            // negated below: NOT accepted in the delta snapshot
                            StandardReachabilityQuerySynthesizer notAcceptQuery =
                                StandardReachabilityQuerySynthesizer.builder()
                                    .setActions(
                                        ImmutableSet.of(
                                            ForwardingAction.ACCEPT,
                                            ForwardingAction.NEIGHBOR_UNREACHABLE_OR_EXITS_NETWORK))
                                    .setHeaderSpace(new HeaderSpace())
                                    .setIngressNodeVrfs(ingressNodeVrfs)
                                    .setFinalNodes(ImmutableSet.of())
                                    .setTransitNodes(ImmutableSet.of())
                                    .setNonTransitNodes(ImmutableSet.of())
                                    .build();
                            notAcceptQuery.setNegate(true);
                            SortedSet<Pair<String, String>> nodes =
                                ImmutableSortedSet.of(new Pair<>(node, vrf));
                            List<QuerySynthesizer> queries =
                                ImmutableList.of(acceptQuery, notAcceptQuery, blacklistQuery);
                            return new CompositeNodJob(settings, synthesizers, queries, nodes, tag);
                          }))
          .collect(Collectors.toList());
  // TODO: maybe do something with nod answer element
  Set<Flow> flows = computeCompositeNodOutput(jobs, new NodAnswerElement());
  // replay the resulting flows in both environments to build the differential history
  pushBaseEnvironment();
  getDataPlanePlugin().processFlows(flows, loadDataPlane());
  popEnvironment();
  pushDeltaEnvironment();
  getDataPlanePlugin().processFlows(flows, loadDataPlane());
  popEnvironment();
  AnswerElement answerElement = getHistory();
  return answerElement;
}
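Both snippets on this page use the same error-handling pattern: the ReachabilitySettings resolution methods throw InvalidReachabilitySettingsException, and the question method catches it and returns e.getInvalidSettingsAnswer() as the answer instead of propagating the failure. The following is a minimal, self-contained sketch of that pattern only; SketchAnswerElement, SketchInvalidSettingsException, its constructor, and the static computeActiveIngressNodes helper are simplified stand-ins invented for illustration, not Batfish's actual types or signatures.

// Minimal sketch of the throw-and-catch pattern used above (stand-in types, not Batfish's).
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

class SketchAnswerElement {
  final String text;
  SketchAnswerElement(String text) {
    this.text = text;
  }
}

// Stand-in for org.batfish.datamodel.questions.InvalidReachabilitySettingsException;
// the real class likewise exposes getInvalidSettingsAnswer().
class SketchInvalidSettingsException extends Exception {
  private final SketchAnswerElement _invalidSettingsAnswer;

  SketchInvalidSettingsException(String message) {
    super(message);
    _invalidSettingsAnswer = new SketchAnswerElement(message);
  }

  SketchAnswerElement getInvalidSettingsAnswer() {
    return _invalidSettingsAnswer;
  }
}

public class InvalidSettingsPatternSketch {

  // Analogous role to ReachabilitySettings.computeActiveIngressNodes: reject
  // settings that match no node in the snapshot by throwing the exception.
  static Set<String> computeActiveIngressNodes(
      Set<String> requestedNodes, Map<String, ?> configurations)
      throws SketchInvalidSettingsException {
    Set<String> active = new TreeSet<>();
    for (String node : requestedNodes) {
      if (configurations.containsKey(node)) {
        active.add(node);
      }
    }
    if (active.isEmpty()) {
      throw new SketchInvalidSettingsException("No active ingress nodes match the question");
    }
    return active;
  }

  // Analogous role to the try/catch blocks in reducedReachability and singleReachability:
  // invalid settings become the returned answer rather than an error.
  static SketchAnswerElement answer(Set<String> requestedNodes, Map<String, ?> configurations) {
    Set<String> ingressNodes;
    try {
      ingressNodes = computeActiveIngressNodes(requestedNodes, configurations);
    } catch (SketchInvalidSettingsException e) {
      return e.getInvalidSettingsAnswer();
    }
    return new SketchAnswerElement("would run NoD jobs for " + ingressNodes);
  }

  public static void main(String[] args) {
    // A node name that matches nothing produces the invalid-settings answer.
    System.out.println(answer(Set.of("no-such-router"), Map.of("r1", new Object())).text);
  }
}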
Use of org.batfish.datamodel.questions.InvalidReachabilitySettingsException in project batfish by batfish.
Class Batfish, method singleReachability:
private AnswerElement singleReachability(
    ReachabilitySettings reachabilitySettings, ReachabilityQuerySynthesizer.Builder<?, ?> builder) {
  Settings settings = getSettings();
  String tag = getFlowTag(_testrigSettings);
  Set<ForwardingAction> actions = reachabilitySettings.getActions();
  boolean useCompression = reachabilitySettings.getUseCompression();
  // specialized compression
  /*
  CompressDataPlaneResult compressionResult =
      useCompression ? computeCompressedDataPlane(headerSpace) : null;
  Map<String, Configuration> configurations =
      useCompression ? compressionResult._compressedConfigs : loadConfigurations();
  DataPlane dataPlane = useCompression ? compressionResult._compressedDataPlane : loadDataPlane();
  */
  // general compression
  Snapshot snapshot = getSnapshot();
  Map<String, Configuration> configurations =
      useCompression ? loadCompressedConfigurations(snapshot) : loadConfigurations(snapshot);
  DataPlane dataPlane = loadDataPlane(useCompression);
  if (configurations == null) {
    throw new BatfishException("error loading configurations");
  }
  if (dataPlane == null) {
    throw new BatfishException("error loading data plane");
  }
  // resolve the question's node and header-space constraints against the snapshot;
  // invalid settings are returned to the user as the answer rather than rethrown
  Set<String> activeIngressNodes;
  Set<String> activeFinalNodes;
  HeaderSpace headerSpace;
  Set<String> transitNodes;
  Set<String> nonTransitNodes;
  int maxChunkSize;
  try {
    activeIngressNodes = reachabilitySettings.computeActiveIngressNodes(configurations);
    activeFinalNodes = reachabilitySettings.computeActiveFinalNodes(configurations);
    headerSpace = reachabilitySettings.getHeaderSpace();
    transitNodes = reachabilitySettings.computeActiveTransitNodes(configurations);
    nonTransitNodes = reachabilitySettings.computeActiveNonTransitNodes(configurations);
    maxChunkSize = reachabilitySettings.getMaxChunkSize();
    reachabilitySettings.validateTransitNodes(configurations);
  } catch (InvalidReachabilitySettingsException e) {
    return e.getInvalidSettingsAnswer();
  }
  // every VRF of every active ingress node is a potential origination point
  List<Pair<String, String>> originateNodeVrfs =
      activeIngressNodes.stream()
          .flatMap(
              ingressNode ->
                  configurations.get(ingressNode).getVrfs().keySet().stream()
                      .map(ingressVrf -> new Pair<>(ingressNode, ingressVrf)))
          .collect(Collectors.toList());
  int chunkSize =
      Math.max(
          1, Math.min(maxChunkSize, originateNodeVrfs.size() / _settings.getAvailableThreads()));
  // partition originateNodeVrfs into chunks, one NodJob per chunk
  // (see the worked chunking example after this method)
  List<List<Pair<String, String>>> originateNodeVrfChunks =
      Lists.partition(originateNodeVrfs, chunkSize);
  Synthesizer dataPlaneSynthesizer =
      synthesizeDataPlane(
          configurations,
          dataPlane,
          loadForwardingAnalysis(configurations, dataPlane),
          headerSpace,
          reachabilitySettings.getSpecialize());
  // build query jobs
  List<NodJob> jobs =
      originateNodeVrfChunks.stream()
          .map(ImmutableSortedSet::copyOf)
          .map(
              nodeVrfs -> {
                // group the chunk's (node, VRF) pairs by node
                SortedMap<String, Set<String>> vrfsByNode = new TreeMap<>();
                nodeVrfs.forEach(
                    nodeVrf -> {
                      String node = nodeVrf.getFirst();
                      String vrf = nodeVrf.getSecond();
                      vrfsByNode.computeIfAbsent(node, key -> new TreeSet<>());
                      vrfsByNode.get(node).add(vrf);
                    });
                ReachabilityQuerySynthesizer query =
                    builder
                        .setActions(actions)
                        .setHeaderSpace(headerSpace)
                        .setFinalNodes(activeFinalNodes)
                        .setIngressNodeVrfs(vrfsByNode)
                        .setTransitNodes(transitNodes)
                        .setNonTransitNodes(nonTransitNodes)
                        .setSrcNatted(reachabilitySettings.getSrcNatted())
                        .build();
                return new NodJob(
                    settings,
                    dataPlaneSynthesizer,
                    query,
                    nodeVrfs,
                    tag,
                    reachabilitySettings.getSpecialize());
              })
          .collect(Collectors.toList());
  // run jobs and get resulting flows
  Set<Flow> flows = computeNodOutput(jobs);
  getDataPlanePlugin().processFlows(flows, loadDataPlane());
  AnswerElement answerElement = getHistory();
  return answerElement;
}
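The chunk-size arithmetic in singleReachability decides how many (node, VRF) origination points each NodJob carries. Below is a standalone sketch of just that computation, using Guava's Lists.partition as the method does; the pair list, maxChunkSize, and thread count are made-up example values, not settings taken from Batfish.

// Worked example of the chunk-size formula and Lists.partition (example values only).
import com.google.common.collect.Lists;
import java.util.List;

public class ChunkingSketch {
  public static void main(String[] args) {
    // Assumed example inputs: 10 originate (node, VRF) pairs, a generous
    // maxChunkSize from the question, and 4 available threads.
    List<String> originateNodeVrfs =
        List.of(
            "n1/default", "n2/default", "n3/default", "n4/default", "n5/default",
            "n6/default", "n7/default", "n8/default", "n9/default", "n10/default");
    int maxChunkSize = 256;
    int availableThreads = 4;
    // Same formula as singleReachability: at least 1 pair per chunk, at most
    // maxChunkSize, otherwise enough pairs per chunk to keep every thread busy.
    int chunkSize =
        Math.max(1, Math.min(maxChunkSize, originateNodeVrfs.size() / availableThreads));
    List<List<String>> chunks = Lists.partition(originateNodeVrfs, chunkSize);
    // 10 / 4 = 2, so chunkSize == 2 and the 10 pairs become 5 chunks of 2.
    System.out.println("chunkSize=" + chunkSize + ", chunks=" + chunks.size());
  }
}

With these example numbers the formula yields a chunk size of 2, so the work is split into 5 jobs instead of 10 single-pair jobs while still keeping all 4 threads occupied.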