Usage of easik.model.path.ModelPath in project fql by CategoricalData: class Model, method isCommutativeDiagram().
/**
 * Checks whether the currently selected paths form a commutative diagram.
 * A valid commutative diagram requires at least two paths, all sharing the
 * same domain and codomain, with no two paths being identical
 * (edge-for-edge).
 *
 * @param inPaths
 *            The list of paths participating in the commutative diagram
 * @return true if it forms a valid commutative diagram, false otherwise
 */
public boolean isCommutativeDiagram(List<ModelPath<F, GM, M, N, E>> inPaths) {
    if (inPaths.size() < 2) {
        // A single path can never form a commutative diagram.
        return false;
    }
    ArrayList<ModelPath<F, GM, M, N, E>> paths = new ArrayList<>(inPaths);
    ModelPath<F, GM, M, N, E> firstPath = paths.get(0);
    for (int i = 0; i < paths.size(); i++) {
        ModelPath<F, GM, M, N, E> path = paths.get(i);
        // All paths must be parallel to the first one (same domain and
        // codomain). Index 0 is trivially parallel to itself, so skip it.
        if (i > 0 && (!firstPath.getDomain().equals(path.getDomain())
                || !firstPath.getCoDomain().equals(path.getCoDomain()))) {
            return false;
        }
        // Reject duplicate paths. List.equals already performs the size
        // check and element-wise edge comparison that the original
        // hand-rolled loops did.
        for (int j = i + 1; j < paths.size(); j++) {
            if (path.getEdges().equals(paths.get(j).getEdges())) {
                // Identical paths; not a valid CD.
                return false;
            }
        }
    }
    // We got through all the checks, so this must be a valid CD.
    return true;
}
Usage of easik.model.path.ModelPath in project fql by CategoricalData: class Model, method asPullbackConstraint().
/**
 * Determines whether a set of paths potentially forming a pullback
 * constraint could legally form one. If valid, this returns the passed-in
 * paths reordered as [projection 1, target path 1, projection 2,
 * target path 2, ...]: each even slot holds a projection leaving the
 * pullback source, each odd slot a path ending at the pullback target.
 *
 * Note: This method does not currently care about the order that the paths
 * are selected.
 *
 * Modified summer 2012 (CRF): The original (not 'wide') pullback constraint
 * was hard coded (with 4 nested loops) to arrange the paths in order, which
 * is infeasible for a generalized pullback. This method instead counts how
 * often each node occurs as a path endpoint (via getDomain()/getCoDomain()):
 * the source and target are each seen `width` times while intermediate
 * nodes are seen exactly twice, so they can be told apart whenever
 * width &gt; 2. The width-2 case (4 paths) is delegated to the original
 * base-case method.
 *
 * TODO All of this could be simplified by keeping track of which node is
 * which upon creation...
 *
 * @param paths
 *            The set of potential paths
 * @return Null if invalid pullback constraint, a valid pullback constraint
 *         ordering otherwise.
 */
public List<ModelPath<F, GM, M, N, E>> asPullbackConstraint(List<ModelPath<F, GM, M, N, E>> paths) {
    // Width 2 (exactly 4 paths): the endpoint-counting trick below cannot
    // tell source from intermediates (everything occurs twice), so pass to
    // the original pullback check.
    if (paths.size() == 4) {
        return asPullbackConstraintBaseCase(paths);
    }
    // A pullback needs an even number of paths, and at least two of them.
    if ((paths.size() < 2) || (paths.size() % 2 != 0)) {
        return null;
    }
    // Count how many paths start or end at each node. merge() replaces the
    // original containsKey/put dance and the deprecated new Integer(int)
    // boxing constructor.
    HashMap<N, Integer> counts = new HashMap<>();
    for (ModelPath<F, GM, M, N, E> sk : paths) {
        if (sk == null) {
            return null;
        }
        counts.merge(sk.getCoDomain(), 1, Integer::sum);
        counts.merge(sk.getDomain(), 1, Integer::sum);
    }
    // The two nodes seen more than twice are the source and target; we
    // don't know yet which is which.
    N a = null;
    N b = null;
    for (N en : counts.keySet()) {
        if (counts.get(en) > 2) {
            if (a == null) {
                a = en;
            } else if (b == null) {
                b = en;
            } else {
                // More than two high-count nodes: not a pullback shape.
                return null;
            }
        }
    }
    // Orient source/target using an arbitrary path (index 0): a projection
    // starts at the source; a target path ends at the target.
    N source = null;
    N target = null;
    if (paths.get(0).getDomain() == a) {
        source = a;
        target = b;
    } else if (paths.get(0).getDomain() == b) {
        source = b;
        target = a;
    } else {
        if (paths.get(0).getCoDomain() == a) {
            target = a;
            source = b;
        } else if (paths.get(0).getCoDomain() == b) {
            source = a;
            target = b;
        } else {
            // Path 0 touches neither candidate node: something went wrong.
            return null;
        }
    }
    // Separate the paths into projections (leaving the source) and target
    // paths, now that we know the source and target nodes.
    // NOTE(review): fromSource has no local declaration, so it appears to
    // be an instance field; getProjectionPathFor() presumably consumes
    // matched projections from it -- see the emptiness check below. Confirm.
    fromSource = new ArrayList<>();
    ArrayList<ModelPath<F, GM, M, N, E>> toTarget = new ArrayList<>();
    for (int i = 0; i < paths.size(); i++) {
        if (paths.get(i).getDomain() == source) {
            fromSource.add(paths.get(i));
        } else {
            toTarget.add(paths.get(i));
        }
    }
    // Each partition must hold exactly half of the paths (the width).
    if ((fromSource.size() != toTarget.size()) || (fromSource.size() != paths.size() / 2)) {
        return null;
    }
    // Pair every target path with its projection, interleaving
    // [projection, target path] into the ordered result.
    List<ModelPath<F, GM, M, N, E>> orderedPaths = new ArrayList<>();
    for (ModelPath<F, GM, M, N, E> p : toTarget) {
        ModelPath<F, GM, M, N, E> skp = getProjectionPathFor(p);
        if (skp == null) {
            return null;
        }
        orderedPaths.add(skp);
        orderedPaths.add(p);
    }
    // Make sure we could match a projection with every target path.
    if (fromSource.size() != 0) {
        return null;
    }
    // This will probably pass at this point, but it doesn't hurt to check.
    if (orderedPaths.size() != paths.size()) {
        return null;
    }
    // Verify the alternating shape: even slots leave the source, odd slots
    // end at the target.
    for (int i = 0; i < orderedPaths.size(); i++) {
        if (i % 2 == 1) {
            if (orderedPaths.get(i).getCoDomain() != target) {
                return null;
            }
        } else {
            if (orderedPaths.get(i).getDomain() != source) {
                return null;
            }
        }
    }
    // Success: a valid pullback ordering.
    return orderedPaths;
}
Usage of easik.model.path.ModelPath in project fql by CategoricalData: class Model, method asPullbackConstraintBaseCase().
/**
 * Determines whether four paths can legally form the classic (width-2)
 * pullback constraint. On success the paths are returned reordered so that
 * elements 0 and 1 compose into one path and elements 2 and 3 compose into
 * the other: the domains of elements 0 and 2 are the same, and the
 * codomains of elements 1 and 3 are the same.
 *
 * Note: This method does not currently care about the order that the paths
 * are selected.
 *
 * @param paths
 *            The set of potential paths
 * @return Null if invalid pullback constraint, a valid pullback constraint
 *         ordering otherwise.
 * @since 2006-05-25 Vera Ranieri
 */
public List<ModelPath<F, GM, M, N, E>> asPullbackConstraintBaseCase(List<ModelPath<F, GM, M, N, E>> paths) {
    // Hard-coded for exactly four paths -- NOT GENERAL! Wider pullbacks are
    // handled elsewhere.
    if (paths.size() != 4) {
        return null;
    }
    // Every path must be present and fully specified.
    for (ModelPath<F, GM, M, N, E> candidate : paths) {
        if ((candidate == null) || !candidate.isFullyDefined()) {
            return null;
        }
    }
    // Brute-force search over all orderings (a, b, c, d) of the four paths.
    for (int a = 0; a < 4; a++) {
        for (int b = 0; b < 4; b++) {
            for (int c = 0; c < 4; c++) {
                for (int d = 0; d < 4; d++) {
                    // Skip anything that is not a permutation of {0,1,2,3}.
                    if ((b == a) || (c == a) || (c == b) || (d == a) || (d == b) || (d == c)) {
                        continue;
                    }
                    ModelPath<F, GM, M, N, E> pathA = paths.get(a);
                    ModelPath<F, GM, M, N, E> pathB = paths.get(b);
                    ModelPath<F, GM, M, N, E> pathC = paths.get(c);
                    ModelPath<F, GM, M, N, E> pathD = paths.get(d);
                    N domA = pathA.getDomain();
                    N domB = pathB.getDomain();
                    N domC = pathC.getDomain();
                    N domD = pathD.getDomain();
                    N codoA = pathA.getCoDomain();
                    N codoB = pathB.getCoDomain();
                    N codoC = pathC.getCoDomain();
                    N codoD = pathD.getCoDomain();
                    try {
                        // A valid arrangement: A feeds into B ("left" legs
                        // connect), C feeds into D ("right" legs connect),
                        // both chains end at the same node and start at the
                        // same node.
                        boolean arranged = (codoA == domB) && (codoC == domD)
                                && (codoB == codoD) && (domA == domC);
                        if (arranged) {
                            // Injectivity requirement on the first edge of
                            // each leg, preserved verbatim from the
                            // original; short-circuit order matters because
                            // getFirst() throws on an empty edge list.
                            boolean injectivityOk = !pathB.getEdges().getFirst().isInjective()
                                    || (pathC.getEdges().getFirst().isInjective()
                                            && !pathD.getEdges().getFirst().isInjective())
                                    || pathA.getEdges().getFirst().isInjective();
                            if (!injectivityOk) {
                                return null;
                            }
                            ArrayList<ModelPath<F, GM, M, N, E>> ordered = new ArrayList<>();
                            ordered.add(pathA);
                            ordered.add(pathB);
                            ordered.add(pathC);
                            ordered.add(pathD);
                            return ordered;
                        }
                    } catch (Exception ignored) {
                        // e.g. getFirst() on an empty edge list: treat the
                        // whole input as an invalid constraint.
                        return null;
                    }
                }
            }
        }
    }
    return null;
}
Usage of easik.model.path.ModelPath in project fql by CategoricalData: class EntityNode, method getShadowEdges().
/**
* This method returns the edges that will be "shadowed" in this entity for
* allowing various types of constraints. The problem arises when we have
* something like: A -> B -> C, where A is the summand of B, but B has
* to be specified. In this case, the B to C edge will be returned as a
* "shadow" edge. We handle this for other constraint types, too. For a
* good, working, technical example, see the shadowEdges.easik sample
* sketch.
*
* @return a set of edges that will be shadowed by this entity node.
*
 * Shadow edges are being removed completely. Started by Sarah Van der Laan,
 * continued by Federico Mora, because a partial solution is worse than
 * all or nothing.
*
* public LinkedHashSet<SketchEdge> getShadowEdges() { return
* getShadowEdges(new LinkedHashSet<EntityNode>(5), new
* LinkedHashSet<SketchEdge>(5)); }
*/
// Package-only implementation of the (removed) public getShadowEdges()
// above; breaks recursion by ignoring shadowed nodes that we already
// know about.
/**
 * Collects the edges "shadowed" by this entity node across the sum,
 * product, equalizer and pullback constraints it takes part in. See the
 * commented-out public method above for the motivating example.
 *
 * @param ignore
 *            entity nodes already visited; recursion stops at these so
 *            that cyclic constraint graphs cannot recurse forever
 * @param constraintEdges
 *            in/out accumulator for the constraints' own edges; these are
 *            removed from the returned set (they will be generated
 *            automatically)
 *
 * @return the set of edges shadowed by this entity node
 */
LinkedHashSet<SketchEdge> getShadowEdges(final Collection<EntityNode> ignore, final LinkedHashSet<SketchEdge> constraintEdges) {
// These are the other entity nodes that we (potentially) need to shadow:
final Collection<EntityNode> shadow = new LinkedHashSet<>(10);
// Walk every constraint in the model; the label lets us jump straight to
// the next constraint once the current one has contributed.
CONSTRAINT: for (final ModelConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> c : getMModel().getConstraints().values()) {
if (c instanceof SumConstraint) {
final SumConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> s = (SumConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge>) c;
for (final ModelPath<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> path : s.getPaths()) {
// Shadow everything along the sum path that starts at this entity
// (only the path starting here matters, of course):
if (path.getDomain() == this) {
shadow.addAll(path.getEntities());
constraintEdges.addAll(path.getEdges());
continue CONSTRAINT;
}
}
} else if (c instanceof ProductConstraint) {
final ProductConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> p = (ProductConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge>) c;
for (final ModelPath<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> path : p.getPaths()) {
// If this entity is a factor of the product (a path codomain),
// shadow the interior of every product path:
if (path.getCoDomain() == this) {
for (final ModelPath<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> prodPath : p.getPaths()) {
// But we ignore all of the product path edges,
// since they will be automatically generated:
constraintEdges.addAll(prodPath.getEdges());
final Deque<EntityNode> pathNodes = new LinkedList<>();
pathNodes.addAll(prodPath.getEntities());
// Drop the codomain itself; only interior nodes are shadowed.
pathNodes.removeLast();
shadow.addAll(pathNodes);
}
continue CONSTRAINT;
}
}
} else if (c instanceof EqualizerConstraint) {
final EqualizerConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> e = (EqualizerConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge>) c;
// Only the equalizer's source entity shadows its projection path.
if (e.getSourceEntity() == this) {
final ModelPath<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> projection = e.getProjection();
shadow.addAll(projection.getEntities());
// Ignore the projection edge itself:
constraintEdges.addAll(projection.getEdges());
continue CONSTRAINT;
}
} else if (c instanceof PullbackConstraint) {
final PullbackConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> pb = (PullbackConstraint<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge>) c;
// WPBEDIT CF2012: wide pullbacks -- check every projection path.
for (int i = 0; i < pb.getWidth(); i++) {
ModelPath<SketchFrame, SketchGraphModel, Sketch, EntityNode, SketchEdge> proj = pb.getProjectionPath(i);
// If this entity is a projection codomain, shadow the interior
// nodes of all projection paths:
if (this == proj.getCoDomain()) {
for (int j = 0; j < pb.getWidth(); j++) {
proj = pb.getProjectionPath(j);
Deque<EntityNode> projNodes = new LinkedList<>(proj.getEntities());
// Drop the codomain; only interior nodes are shadowed.
projNodes.removeLast();
shadow.addAll(projNodes);
constraintEdges.addAll(proj.getEdges());
}
continue CONSTRAINT;
}
}
} else if (c instanceof LimitConstraint) {
// TRIANGLES TODO CF2012 incomplete
}
}
final LinkedHashSet<SketchEdge> shadowEdges = new LinkedHashSet<>(20);
// All of the ignore entities, plus everything we just found, should be
// ignored by any recursion:
final Collection<EntityNode> toIgnore = new LinkedHashSet<>(3);
toIgnore.add(this);
toIgnore.addAll(ignore);
toIgnore.addAll(shadow);
for (final EntityNode node : shadow) {
// Skip this node itself and anything the caller already handled:
if ((node == this) || ignore.contains(node)) {
continue;
}
// Otherwise, shadow its non-partial edges, and all of its shadow
// edges (recursing with the enlarged ignore set):
shadowEdges.addAll(node.getShadowEdges(toIgnore, constraintEdges));
shadowEdges.addAll(node.getNonPartialEdges());
// Remove edges already involved in the constraints themselves:
shadowEdges.removeAll(constraintEdges);
}
return shadowEdges;
}
Aggregations