Use of catdata.fpql.XPoly.Block in project fql by CategoricalData.
The class EnrichViewer, method idPoly.
private static XPoly<String, String> idPoly(XSchema isa, String isa0, String merged) {
    Map<Object, Pair<String, Block<String, String>>> blocks = new HashMap<>();
    for (String node : isa.nodes) {
        // one block per node, with a single bound variable "v" ranging over that node
        Map<Object, String> from = new HashMap<>();
        from.put("v", node);
        Set<Pair<List<Object>, List<Object>>> where = new HashSet<>();
        Map<String, List<Object>> attrs = new HashMap<>();
        Map<String, Pair<Object, Map<Object, List<Object>>>> edges = new HashMap<>();
        for (Triple<String, String, String> arrow : isa.arrows) {
            if (!arrow.second.equals(node)) {
                continue;
            }
            if (isa.nodes.contains(arrow.third)) {
                // arrow into another node: record an edge into that node's block q_<target>
                Map<String, List<String>> map = new HashMap<>();
                List<String> l = new LinkedList<>();
                l.add("v");
                l.add(arrow.first);
                map.put("v", l);
                @SuppressWarnings({ "unchecked", "rawtypes" })
                Pair<Object, Map<Object, List<Object>>> ppp = new Pair("q_" + arrow.third, map);
                edges.put(arrow.first, ppp);
            } else {
                // arrow into a type: record the attribute projection v.<arrow>
                List<Object> l = new LinkedList<>();
                l.add("v");
                l.add(arrow.first);
                attrs.put(arrow.first, l);
            }
        }
        Block<String, String> block = new Block<>(from, where, attrs, edges);
        blocks.put("q_" + node, new Pair<>(node, block));
    }
    return new XPoly<>(new Var(isa0), new Var(merged), blocks);
}
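For orientation, here is a minimal, hypothetical sketch of the data layout that idPoly builds, reduced to a single node. Only the Block, Pair, Var and XPoly constructors used above are assumed; the names "Person", "name", "src" and "dst" are illustrative, not taken from the project.

// Hypothetical single-node example mirroring the shapes built in idPoly above;
// "Person" and "name" are illustrative names, not from the project.
private static XPoly<String, String> singleNodeExample() {
    Map<Object, String> from = new HashMap<>();
    from.put("v", "Person");                                        // one bound variable

    Set<Pair<List<Object>, List<Object>>> where = new HashSet<>();  // no equations

    Map<String, List<Object>> attrs = new HashMap<>();
    List<Object> path = new LinkedList<>();
    path.add("v");
    path.add("name");
    attrs.put("name", path);                                        // attribute projection v.name

    Map<String, Pair<Object, Map<Object, List<Object>>>> edges = new HashMap<>();

    Block<String, String> block = new Block<>(from, where, attrs, edges);

    Map<Object, Pair<String, Block<String, String>>> blocks = new HashMap<>();
    blocks.put("q_Person", new Pair<>("Person", block));            // key convention used above
    return new XPoly<>(new Var("src"), new Var("dst"), blocks);
}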
Use of catdata.fpql.XPoly.Block in project fql by CategoricalData.
The class XProd, method checkEdges.
// TODO: check that INSTANCEs are saturated?
@SuppressWarnings("unused")
private static <C, D> void checkEdges(XPoly<C, D> poly, Map<Object, XCtx> frozens) {
    for (Object k : poly.blocks.keySet()) {
        Pair<D, Block<C, D>> b = poly.blocks.get(k);
        XCtx src = frozens.get(k);
        // every non-identity, non-attribute edge out of the block's target must be mapped
        for (D term : poly.dst.terms()) {
            Pair<D, D> t = poly.dst.type(term);
            if (!t.first.equals(b.first)) {
                continue;
            }
            if (t.second.equals("_1")) {
                continue;
            }
            if (poly.dst.allIds().contains(term)) {
                continue;
            }
            if (poly.dst.global.allTerms().contains(t.second)) {
                continue;
            }
            if (!b.second.edges.containsKey(term)) {
                throw new RuntimeException("Missing mapping for edge " + term + " in " + k);
            }
        }
        // the block must provide exactly the attributes of its target node
        Set atts = new HashSet();
        for (D arr : poly.dst.allTerms()) {
            if (poly.dst.allIds().contains(arr)) {
                continue;
            }
            Pair<D, D> ty = poly.dst.type(arr);
            if (ty.second.equals("_1")) {
                continue;
            }
            if (!ty.first.equals(b.first)) {
                continue;
            }
            if (!poly.dst.ids.contains(ty.second)) {
                atts.add(arr);
            }
        }
        if (!atts.equals(b.second.attrs.keySet())) {
            throw new RuntimeException("Bad attributes in " + k + ": " + atts + " vs " + b.second.attrs.keySet());
        }
        for (D k2 : b.second.edges.keySet()) {
            Pair<Object, Map<Object, List<Object>>> v2 = b.second.edges.get(k2);
            XCtx dst = frozens.get(v2.first);
            if (dst == null) {
                throw new RuntimeException("Edge " + k2 + " goes to non-existent node " + v2.first);
            }
            Map em = new HashMap<>(v2.second);
            for (Object o : dst.schema.allTerms()) {
                if (em.containsKey(o)) {
                    continue;
                }
                List l = new LinkedList();
                l.add(o);
                em.put(o, l);
            }
            // constructing the XMapping checks that em is a well-formed transform; the result is discarded
            new XMapping(dst, src, em, "transform");
        }
    }
}
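checkEdges is only meaningful once every block has been frozen against the instance schema. The sketch below is a hedged reconstruction of that call pattern, lifted from the frozen(...) and checkEdges(...) lines of uberflower shown further down; the helper name freezeAndCheck is hypothetical and is written as if it lived inside XProd.

// Hypothetical helper; mirrors the frozen(...)/checkEdges(...) calls in uberflower below.
static <C, D> void freezeAndCheck(XPoly<C, D> poly, XCtx<C> I) {
    Map<Object, XCtx> frozens = new HashMap<>();
    for (Object name : poly.blocks.keySet()) {
        frozens.put(name, frozen(poly.blocks.get(name).second, I.schema));
    }
    checkEdges(poly, frozens);  // throws if a block is missing an edge or attribute mapping
}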
Use of catdata.fpql.XPoly.Block in project fql by CategoricalData.
The class XMapping, method uber.
/*
 * public XMapping<C, D> rel() {
 *     Map m = new HashMap<>();
 *
 *     for (C c : src.terms()) {
 *         Pair<C, C> t = src.type(c);
 *         if (src.schema.allTerms().contains(c)) {
 *             continue;
 *         }
 *
 *         List<C> l = new LinkedList<>();
 *         l.add(c);
 *         List<Map<Triple<C, C, List<C>>, Triple<C, C, List<C>>>> lx = src.obs(l);
 *
 *         List<Map<Triple<D, D, List<D>>, Triple<D, D, List<D>>>> rx = dst.obs(em.get(c));
 *         dst.rel().type((List<D>) rx); // sanity check
 *
 *         m.put(lx.get(0), rx);
 *     }
 *
 *     for (Object o : src.schema.allTerms()) {
 *         if (m.containsKey(o)) {
 *             continue;
 *         }
 *         List l = new LinkedList();
 *         l.add(o);
 *         m.put(o, l);
 *     }
 *
 *     return new XMapping<>(src.rel(), dst.rel(), m, "homomorphism");
 * }
 */
@SuppressWarnings("unchecked")
public XPoly<C, D> uber() {
Map<Object, Pair<D, Block<C, D>>> map = new HashMap<>();
Map<D, XCtx<Pair<Triple<D, D, List<D>>, C>>> dfys = new HashMap<>();
Map<D, XCtx<D>> ys = new HashMap<>();
for (D d : dst.allIds()) {
XCtx<D> y = dst.y((D) "u_u", d);
XCtx<Pair<Triple<D, D, List<D>>, C>> dfy = delta(y);
dfys.put(d, dfy);
ys.put(d, y);
}
Map<D, XMapping<Pair<Triple<D, D, List<D>>, C>, Pair<Triple<D, D, List<D>>, C>>> dfys_t = new HashMap<>();
for (D e : dst.allTerms()) {
if (dst.allIds().contains(e)) {
continue;
}
Pair<D, D> t = dst.type(e);
D d = t.first;
D d0 = t.second;
XMapping<D, D> h = uber_sub((D) "u_u", (D) "u_u", e, ys.get(d), ys.get(d0));
dfys_t.put(e, deltaT(h));
}
for (D d : dst.ids) {
XCtx<Pair<Triple<D, D, List<D>>, C>> dfy = dfys.get(d);
Map<Object, C> from = new HashMap<>();
for (Pair<Triple<D, D, List<D>>, C> cf : dfy.terms()) {
from.put(cf, cf.second);
}
@SuppressWarnings("rawtypes") Set where = new HashSet<>(dfy.eqs);
Map<D, Pair<Object, Map<Object, List<Object>>>> edges = new HashMap<>();
Map<D, List<Object>> attrs = new HashMap<>();
for (D e : dst.terms()) {
if (dst.allIds().contains(e)) {
continue;
}
Pair<D, D> t = dst.type(e);
if (!t.first.equals(d) || t.second.equals("_1")) {
continue;
}
D d0 = t.second;
XMapping<Pair<Triple<D, D, List<D>>, C>, Pair<Triple<D, D, List<D>>, C>> dfh = dfys_t.get(e);
if (dfh == null) {
throw new RuntimeException("missing: edge " + e + " in " + dfys_t.keySet());
}
XCtx<Pair<Triple<D, D, List<D>>, C>> dfy0 = dfys.get(d0);
if (dfy0 == null) {
throw new RuntimeException();
}
if (dst.ids.contains(d0)) {
@SuppressWarnings("rawtypes") Map edge_m = new HashMap<>();
dfy0.terms();
for (Pair<Triple<D, D, List<D>>, C> cf : dfy0.terms()) {
edge_m.put(cf, dfh.em.get(cf));
}
edges.put(e, new Pair<>("q" + d0, edge_m));
} else {
@SuppressWarnings("rawtypes") List lll = dfh.em.get(new Pair<>(new Triple<>((D) "_1", d0, Util.singList((D) "u_u")), (C) d0));
if (lll == null) {
throw new RuntimeException();
}
attrs.put(e, lll);
}
}
Block<C, D> block = new Block<>(from, where, attrs, edges);
map.put("q" + d, new Pair<>(d, block));
}
XPoly<C, D> ret = new XPoly<>(src, dst, map);
return ret;
}
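The blocks map that uber returns is keyed by the string "q" + d for each identity d of dst. Below is a hedged sketch of how a caller might inspect the result; the mapping f is hypothetical, and only fields already read elsewhere on this page (poly.blocks, Pair.first/second, Block.from) are used.

// Hypothetical inspection of uber()'s result; f is assumed to be an existing
// XMapping<String, String>.
static void printBlocks(XMapping<String, String> f) {
    XPoly<String, String> q = f.uber();
    for (Object name : q.blocks.keySet()) {
        Pair<String, Block<String, String>> b = q.blocks.get(name);
        System.out.println(name + " targets " + b.first
                + " over variables " + b.second.from.keySet());
    }
}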
Use of catdata.fpql.XPoly.Block in project fql by CategoricalData.
The class XParser, method fromBlock.
@SuppressWarnings({ "unchecked", "rawtypes" })
private static Block<String, String> fromBlock(Object o) {
    Tuple4<List, List, List, List> t = (Tuple4<List, List, List, List>) o;
    Map<Object, String> from = new HashMap<>();
    Set<Pair<List<Object>, List<Object>>> where = new HashSet<>();
    Map<String, List<Object>> attrs = new HashMap<>();
    Map<String, Pair<Object, Map<Object, List<Object>>>> edges = new HashMap<>();
    // bound variables: variable -> node
    for (Object x : t.a) {
        Tuple3 l = (Tuple3) x;
        if (from.containsKey(l.a.toString())) {
            throw new RuntimeException("Duplicate for: " + l.a);
        }
        from.put(l.a.toString(), l.c.toString());
    }
    // equations between paths
    for (Object x : t.b) {
        Tuple3 l = (Tuple3) x;
        where.add(new Pair(l.a, l.c));
    }
    // attributes: label -> path
    for (Object x : t.c) {
        Tuple3 l = (Tuple3) x;
        if (attrs.containsKey(l.a.toString())) {
            throw new RuntimeException("Duplicate for: " + l.a);
        }
        attrs.put(l.a.toString(), (List<Object>) l.c);
    }
    // edges: label -> (target block, substitution)
    for (Object x : t.d) {
        Tuple5 l = (Tuple5) x;
        if (edges.containsKey(l.a.toString())) {
            throw new RuntimeException("Duplicate for: " + l.a);
        }
        edges.put(l.a.toString(), new Pair(l.e.toString(), fromBlockHelper(l.c)));
    }
    return new Block<>(from, where, attrs, edges);
}
Use of catdata.fpql.XPoly.Block in project fql by CategoricalData.
The class XProd, method uberflower.
// TODO: make sure this is conjunctive, otherwise throw an error // duplicate for later
// TODO: on saturated with discrete op will be saturated
// TODO: must add (not query label) (TARGET NODE) EVEN FOR THE CONJUNCTIVE CASE // add label here
// TODO: do pre-filtering based on lhs = const (ground) here // won't help
public static <C, D> XCtx<Pair<Object, Map<Object, Triple<C, C, List<C>>>>> uberflower(XPoly<C, D> poly, XCtx<C> I) {
    // XCtx c = frozen(flower, I.schema);
    Map<Object, Set<Map<Object, Triple<C, C, List<C>>>>> top = new HashMap<>();
    Map<Object, XCtx> frozens = new HashMap<>();
    // evaluate each block ("flower") against I: enumerate variable assignments that satisfy the where-clauses
    for (Object flower_name : poly.blocks.keySet()) {
        Pair<D, Block<C, D>> flowerX = poly.blocks.get(flower_name);
        // D flower_dst = flowerX.first;
        Block<C, D> flower = flowerX.second;
        Set<Map<Object, Triple<C, C, List<C>>>> ret = new HashSet<>();
        ret.add(new HashMap<>());
        for (Object var : flower.from.keySet()) {
            C node = flower.from.get(var);
            Set<Map<Object, Triple<C, C, List<C>>>> ret2 = new HashSet<>();
            for (Map<Object, Triple<C, C, List<C>>> tuple : ret) {
                for (Triple<C, C, List<C>> t : I.cat().hom((C) "_1", node)) {
                    Map<Object, Triple<C, C, List<C>>> merged = new HashMap<>(tuple);
                    merged.put(var, t);
                    String result = eval(convert2(flower.where), merged, flower.from.keySet(), I);
                    if (result.equals("false")) {
                        continue;
                    }
                    ret2.add(merged);
                }
            }
            ret = ret2;
        }
        top.put(flower_name, ret);
        frozens.put(flower_name, frozen(flower, I.schema));
    }
    checkEdges(poly, frozens);
    // assemble the result instance over poly.dst: one term per (block, satisfying tuple)
    Set<Pair<Object, Map<Object, Triple<C, C, List<C>>>>> ids = new HashSet<>();
    Map<Pair<Object, Map<Object, Triple<C, C, List<C>>>>, Pair<Pair<Object, Map<Object, Triple<C, C, List<C>>>>, Pair<Object, Map<Object, Triple<C, C, List<C>>>>>> types = new HashMap<>();
    Set<Pair<List<Pair<Object, Map<Object, Triple<C, C, List<C>>>>>, List<Pair<Object, Map<Object, Triple<C, C, List<C>>>>>>> eqs = new HashSet<>();
    for (Object flower_name : poly.blocks.keySet()) {
        Set<Map<Object, Triple<C, C, List<C>>>> ret = top.get(flower_name);
        Pair<D, Block<C, D>> flowerX = poly.blocks.get(flower_name);
        D flower_dst = flowerX.first;
        Block<C, D> flower = flowerX.second;
        XCtx c = frozens.get(flower_name);
        for (Map<Object, Triple<C, C, List<C>>> k : ret) {
            types.put(new Pair<>(flower_name, k), new Pair("_1", flower_dst));
            for (D edge : flower.attrs.keySet()) {
                Object tgt = c.type(flower.attrs.get(edge)).second;
                if (!I.global.ids.contains(tgt)) {
                    throw new RuntimeException("Selection path " + edge + " does not target a type");
                }
                List lhs = new LinkedList();
                lhs.add(new Pair<>(flower_name, k));
                lhs.add(edge);
                // must normalize in I
                List<C> rhs0 = subst_new(flower.attrs.get(edge), k, new HashSet(), new HashSet());
                Triple<C, C, List<C>> rhs = I.find_fast(new Triple("_1", tgt, rhs0));
                List rhsX = new LinkedList();
                if (I.schema.cat().hom((C) "_1", (C) tgt).contains(rhs)) {
                    if (rhs.third.isEmpty()) {
                        rhsX.add(rhs.first);
                    } else {
                        rhsX.addAll(rhs.third);
                    }
                } else {
                    rhsX.add(rhs);
                }
                eqs.add(new Pair(lhs, rhsX));
            }
            for (D edge : flower.edges.keySet()) {
                // D tgt = poly.dst.type(edge).second;
                List lhs = new LinkedList();
                lhs.add(new Pair<>(flower_name, k));
                lhs.add(edge);
                // must normalize in I
                Map rhs0Q = new HashMap();
                for (Object str : flower.edges.get(edge).second.keySet()) {
                    List<C> rhs0Z = subst_new(flower.edges.get(edge).second.get(str), k, new HashSet(), new HashSet());
                    rhs0Q.put(str, rhs0Z);
                }
                Map found = null;
                outer: for (Map<Object, Triple<C, C, List<C>>> map : top.get(flower.edges.get(edge).first)) {
                    for (Object str : map.keySet()) {
                        if (!I.getKB().equiv(map.get(str).third, (List<C>) rhs0Q.get(str))) {
                            continue outer;
                        }
                    }
                    if (found != null) {
                        throw new RuntimeException();
                    }
                    found = map;
                }
                if (found == null) {
                    throw new RuntimeException("Cannot find ID " + rhs0Q + " in " + top.get(flower.edges.get(edge).first));
                }
                List rhsX = new LinkedList();
                rhsX.add(new Pair<>(flower.edges.get(edge).first, found));
                eqs.add(new Pair(lhs, rhsX));
            }
        }
    }
    // carry the typed constants of I over to the result, together with their equations
    Map types0 = types;
    for (C t : I.global.ids) {
        for (Triple<C, C, List<C>> arr : I.cat().hom((C) "_1", t)) {
            if (I.global.cat().hom((C) "_1", t).contains(arr)) {
                continue;
            }
            types0.put(arr, new Pair<>("_1", t));
            for (Entry<C, Pair<C, C>> e : I.global.types.entrySet()) {
                if (!e.getValue().first.equals(t)) {
                    continue;
                }
                List lhs = new LinkedList();
                lhs.add(arr);
                lhs.add(e.getKey());
                List<C> rhs0 = new LinkedList<>();
                // rhs0.add(arr.second);
                rhs0.addAll(arr.third);
                rhs0.add(e.getKey());
                Triple<C, C, List<C>> rhsX = I.find_fast(new Triple<>((C) "_1", e.getValue().second, rhs0));
                List rhs = new LinkedList();
                if (I.schema.cat().hom((C) "_1", e.getValue().second).contains(rhsX)) {
                    if (rhsX.third.isEmpty()) {
                        rhs.add(rhsX.first);
                    } else {
                        rhs.addAll(rhsX.third);
                    }
                } else {
                    rhs.add(rhsX);
                }
                eqs.add(new Pair<>(lhs, rhs));
            }
        }
    }
    XCtx J = new XCtx(ids, types, eqs, I.global, poly.dst, "instance");
    J.saturated = true;
    return J;
}
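Finally, a hedged sketch of how uberflower is typically driven end to end: given a polynomial query and a saturated instance over its source schema, it returns a new, saturated instance over poly.dst whose terms are (block name, tuple) pairs. Only the public signature shown above is assumed; the wrapper name runQuery is hypothetical.

// Hypothetical wrapper; only the public uberflower signature above is assumed.
static <C, D> XCtx<Pair<Object, Map<Object, Triple<C, C, List<C>>>>> runQuery(
        XPoly<C, D> poly, XCtx<C> I) {
    XCtx<Pair<Object, Map<Object, Triple<C, C, List<C>>>>> J = XProd.uberflower(poly, I);
    // uberflower marks the result as saturated; its terms are (block name, tuple) pairs
    return J;
}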