Use of catdata.Triple in project fql by CategoricalData: class ColimSchExpModify, method eval.
// TODO aql add options
@Override
public ColimitSchema<N> eval(AqlEnv env) {
    boolean checkJava = !(Boolean) env.defaults.getOrDefault(options, AqlOption.allow_java_eqs_unsafe);
    ColimitSchema<N> colim0 = colim.eval(env);
    // colim0.schemaStr.co
    // Apply the requested renamings: entities first, then foreign keys, then attributes.
    for (Pair<String, String> k : ens) {
        colim0 = colim0.renameEntity(new En(k.first), new En(k.second), checkJava);
    }
    for (Pair<Pair<String, String>, String> k : fks0) {
        colim0 = colim0.renameFk(new Fk(new En(k.first.first), k.first.second), new Fk(new En(k.first.first), k.second), checkJava);
    }
    for (Pair<Pair<String, String>, String> k : atts0) {
        colim0 = colim0.renameAtt(new Att(new En(k.first.first), k.first.second), new Att(new En(k.first.first), k.second), checkJava);
    }
    // Remove each listed foreign key, replacing it by the path of foreign keys given on the right-hand side.
    for (Pair<Pair<String, String>, List<String>> k : fks) {
        if (!colim0.schemaStr.fks.containsKey(new Fk(new En(k.first.first), k.first.second))) {
            throw new RuntimeException("Not an fk: " + k.first + " in\n\n" + colim0.schemaStr);
        }
        String pre = "In processing " + k.first + " -> " + k.second + ", ";
        Collage<Ty, En, Sym, Fk, Att, Void, Void> xxx = colim0.schemaStr.collage();
        RawTerm term = RawTerm.fold(k.second, "v");
        En tr = colim0.schemaStr.fks.get(new Fk(new En(k.first.first), k.first.second)).second;
        Ctx<String, Chc<Ty, En>> ctx = new Ctx<>("v", Chc.inRight(new En(k.first.first)));
        Term<Ty, En, Sym, Fk, Att, Gen, Sk> t = RawTerm.infer1x(ctx.map, term, null, Chc.inRight(tr), xxx.convert(), pre, colim0.schemaStr.typeSide.js).second;
        // colim0 = colim0.removeAtt(new Att(k.first), new Var(k.second.first), t.convert(), checkJava);
        colim0 = colim0.removeFk(new Fk(new En(k.first.first), k.first.second), t.toFkList(), checkJava);
    }
    // Remove each listed attribute, replacing it by the lambda term given on the right-hand side.
    for (Pair<Pair<String, String>, Triple<String, String, RawTerm>> k : atts) {
        if (!colim0.schemaStr.atts.containsKey(new Att(new En(k.first.first), k.first.second))) {
            throw new RuntimeException("Not an attribute: " + k.first + " in\n\n" + colim0.schemaStr);
        }
        String pre = "In processing " + k.first + " -> lambda " + k.second.first + "." + k.second.third + ", ";
        Pair<En, Ty> r = colim0.schemaStr.atts.get(new Att(new En(k.first.first), k.first.second));
        if (k.second.second != null && !k.second.second.equals(r.first)) {
            throw new RuntimeException(pre + " given type is " + k.second.second + " but expected " + r.first);
        }
        Collage<Ty, En, Sym, Fk, Att, Void, Void> xxx = colim0.schemaStr.collage();
        Ctx<String, Chc<Ty, En>> ctx = new Ctx<>(k.second.first, Chc.inRight(r.first));
        Term<Ty, En, Sym, Fk, Att, Gen, Sk> t = RawTerm.infer1x(ctx.map, k.second.third, null, Chc.inLeft(r.second), xxx.convert(), pre, colim0.schemaStr.typeSide.js).second;
        colim0 = colim0.removeAtt(new Att(new En(k.first.first), k.first.second), new Var(k.second.first), t.convert(), checkJava);
    }
    return colim0;
}
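In the foreign-key removal loop above, RawTerm.fold turns the user-supplied list of foreign key names into one nested term over the bound variable "v", which RawTerm.infer1x then type checks against the target entity. Below is a minimal, self-contained sketch of that folding step; it is not fql code, and the class name FoldPathSketch, the helper foldPath, and the plain-string term representation are illustrative assumptions.

import java.util.Arrays;
import java.util.List;

// Self-contained illustration (not fql code) of the assumed behaviour of RawTerm.fold above:
// a path of foreign key names and a bound variable fold into one nested application,
// e.g. ["manager", "dept"] and "v" become dept(manager(v)). Plain strings stand in for RawTerm.
public class FoldPathSketch {
    static String foldPath(List<String> path, String var) {
        String acc = var;                      // start from the bound variable
        for (String name : path) {
            acc = name + "(" + acc + ")";      // wrap each step of the path around the accumulator
        }
        return acc;
    }

    public static void main(String[] args) {
        System.out.println(foldPath(Arrays.asList("manager", "dept"), "v")); // dept(manager(v))
    }
}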
Use of catdata.Triple in project fql by CategoricalData: class InstExpJdbcAll, method makeSchema.
public Schema<Ty, En, Sym, Fk, Att> makeSchema(AqlEnv env, SqlSchema info, AqlOptions ops) {
    boolean checkJava = !(Boolean) ops.getOrDefault(AqlOption.allow_java_eqs_unsafe);
    TypeSide<Ty, Sym> typeSide = new SqlTypeSide(ops);
    // typeSide.validate(true);
    Collage<Ty, En, Sym, Fk, Att, Void, Void> col0 = new Collage<>(typeSide.collage());
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    // Each SQL table becomes an entity and each of its columns becomes an attribute on that entity.
    for (SqlTable table : info.tables) {
        col0.ens.add(new En(table.name));
        for (SqlColumn c : table.columns) {
            if (col0.atts.containsKey(new Att(new En(table.name), c.name))) {
                throw new RuntimeException("Name collision: table " + c.table.name + " col " + c.name + " against table " + col0.atts.get(new Att(new En(table.name), c.name)).first + "\n\n.Possible solution: set option jdbc_import_col_seperator so as to avoid name collisions.");
            }
            col0.atts.put(new Att(new En(table.name), c.name), new Pair<>(new En(table.name), new Ty(sqlTypeToAqlType(c.type.name))));
        }
    }
    // Each SQL foreign key becomes a schema foreign key plus one path equation per column pair.
    for (SqlForeignKey fk : info.fks) {
        col0.fks.put(new Fk(new En(fk.source.name), fk.toString()), new Pair<>(new En(fk.source.name), new En(fk.target.name)));
        Var v = new Var("x");
        for (SqlColumn tcol : fk.map.keySet()) {
            SqlColumn scol = fk.map.get(tcol);
            Att l = new Att(new En(scol.table.name), scol.name);
            Att r = new Att(new En(tcol.table.name), tcol.name);
            Term<Ty, En, Sym, Fk, Att, Void, Void> lhs = Term.Att(l, Term.Var(v));
            Term<Ty, En, Sym, Fk, Att, Void, Void> rhs = Term.Att(r, Term.Fk(new Fk(new En(fk.source.name), fk.toString()), Term.Var(v)));
            eqs.add(new Triple<>(new Pair<>(v, new En(fk.source.name)), lhs, rhs));
            col0.eqs.add(new Eq<>(new Ctx<>(new Pair<>(v, Chc.inRight(new En(fk.source.name)))), lhs, rhs));
        }
    }
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = AqlProver.create(new AqlOptions(options, col0, env.defaults), col0, typeSide.js);
    Schema<Ty, En, Sym, Fk, Att> sch = new Schema<>(typeSide, col0.ens, col0.atts.map, col0.fks.map, eqs, dp, checkJava);
    return sch;
}
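The foreign-key loop above emits, for every column pair (tcol, scol) in fk.map, the path equation scol(x) = tcol(fk(x)) with x ranging over the source table. The sketch below is not fql code; FkEquationSketch, the key name emp_dept_fkey, and the column names are made up purely to show the shape of the generated equations.

import java.util.LinkedHashMap;
import java.util.Map;

// Self-contained illustration (not fql code) of the equations generated above for a SQL
// foreign key: for each column pair (target column tcol, source column scol) in fk.map,
// the schema gets scol(x) = tcol(fk(x)) with x ranging over the source table.
public class FkEquationSketch {
    public static void main(String[] args) {
        String fkName = "emp_dept_fkey";                  // hypothetical key name (fk.toString())
        Map<String, String> targetToSource = new LinkedHashMap<>();
        targetToSource.put("id", "dept_id");              // tcol -> scol, as in fk.map

        for (Map.Entry<String, String> e : targetToSource.entrySet()) {
            String lhs = e.getValue() + "(x)";                    // attribute on the source table
            String rhs = e.getKey() + "(" + fkName + "(x))";      // attribute reached via the foreign key
            System.out.println("forall x : emp . " + lhs + " = " + rhs);
        }
    }
}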
Use of catdata.Triple in project fql by CategoricalData: class ColimitSchema, method renameEntity.
public ColimitSchema<N> renameEntity(En src, En dst, boolean checkJava) {
    if (!schemaStr.ens.contains(src)) {
        throw new RuntimeException(src + " is not an entity in \n" + schemaStr);
    }
    if (schemaStr.ens.contains(dst)) {
        throw new RuntimeException(dst + " is already an entity in \n" + schemaStr);
    }
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoToUser = Mapping.id(schemaStr);
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoFromUser = Mapping.id(schemaStr);
    // Functions that move foreign keys and attributes from src to dst, and their inverses.
    Function<Fk, Fk> updateFk = x -> {
        if (x.en.equals(src)) {
            return new Fk(dst, x.str);
        }
        return x;
    };
    Function<Fk, Fk> deUpdateFk = x -> {
        if (x.en.equals(dst)) {
            return new Fk(src, x.str);
        }
        return x;
    };
    Function<Att, Att> updateAtt = x -> {
        if (x.en.equals(src)) {
            return new Att(dst, x.str);
        }
        return x;
    };
    Function<Att, Att> deUpdateAtt = x -> {
        if (x.en.equals(dst)) {
            return new Att(src, x.str);
        }
        return x;
    };
    // Rebuild the entity set, attributes, foreign keys, and equations with src renamed to dst.
    Set<En> ens = new HashSet<>(schemaStr.ens);
    ens.remove(src);
    ens.add(dst);
    Map<Att, Pair<En, Ty>> atts = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        Pair<En, Ty> v = schemaStr.atts.get(k);
        En s = v.first.equals(src) ? dst : v.first;
        atts.put(updateAtt.apply(k), new Pair<>(s, v.second));
    }
    Map<Fk, Pair<En, En>> fks = new HashMap<>();
    for (Fk k : schemaStr.fks.keySet()) {
        Pair<En, En> v = schemaStr.fks.get(k);
        En s = v.first.equals(src) ? dst : v.first;
        En t = v.second.equals(src) ? dst : v.second;
        fks.put(updateFk.apply(k), new Pair<>(s, t));
    }
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    for (Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>> eq : schemaStr.eqs) {
        Pair<Var, En> v = eq.first;
        En t = v.second.equals(src) ? dst : v.second;
        eqs.add(new Triple<>(new Pair<>(v.first, t), eq.second.mapFk(updateFk).mapAtt(updateAtt), eq.third.mapFk(updateFk).mapAtt(updateAtt)));
    }
    // The decision procedure for the renamed schema translates back to src and delegates to the old prover.
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = new DP<Ty, En, Sym, Fk, Att, Void, Void>() {
        @Override
        public String toStringProver() {
            return "rename entity of " + schemaStr.dp.toStringProver();
        }

        @Override // TODO aql check this
        public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Void, Void> lhs, Term<Ty, En, Sym, Fk, Att, Void, Void> rhs) {
            return schemaStr.dp.eq(ctx.map(v -> v.left ? v : (v.r.equals(dst) ? Chc.inRight(src) : v)), lhs.mapFk(deUpdateFk).mapAtt(deUpdateAtt), rhs.mapFk(deUpdateFk).mapAtt(deUpdateAtt));
        }
    };
    Schema<Ty, En, Sym, Fk, Att> schemaStr2 = new Schema<>(ty, ens, atts, fks, eqs, dp, checkJava); // TODO aql java
    // Build the mapping from the old schema to the renamed one (isoToUser) ...
    Map<En, En> ensM = new HashMap<>();
    for (En k : schemaStr.ens) {
        ensM.put(k, k.equals(src) ? dst : k);
    }
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        attsM.put(k, new Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>(isoToUser.atts.get(k).first, isoToUser.atts.get(k).second.equals(src) ? dst : isoToUser.atts.get(k).second, isoToUser.atts.get(k).third.mapAtt(updateAtt).mapFk(updateFk)));
    }
    Map<Fk, Pair<En, List<Fk>>> fksM = new HashMap<>();
    for (Fk k : schemaStr.fks.keySet()) {
        fksM.put(k, new Pair<>(isoToUser.fks.get(k).first.equals(src) ? dst : isoToUser.fks.get(k).first, isoToUser.fks.get(k).second.stream().map(updateFk).collect(Collectors.toList())));
    }
    isoToUser = new Mapping<>(ensM, attsM, fksM, schemaStr, schemaStr2, checkJava);
    // ... and its inverse from the renamed schema back to the old one (isoFromUser).
    Map<En, En> ensM2 = new HashMap<>();
    for (En k : schemaStr2.ens) {
        ensM2.put(k, k.equals(dst) ? src : k);
    }
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM2 = new HashMap<>();
    for (Att k : schemaStr2.atts.keySet()) {
        attsM2.put(updateAtt.apply(k), new Triple<>(isoFromUser.atts.get(deUpdateAtt.apply(k)).first, isoFromUser.atts.get(deUpdateAtt.apply(k)).second.equals(dst) ? src : isoFromUser.atts.get(deUpdateAtt.apply(k)).second, isoFromUser.atts.get(deUpdateAtt.apply(k)).third));
    }
    Map<Fk, Pair<En, List<Fk>>> fksM2 = new HashMap<>();
    for (Fk k : schemaStr2.fks.keySet()) {
        fksM2.put(updateFk.apply(k), new Pair<>(isoFromUser.fks.get(deUpdateFk.apply(k)).first.equals(dst) ? src : isoFromUser.fks.get(deUpdateFk.apply(k)).first, isoFromUser.fks.get(deUpdateFk.apply(k)).second.stream().map(deUpdateFk).collect(Collectors.toList())));
    }
    isoFromUser = new Mapping<>(ensM2, attsM2, fksM2, schemaStr2, schemaStr, checkJava);
    return wrap(isoToUser, isoFromUser);
}
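renameEntity relies on updateFk/updateAtt and their inverses deUpdateFk/deUpdateAtt composing to the identity, so that the two Mappings it returns are mutually inverse. Below is a self-contained sketch of that inverse pair, using a plain "entity.name" string in place of the Fk class; RenamePairSketch and the example names Employee/Emp are illustrative assumptions, not fql code.

import java.util.function.Function;

// Self-contained illustration (not fql code) of the inverse renaming pair used above:
// update moves keys from the old entity name to the new one, deUpdate undoes it, so the
// round trip is the identity. A key is modelled as the string "entity.name" for brevity.
public class RenamePairSketch {
    public static void main(String[] args) {
        String src = "Employee", dst = "Emp";   // hypothetical entity names
        Function<String, String> update   = k -> k.startsWith(src + ".") ? dst + k.substring(src.length()) : k;
        Function<String, String> deUpdate = k -> k.startsWith(dst + ".") ? src + k.substring(dst.length()) : k;

        String fk = "Employee.worksIn";
        System.out.println(update.apply(fk));                  // Emp.worksIn
        System.out.println(deUpdate.apply(update.apply(fk)));  // Employee.worksIn (round trip is identity)
    }
}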
Use of catdata.Triple in project fql by CategoricalData: class ColimitSchema, method renameAtt.
public ColimitSchema<N> renameAtt(Att src, Att dst, boolean checkJava) {
    if (!schemaStr.atts.containsKey(src)) {
        throw new RuntimeException(src + " is not an attribute of " + src.en + " in \n" + schemaStr);
    }
    if (schemaStr.atts.containsKey(dst)) {
        throw new RuntimeException(dst + " is already an attribute in \n" + schemaStr);
    }
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoToUser = Mapping.id(schemaStr);
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoFromUser = Mapping.id(schemaStr);
    // fun renames src to dst; fun2 is its inverse.
    Function<Att, Att> fun = x -> x.equals(src) ? dst : x;
    Function<Att, Att> fun2 = x -> x.equals(dst) ? src : x;
    Map<Att, Pair<En, Ty>> atts = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        atts.put(fun.apply(k), schemaStr.atts.get(k));
    }
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    for (Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>> eq : schemaStr.eqs) {
        eqs.add(new Triple<>(eq.first, eq.second.mapAtt(fun), eq.third.mapAtt(fun)));
    }
    // The decision procedure translates dst back to src and delegates to the old prover.
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = new DP<Ty, En, Sym, Fk, Att, Void, Void>() {
        @Override
        public String toStringProver() {
            return "rename attribute of " + schemaStr.dp.toStringProver();
        }

        @Override
        public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Void, Void> lhs, Term<Ty, En, Sym, Fk, Att, Void, Void> rhs) {
            return schemaStr.dp.eq(ctx, lhs.mapAtt(fun2), rhs.mapAtt(fun2));
        }
    };
    Schema<Ty, En, Sym, Fk, Att> schemaStr2 = new Schema<>(ty, schemaStr.ens, atts, schemaStr.fks.map, eqs, dp, checkJava);
    // Mapping from the old schema to the renamed one, and its inverse.
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        attsM.put(k, new Triple<>(isoToUser.atts.get(k).first, isoToUser.atts.get(k).second, isoToUser.atts.get(k).third.mapAtt(fun)));
    }
    isoToUser = new Mapping<>(isoToUser.ens.map, attsM, isoToUser.fks.map, schemaStr, schemaStr2, checkJava);
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM2 = new HashMap<>();
    for (Att k : schemaStr2.atts.keySet()) {
        Var v = new Var("v");
        attsM2.put(k, new Triple<>(v, schemaStr2.atts.get(k).first, Term.Att(fun2.apply(k), Term.Var(v))));
    }
    isoFromUser = new Mapping<>(isoFromUser.ens.map, attsM2, isoFromUser.fks.map, schemaStr2, schemaStr, checkJava);
    return wrap(isoToUser, isoFromUser);
}
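The anonymous DP above decides equality in the renamed schema by mapping the new attribute name back to the old one and delegating to the existing prover. The following self-contained sketch shows that translate-then-delegate pattern with a toy string-based prover; TranslateThenDelegateSketch and its Prover interface are assumptions for illustration and do not appear in fql.

import java.util.function.Function;

// Self-contained illustration (not fql code) of the translate-then-delegate pattern used by
// the anonymous DP above: equality in the renamed schema is decided by rewriting the new
// attribute name back to the old one and asking the original prover. Terms are plain strings
// and Prover is a toy interface introduced only for this sketch.
public class TranslateThenDelegateSketch {
    interface Prover { boolean eq(String lhs, String rhs); }

    static Prover translateThenDelegate(Prover original, String src, String dst) {
        Function<String, String> back = s -> s.replace(dst + "(", src + "(");   // crude textual un-rename
        return (lhs, rhs) -> original.eq(back.apply(lhs), back.apply(rhs));
    }

    public static void main(String[] args) {
        Prover original = (l, r) -> l.equals(r);                        // toy prover: syntactic equality
        Prover renamed = translateThenDelegate(original, "salary", "wage");
        System.out.println(renamed.eq("wage(v)", "salary(v)"));         // true under the renaming
    }
}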
Use of catdata.Triple in project fql by CategoricalData: class ColimitSchema, method initialUser.
private Pair<Schema<Ty, En, Sym, Fk, Att>, Ctx<N, Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att>>> initialUser(AqlOptions options, Collage<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void> col, Set<Triple<Pair<Var, Set<Pair<N, En>>>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>>> eqs, Ctx<Pair<N, En>, Set<Pair<N, En>>> eqcs, Schema<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>> schema) {
    // For each node, build the mapping from its schema into the colimit schema: entities go to
    // their equivalence classes, foreign keys to singleton paths tagged with the node, and
    // attributes to attribute terms tagged with the node.
    Ctx<N, Mapping<Ty, En, Sym, Fk, Att, Set<Pair<N, En>>, Pair<N, Fk>, Pair<N, Att>>> mappings = new Ctx<>();
    for (N n : nodes.keySet()) {
        Map<Att, Triple<Var, Set<Pair<N, En>>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>>> atts = new HashMap<>();
        Map<Fk, Pair<Set<Pair<N, En>>, List<Pair<N, Fk>>>> fks = new HashMap<>();
        Map<En, Set<Pair<N, En>>> ens0 = new HashMap<>();
        Schema<Ty, En, Sym, Fk, Att> s = nodes.get(n);
        for (En en : s.ens) {
            ens0.put(en, eqcs.get(new Pair<>(n, en)));
        }
        for (Fk fk : s.fks.keySet()) {
            fks.put(fk, new Pair<>(eqcs.get(new Pair<>(n, s.fks.get(fk).first)), Util.singList(new Pair<>(n, fk))));
        }
        for (Att att : s.atts.keySet()) {
            Var v = new Var("v");
            Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void> t = Term.Att(new Pair<>(n, att), Term.Var(v));
            atts.put(att, new Triple<>(v, eqcs.get(new Pair<>(n, s.atts.get(att).first)), t));
        }
        // TODO aql allow as option?
        Mapping<Ty, En, Sym, Fk, Att, Set<Pair<N, En>>, Pair<N, Fk>, Pair<N, Att>> m = new Mapping<>(ens0, atts, fks, nodes.get(n), schema, false);
        mappings.put(n, m);
    }
    // Convert the colimit collage and its equations to user-facing names and re-prove over them.
    Collage<Ty, En, Sym, Fk, Att, Void, Void> colX = new Collage<>(ty.collage());
    colX.ens.addAll(col.ens.stream().map(ColimitSchema::conv1).collect(Collectors.toSet()));
    colX.atts.map.putAll(col.atts.map((k, v) -> new Pair<>(new Att(conv1(col.atts.get(k).first), conv2Att(k)), new Pair<>(conv1(v.first), v.second))).map);
    colX.fks.putAll(col.fks.map((k, v) -> new Pair<>(new Fk(conv1(col.fks.get(k).first), conv2Fk(k)), new Pair<>(conv1(v.first), conv1(v.second)))).map);
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqsX = eqs.stream().map(t -> new Triple<>(new Pair<>(t.first.first, conv1(t.first.second)), conv3(col, t.second), conv3(col, t.third))).collect(Collectors.toSet());
    colX.eqs.addAll(col.eqs.stream().map(t -> new Eq<>(t.ctx.map((k, v) -> new Pair<>(k, conv4(v))), conv3(col, t.lhs), conv3(col, t.rhs))).collect(Collectors.toSet()));
    // AqlOptions opsX = new AqlOptions(options, colX);
    DP<Ty, En, Sym, Fk, Att, Void, Void> dpX = AqlProver.create(options, colX, ty.js);
    Schema<Ty, En, Sym, Fk, Att> schemaStr = new Schema<>(ty, colX.ens, colX.atts.map, colX.fks.map, eqsX, dpX, false);
    Ctx<N, Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att>> mappingsStr = new Ctx<>();
    for (N n : mappings.keySet()) {
        mappingsStr.put(n, conv5(col, schemaStr, mappings.get(n)));
    }
    return new Pair<>(schemaStr, mappingsStr);
}
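initialUser represents each entity of the colimit schema as an equivalence class Set<Pair<N, En>> of (node, entity) pairs, and eqcs sends every pair to its class. The sketch below illustrates that representation with plain strings standing in for the (node, entity) pairs; EqcSketch and the example nodes n1/n2 are illustrative assumptions, not fql code.

import java.util.*;

// Self-contained illustration (not fql code) of the equivalence-class representation used
// above: each colimit entity is a set of (node, entity) pairs, and eqcs maps every pair
// to the class containing it. Strings such as "n1.Emp" stand in for Pair<N, En>.
public class EqcSketch {
    public static void main(String[] args) {
        // Hypothetical colimit of two nodes n1 and n2 where n1.Emp is glued to n2.Person.
        Set<String> class1 = new HashSet<>(Arrays.asList("n1.Emp", "n2.Person"));
        Set<String> class2 = new HashSet<>(Collections.singletonList("n1.Dept"));

        Map<String, Set<String>> eqcs = new HashMap<>();
        for (String p : class1) eqcs.put(p, class1);
        for (String p : class2) eqcs.put(p, class2);

        // Glued entities resolve to the same colimit entity.
        System.out.println(eqcs.get("n1.Emp").equals(eqcs.get("n2.Person"))); // true
    }
}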