Use of catdata.aql.exp.SchExpRaw.Fk in project fql by CategoricalData.
Class InstExpJdbcAll, method makeSchema.
public Schema<Ty, En, Sym, Fk, Att> makeSchema(AqlEnv env, SqlSchema info, AqlOptions ops) {
    boolean checkJava = !(Boolean) ops.getOrDefault(AqlOption.allow_java_eqs_unsafe);
    TypeSide<Ty, Sym> typeSide = new SqlTypeSide(ops);
    // typeSide.validate(true);
    Collage<Ty, En, Sym, Fk, Att, Void, Void> col0 = new Collage<>(typeSide.collage());
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    // Each SQL table becomes an entity; each column becomes an attribute of that entity.
    for (SqlTable table : info.tables) {
        col0.ens.add(new En(table.name));
        for (SqlColumn c : table.columns) {
            if (col0.atts.containsKey(new Att(new En(table.name), c.name))) {
                throw new RuntimeException("Name collision: table " + c.table.name + " col " + c.name + " against table " + col0.atts.get(new Att(new En(table.name), c.name)).first + "\n\n.Possible solution: set option jdbc_import_col_seperator so as to avoid name collisions.");
            }
            col0.atts.put(new Att(new En(table.name), c.name), new Pair<>(new En(table.name), new Ty(sqlTypeToAqlType(c.type.name))));
        }
    }
    // Each SQL foreign key becomes an Fk, plus one schema equation per column pair: scol(x) = tcol(fk(x)).
    for (SqlForeignKey fk : info.fks) {
        col0.fks.put(new Fk(new En(fk.source.name), fk.toString()), new Pair<>(new En(fk.source.name), new En(fk.target.name)));
        Var v = new Var("x");
        for (SqlColumn tcol : fk.map.keySet()) {
            SqlColumn scol = fk.map.get(tcol);
            Att l = new Att(new En(scol.table.name), scol.name);
            Att r = new Att(new En(tcol.table.name), tcol.name);
            Term<Ty, En, Sym, Fk, Att, Void, Void> lhs = Term.Att(l, Term.Var(v));
            Term<Ty, En, Sym, Fk, Att, Void, Void> rhs = Term.Att(r, Term.Fk(new Fk(new En(fk.source.name), fk.toString()), Term.Var(v)));
            eqs.add(new Triple<>(new Pair<>(v, new En(fk.source.name)), lhs, rhs));
            col0.eqs.add(new Eq<>(new Ctx<>(new Pair<>(v, Chc.inRight(new En(fk.source.name)))), lhs, rhs));
        }
    }
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = AqlProver.create(new AqlOptions(options, col0, env.defaults), col0, typeSide.js);
    Schema<Ty, En, Sym, Fk, Att> sch = new Schema<>(typeSide, col0.ens, col0.atts.map, col0.fks.map, eqs, dp, checkJava);
    return sch;
}
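For orientation, here is a minimal sketch (not project code) of the equation the inner loop above produces for a single foreign-key column pair. The table, column, and key names are invented, and the snippet assumes the same imports and type parameters as the method above.

    // Suppose table Employee has column deptId referencing Dept.id via foreign key "fkDept".
    // The loop asserts, for every row x of Employee: deptId(x) = id(fkDept(x)).
    Var v = new Var("x");
    Fk fkDept = new Fk(new En("Employee"), "fkDept");
    Term<Ty, En, Sym, Fk, Att, Void, Void> lhs = Term.Att(new Att(new En("Employee"), "deptId"), Term.Var(v));
    Term<Ty, En, Sym, Fk, Att, Void, Void> rhs = Term.Att(new Att(new En("Dept"), "id"), Term.Fk(fkDept, Term.Var(v)));
    eqs.add(new Triple<>(new Pair<>(v, new En("Employee")), lhs, rhs));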
Use of catdata.aql.exp.SchExpRaw.Fk in project fql by CategoricalData.
Class InstExpJdbcAll, method toInstance.
private Instance<Ty, En, Sym, Fk, Att, Gen, Null<?>, Gen, Null<?>> toInstance(AqlEnv env, SqlInstance inst, SqlSchema info) {
    AqlOptions ops = new AqlOptions(options, null, env.defaults);
    Schema<Ty, En, Sym, Fk, Att> sch = makeSchema(env, info, ops);
    Ctx<En, Collection<Gen>> ens0 = new Ctx<>(Util.newSetsFor0(sch.ens));
    Ctx<Ty, Collection<Null<?>>> tys0 = new Ctx<>();
    Ctx<Gen, Ctx<Fk, Gen>> fks0 = new Ctx<>();
    Ctx<Gen, Ctx<Att, Term<Ty, Void, Sym, Void, Void, Void, Null<?>>>> atts0 = new Ctx<>();
    Ctx<Null<?>, Term<Ty, En, Sym, Fk, Att, Gen, Null<?>>> extraRepr = new Ctx<>();
    for (Ty ty : sch.typeSide.tys) {
        tys0.put(ty, new HashSet<>());
    }
    boolean schemaOnly = (Boolean) ops.getOrDefault(AqlOption.schema_only);
    boolean nullOnErr = (Boolean) ops.getOrDefault(AqlOption.import_null_on_err_unsafe);
    boolean dontCheckClosure = (Boolean) ops.getOrDefault(AqlOption.import_dont_check_closure_unsafe);
    if (!schemaOnly) {
        int fr = 0;
        Map<SqlTable, Map<Map<SqlColumn, Optional<Object>>, Gen>> iso1 = new HashMap<>();
        // One generator per row; each column value is converted into an attribute term by objectToSk.
        for (SqlTable table : info.tables) {
            Set<Map<SqlColumn, Optional<Object>>> tuples = inst.get(table);
            Map<Map<SqlColumn, Optional<Object>>, Gen> i1 = new HashMap<>();
            SqlColumn thePk = null;
            if (table.pk.size() == 1) {
                thePk = Util.get0(table.pk);
            }
            for (Map<SqlColumn, Optional<Object>> tuple : tuples) {
                Gen i = new Gen("v" + (fr++));
                /* can't do this until Gen need not be unique
                if (thePk == null) {
                    i = new Gen("v" + (fr++));
                } else {
                    Optional<Object> x = tuple.get(thePk);
                    if (!x.isPresent()) {
                        throw new RuntimeException("Primary key col is null in " + tuple);
                    }
                    i = new Gen(x.get().toString()); //TODO aql
                }
                */
                i1.put(tuple, i);
                // tuple.keySet().
                // i2.put(i, tuple);
                ens0.get(new En(table.name)).add(i);
                for (SqlColumn c : table.columns) {
                    if (!atts0.containsKey(i)) {
                        atts0.put(i, new Ctx<>());
                    }
                    Optional<Object> val = tuple.get(c);
                    Term<Ty, Void, Sym, Void, Void, Void, Null<?>> xxx = InstExpJdbc.objectToSk(sch, val.orElse(null), i, new Att(new En(table.name), c.name), tys0, extraRepr, false, nullOnErr);
                    atts0.get(i).put(new Att(new En(table.name), c.name), xxx);
                }
            }
            iso1.put(table, i1);
            // iso2.put(table, i2);
        }
        // Record the action of each foreign key: the generator of a source row is sent to the generator of the row it references.
        for (SqlForeignKey fk : info.fks) {
            for (Map<SqlColumn, Optional<Object>> in : inst.get(fk.source)) {
                Map<SqlColumn, Optional<Object>> out = inst.follow(in, fk);
                Gen tgen = iso1.get(fk.target).get(out);
                Gen sgen = iso1.get(fk.source).get(in);
                if (!fks0.containsKey(sgen)) {
                    fks0.put(sgen, new Ctx<>());
                }
                fks0.get(sgen).put(new Fk(new En(fk.source.name), fk.toString()), tgen);
            }
        }
    }
    ImportAlgebra<Ty, En, Sym, Fk, Att, Gen, Null<?>> alg = new ImportAlgebra<Ty, En, Sym, Fk, Att, Gen, Null<?>>(sch, ens0, tys0, fks0, atts0, Object::toString, Object::toString, dontCheckClosure);
    return new SaturatedInstance<>(alg, alg, (Boolean) ops.getOrDefault(AqlOption.require_consistency), (Boolean) ops.getOrDefault(AqlOption.allow_java_eqs_unsafe), true, extraRepr);
}
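To picture the foreign-key pass, here is a small hedged sketch (invented table and key names, same classes as above) of what fks0 records when a row of Employee imported as generator v0 references a Dept row imported as generator v1.

    // Sketch only: the source-row generator is sent, along the Fk, to the target-row generator.
    Gen sgen = new Gen("v0");
    Gen tgen = new Gen("v1");
    fks0.put(sgen, new Ctx<>());
    fks0.get(sgen).put(new Fk(new En("Employee"), "fkDept"), tgen);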
Use of catdata.aql.exp.SchExpRaw.Fk in project fql by CategoricalData.
Class ColimitSchema, method renameEntity.
public ColimitSchema<N> renameEntity(En src, En dst, boolean checkJava) {
    if (!schemaStr.ens.contains(src)) {
        throw new RuntimeException(src + " is not an entity in \n" + schemaStr);
    }
    if (schemaStr.ens.contains(dst)) {
        throw new RuntimeException(dst + " is already an entity in \n" + schemaStr);
    }
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoToUser = Mapping.id(schemaStr);
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoFromUser = Mapping.id(schemaStr);
    // updateFk/updateAtt move foreign keys and attributes from src to dst; the deUpdate* functions invert them.
    Function<Fk, Fk> updateFk = x -> {
        if (x.en.equals(src)) {
            return new Fk(dst, x.str);
        }
        return x;
    };
    Function<Fk, Fk> deUpdateFk = x -> {
        if (x.en.equals(dst)) {
            return new Fk(src, x.str);
        }
        return x;
    };
    Function<Att, Att> updateAtt = x -> {
        if (x.en.equals(src)) {
            return new Att(dst, x.str);
        }
        return x;
    };
    Function<Att, Att> deUpdateAtt = x -> {
        if (x.en.equals(dst)) {
            return new Att(src, x.str);
        }
        return x;
    };
    Set<En> ens = new HashSet<>(schemaStr.ens);
    ens.remove(src);
    ens.add(dst);
    Map<Att, Pair<En, Ty>> atts = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        Pair<En, Ty> v = schemaStr.atts.get(k);
        En s = v.first.equals(src) ? dst : v.first;
        atts.put(updateAtt.apply(k), new Pair<>(s, v.second));
    }
    Map<Fk, Pair<En, En>> fks = new HashMap<>();
    for (Fk k : schemaStr.fks.keySet()) {
        Pair<En, En> v = schemaStr.fks.get(k);
        En s = v.first.equals(src) ? dst : v.first;
        En t = v.second.equals(src) ? dst : v.second;
        fks.put(updateFk.apply(k), new Pair<>(s, t));
    }
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    for (Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>> eq : schemaStr.eqs) {
        Pair<Var, En> v = eq.first;
        En t = v.second.equals(src) ? dst : v.second;
        eqs.add(new Triple<>(new Pair<>(v.first, t), eq.second.mapFk(updateFk).mapAtt(updateAtt), eq.third.mapFk(updateFk).mapAtt(updateAtt)));
    }
    // Decide equations in the renamed schema by undoing the rename and delegating to the existing prover.
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = new DP<Ty, En, Sym, Fk, Att, Void, Void>() {

        @Override
        public String toStringProver() {
            return "rename entity of " + schemaStr.dp.toStringProver();
        }

        @Override
        public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Void, Void> lhs, Term<Ty, En, Sym, Fk, Att, Void, Void> rhs) {
            // TODO aql check this
            return schemaStr.dp.eq(ctx.map(v -> v.left ? v : (v.r.equals(dst) ? Chc.inRight(src) : v)), lhs.mapFk(deUpdateFk).mapAtt(deUpdateAtt), rhs.mapFk(deUpdateFk).mapAtt(deUpdateAtt));
        }
    };
    Schema<Ty, En, Sym, Fk, Att> schemaStr2 = new Schema<>(ty, ens, atts, fks, eqs, dp, checkJava); // TODO aql java
    Map<En, En> ensM = new HashMap<>();
    for (En k : schemaStr.ens) {
        ensM.put(k, k.equals(src) ? dst : k);
    }
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        attsM.put(k, new Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>(isoToUser.atts.get(k).first, isoToUser.atts.get(k).second.equals(src) ? dst : isoToUser.atts.get(k).second, isoToUser.atts.get(k).third.mapAtt(updateAtt).mapFk(updateFk)));
    }
    Map<Fk, Pair<En, List<Fk>>> fksM = new HashMap<>();
    for (Fk k : schemaStr.fks.keySet()) {
        fksM.put(k, new Pair<>(isoToUser.fks.get(k).first.equals(src) ? dst : isoToUser.fks.get(k).first, isoToUser.fks.get(k).second.stream().map(updateFk).collect(Collectors.toList())));
    }
    isoToUser = new Mapping<>(ensM, attsM, fksM, schemaStr, schemaStr2, checkJava);
    Map<En, En> ensM2 = new HashMap<>();
    for (En k : schemaStr2.ens) {
        ensM2.put(k, k.equals(dst) ? src : k);
    }
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM2 = new HashMap<>();
    for (Att k : schemaStr2.atts.keySet()) {
        attsM2.put(updateAtt.apply(k), new Triple<>(isoFromUser.atts.get(deUpdateAtt.apply(k)).first, isoFromUser.atts.get(deUpdateAtt.apply(k)).second.equals(dst) ? src : isoFromUser.atts.get(deUpdateAtt.apply(k)).second, isoFromUser.atts.get(deUpdateAtt.apply(k)).third));
    }
    Map<Fk, Pair<En, List<Fk>>> fksM2 = new HashMap<>();
    for (Fk k : schemaStr2.fks.keySet()) {
        fksM2.put(updateFk.apply(k), new Pair<>(isoFromUser.fks.get(deUpdateFk.apply(k)).first.equals(dst) ? src : isoFromUser.fks.get(deUpdateFk.apply(k)).first, isoFromUser.fks.get(deUpdateFk.apply(k)).second.stream().map(deUpdateFk).collect(Collectors.toList())));
    }
    isoFromUser = new Mapping<>(ensM2, attsM2, fksM2, schemaStr2, schemaStr, checkJava);
    return wrap(isoToUser, isoFromUser);
}
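As a concrete, invented example of the rewriting performed by updateFk and deUpdateFk when an entity Person is renamed to Employee (same imports as above):

    // Sketch only: a foreign key keeps its string label but changes its source entity.
    Fk before = new Fk(new En("Person"), "worksIn");
    Fk after = new Fk(new En("Employee"), "worksIn"); // what updateFk.apply(before) yields
    // deUpdateFk sends 'after' back to 'before', which is how the new prover delegates to the old one.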
Use of catdata.aql.exp.SchExpRaw.Fk in project fql by CategoricalData.
Class ColimitSchema, method renameAtt.
public ColimitSchema<N> renameAtt(Att src, Att dst, boolean checkJava) {
    if (!schemaStr.atts.containsKey(src)) {
        throw new RuntimeException(src + " is not an attribute of " + src.en + " in \n" + schemaStr);
    }
    if (schemaStr.atts.containsKey(dst)) {
        throw new RuntimeException(dst + " is already an attribute in \n" + schemaStr);
    }
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoToUser = Mapping.id(schemaStr);
    Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att> isoFromUser = Mapping.id(schemaStr);
    Function<Att, Att> fun = x -> x.equals(src) ? dst : x;
    Function<Att, Att> fun2 = x -> x.equals(dst) ? src : x;
    Map<Att, Pair<En, Ty>> atts = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        atts.put(fun.apply(k), schemaStr.atts.get(k));
    }
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqs = new HashSet<>();
    for (Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>> eq : schemaStr.eqs) {
        eqs.add(new Triple<>(eq.first, eq.second.mapAtt(fun), eq.third.mapAtt(fun)));
    }
    DP<Ty, En, Sym, Fk, Att, Void, Void> dp = new DP<Ty, En, Sym, Fk, Att, Void, Void>() {

        @Override
        public String toStringProver() {
            return "rename attribute of " + schemaStr.dp.toStringProver();
        }

        @Override
        public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Void, Void> lhs, Term<Ty, En, Sym, Fk, Att, Void, Void> rhs) {
            return schemaStr.dp.eq(ctx, lhs.mapAtt(fun2), rhs.mapAtt(fun2));
        }
    };
    Schema<Ty, En, Sym, Fk, Att> schemaStr2 = new Schema<>(ty, schemaStr.ens, atts, schemaStr.fks.map, eqs, dp, checkJava);
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM = new HashMap<>();
    for (Att k : schemaStr.atts.keySet()) {
        attsM.put(k, new Triple<>(isoToUser.atts.get(k).first, isoToUser.atts.get(k).second, isoToUser.atts.get(k).third.mapAtt(fun)));
    }
    isoToUser = new Mapping<>(isoToUser.ens.map, attsM, isoToUser.fks.map, schemaStr, schemaStr2, checkJava);
    Map<Att, Triple<Var, En, Term<Ty, En, Sym, Fk, Att, Void, Void>>> attsM2 = new HashMap<>();
    for (Att k : schemaStr2.atts.keySet()) {
        Var v = new Var("v");
        attsM2.put(k, new Triple<>(v, schemaStr2.atts.get(k).first, Term.Att(fun2.apply(k), Term.Var(v))));
    }
    isoFromUser = new Mapping<>(isoFromUser.ens.map, attsM2, isoFromUser.fks.map, schemaStr2, schemaStr, checkJava);
    return wrap(isoToUser, isoFromUser);
}
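A small illustrative sketch (invented attribute and entity names, same imports as above) of the backwards assignment that the attsM2 loop records when attribute nm of Employee is renamed to name:

    // Sketch only: the new attribute "name" is sent back to the old attribute "nm" applied to a fresh variable.
    Var v = new Var("v");
    attsM2.put(new Att(new En("Employee"), "name"),
            new Triple<>(v, new En("Employee"), Term.Att(new Att(new En("Employee"), "nm"), Term.Var(v))));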
Use of catdata.aql.exp.SchExpRaw.Fk in project fql by CategoricalData.
Class ColimitSchema, method initialUser.
private Pair<Schema<Ty, En, Sym, Fk, Att>, Ctx<N, Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att>>> initialUser(
        AqlOptions options,
        Collage<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void> col,
        Set<Triple<Pair<Var, Set<Pair<N, En>>>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>>> eqs,
        Ctx<Pair<N, En>, Set<Pair<N, En>>> eqcs,
        Schema<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>> schema) {
    // For each node n, build the mapping from its schema into the colimit schema.
    Ctx<N, Mapping<Ty, En, Sym, Fk, Att, Set<Pair<N, En>>, Pair<N, Fk>, Pair<N, Att>>> mappings = new Ctx<>();
    for (N n : nodes.keySet()) {
        Map<Att, Triple<Var, Set<Pair<N, En>>, Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void>>> atts = new HashMap<>();
        Map<Fk, Pair<Set<Pair<N, En>>, List<Pair<N, Fk>>>> fks = new HashMap<>();
        Map<En, Set<Pair<N, En>>> ens0 = new HashMap<>();
        Schema<Ty, En, Sym, Fk, Att> s = nodes.get(n);
        for (En en : s.ens) {
            ens0.put(en, eqcs.get(new Pair<>(n, en)));
        }
        for (Fk fk : s.fks.keySet()) {
            fks.put(fk, new Pair<>(eqcs.get(new Pair<>(n, s.fks.get(fk).first)), Util.singList(new Pair<>(n, fk))));
        }
        for (Att att : s.atts.keySet()) {
            Var v = new Var("v");
            Term<Ty, Set<Pair<N, En>>, Sym, Pair<N, Fk>, Pair<N, Att>, Void, Void> t = Term.Att(new Pair<>(n, att), Term.Var(v));
            atts.put(att, new Triple<>(v, eqcs.get(new Pair<>(n, s.atts.get(att).first)), t));
        }
        // TODO aql allow as option?
        Mapping<Ty, En, Sym, Fk, Att, Set<Pair<N, En>>, Pair<N, Fk>, Pair<N, Att>> m = new Mapping<>(ens0, atts, fks, nodes.get(n), schema, false);
        mappings.put(n, m);
    }
    // Convert the colimit collage, whose entities/fks/atts carry (node, ...) labels, back to plain En/Fk/Att names.
    Collage<Ty, En, Sym, Fk, Att, Void, Void> colX = new Collage<>(ty.collage());
    colX.ens.addAll(col.ens.stream().map(ColimitSchema::conv1).collect(Collectors.toSet()));
    colX.atts.map.putAll(col.atts.map((k, v) -> new Pair<>(new Att(conv1(col.atts.get(k).first), conv2Att(k)), new Pair<>(conv1(v.first), v.second))).map);
    colX.fks.putAll(col.fks.map((k, v) -> new Pair<>(new Fk(conv1(col.fks.get(k).first), conv2Fk(k)), new Pair<>(conv1(v.first), conv1(v.second)))).map);
    Set<Triple<Pair<Var, En>, Term<Ty, En, Sym, Fk, Att, Void, Void>, Term<Ty, En, Sym, Fk, Att, Void, Void>>> eqsX = eqs.stream().map(t -> new Triple<>(new Pair<>(t.first.first, conv1(t.first.second)), conv3(col, t.second), conv3(col, t.third))).collect(Collectors.toSet());
    colX.eqs.addAll(col.eqs.stream().map(t -> new Eq<>(t.ctx.map((k, v) -> new Pair<>(k, conv4(v))), conv3(col, t.lhs), conv3(col, t.rhs))).collect(Collectors.toSet()));
    // AqlOptions opsX = new AqlOptions(options, colX);
    DP<Ty, En, Sym, Fk, Att, Void, Void> dpX = AqlProver.create(options, colX, ty.js);
    Schema<Ty, En, Sym, Fk, Att> schemaStr = new Schema<>(ty, colX.ens, colX.atts.map, colX.fks.map, eqsX, dpX, false);
    Ctx<N, Mapping<Ty, En, Sym, Fk, Att, En, Fk, Att>> mappingsStr = new Ctx<>();
    for (N n : mappings.keySet()) {
        mappingsStr.put(n, conv5(col, schemaStr, mappings.get(n)));
    }
    return new Pair<>(schemaStr, mappingsStr);
}
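To keep the generic types straight, here is a purely illustrative sketch of the identifier shapes that initialUser converts, with invented node and entity names and assuming the node type N is instantiated to String:

    // In the colimit, an entity is an equivalence class of (node, entity) pairs and a
    // foreign key is tagged with the node it came from; conv1/conv2Fk turn these back
    // into plain En and Fk names for the user-facing schema.
    Set<Pair<String, En>> mergedEn = new HashSet<>();
    mergedEn.add(new Pair<>("S1", new En("Person")));
    mergedEn.add(new Pair<>("S2", new En("Employee")));
    Pair<String, Fk> taggedFk = new Pair<>("S1", new Fk(new En("Person"), "worksIn"));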