Use of catdata.aql.exp.SchExpRaw.En in the project fql by CategoricalData.
The class InstExpRandom, method eval:
// Brute-force construction of a random instance: every row and value is materialized explicitly.
@Override
public SaturatedInstance<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>, Pair<Integer, En>, Pair<Integer, Att>> eval(AqlEnv env) {
	// The RNG is seeded from the aql options, so the generated instance is reproducible.
	int seed = (Integer) new AqlOptions(options, null, env.defaults).getOrDefault(AqlOption.random_seed);
	Random rand = new Random(seed);
	Schema<Ty, En, Sym, Fk, Att> schema = sch.eval(env);
	// Carriers: rows per entity are identified by (index, entity) pairs.
	Ctx<En, Collection<Pair<Integer, En>>> carriers = new Ctx<>();
	// Type-side elements: one labelled null (skolem) per (row, attribute).
	Ctx<Ty, Collection<Pair<Integer, Att>>> typeElems = new Ctx<>();
	Ctx<Pair<Integer, En>, Ctx<Fk, Pair<Integer, En>>> fkAction = new Ctx<>();
	Ctx<Pair<Integer, En>, Ctx<Att, Term<Ty, Void, Sym, Void, Void, Void, Pair<Integer, Att>>>> attAction = new Ctx<>();
	for (Ty ty : schema.typeSide.tys) {
		typeElems.put(ty, new LinkedList<>());
	}
	for (String enName : ens.keySet()) {
		En theEn = new En(enName);
		int count = ens.get(enName);
		List<Pair<Integer, En>> rows = new LinkedList<>();
		for (int i = 0; i < count; i++) {
			rows.add(new Pair<>(i, theEn));
			// Every attribute value is a fresh skolem term keyed by (row, attribute).
			Ctx<Att, Term<Ty, Void, Sym, Void, Void, Void, Pair<Integer, Att>>> attCtx = new Ctx<>();
			for (Att att : schema.attsFrom(theEn)) {
				attCtx.put(att, Term.Sk(new Pair<>(i, att)));
				typeElems.get(schema.atts.get(att).second).add(new Pair<>(i, att));
			}
			attAction.put(new Pair<>(i, theEn), attCtx);
			// Each foreign key points at a uniformly random row of its target entity.
			Ctx<Fk, Pair<Integer, En>> fkCtx = new Ctx<>();
			for (Fk fk : schema.fksFrom(theEn)) {
				int targetSize = ens.get(schema.fks.get(fk).second.str);
				fkCtx.put(fk, new Pair<>(rand.nextInt(targetSize), schema.fks.get(fk).second));
			}
			fkAction.put(new Pair<>(i, theEn), fkCtx);
		}
		carriers.put(theEn, rows);
	}
	ImportAlgebra<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>> alg = new ImportAlgebra<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>>(schema, carriers, typeElems, fkAction, attAction, x -> x.toString(), x -> x.toString(), true);
	// Trivial decision procedure: skolems are never merged, so equality is syntactic.
	DP<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>> dp = new DP<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>>() {
		@Override
		public String toStringProver() {
			return "Random";
		}

		@Override
		public boolean eq(Ctx<Var, Chc<Ty, En>> ctx, Term<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>> lhs, Term<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>> rhs) {
			if (!ctx.isEmpty()) {
				Util.anomaly(); // only closed terms are expected here
			}
			return lhs.equals(rhs);
		}
	};
	return new SaturatedInstance<Ty, En, Sym, Fk, Att, Pair<Integer, En>, Pair<Integer, Att>, Pair<Integer, En>, Pair<Integer, Att>>(alg, dp, false, true, false, new Ctx<>());
}
Use of catdata.aql.exp.SchExpRaw.En in the project fql by CategoricalData.
The class InstExpRaw, method eval:
// Evaluates a literal instance: merges imported instances, declares generators/nulls,
// type-checks the stated equations, and either interprets the presentation directly
// as an algebra (import_as_theory) or builds the initial algebra of the presentation.
@Override
public synchronized Instance<Ty, En, Sym, Fk, Att, Gen, Sk, ID, Chc<Sk, Pair<ID, Att>>> eval(AqlEnv env) {
Schema<Ty, En, Sym, Fk, Att> sch = schema.eval(env);
// Working collage starts from the schema's collage and accumulates generators, nulls, and equations.
Collage<Ty, En, Sym, Fk, Att, Gen, Sk> col = new Collage<>(sch.collage());
Set<Pair<Term<Ty, En, Sym, Fk, Att, Gen, Sk>, Term<Ty, En, Sym, Fk, Att, Gen, Sk>>> eqs0 = new HashSet<>();
// Fold in every imported instance's collage and equations.
for (String k : imports) {
@SuppressWarnings("unchecked") Instance<Ty, En, Sym, Fk, Att, Gen, Sk, ID, Chc<Sk, Pair<ID, Att>>> v = env.defs.insts.get(k);
col.addAll(v.collage());
eqs0.addAll(v.eqs());
}
// Classify each declared generator: entity sort -> generator, type sort -> labelled null (sk).
for (Pair<String, String> p : gens) {
String gen = p.first;
String ty = p.second;
if (col.ens.contains(new En(ty))) {
col.gens.put(new Gen(gen), new En(ty));
} else if (col.tys.contains(new Ty(ty))) {
col.sks.put(new Sk(gen), new Ty(ty));
} else {
throw new LocException(find("generators", p), "The sort for " + gen + ", namely " + ty + ", is not declared as a type or entity");
}
}
// Type-check each raw equation in the empty context; record it both in eqs0 and in the collage.
for (Pair<RawTerm, RawTerm> eq : eqs) {
try {
Map<String, Chc<Ty, En>> ctx = Collections.emptyMap();
Triple<Ctx<Var, Chc<Ty, En>>, Term<Ty, En, Sym, Fk, Att, Gen, Sk>, Term<Ty, En, Sym, Fk, Att, Gen, Sk>> eq0 = RawTerm.infer1x(ctx, eq.first, eq.second, null, col, "", sch.typeSide.js).first3();
eqs0.add(new Pair<>(eq0.second, eq0.third));
col.eqs.add(new Eq<>(new Ctx<>(), eq0.second, eq0.third));
} catch (RuntimeException ex) {
ex.printStackTrace();
// Re-throw with source location and the offending equation for a user-facing diagnostic.
throw new LocException(find("equations", eq), "In equation " + eq.first + " = " + eq.second + ", " + ex.getMessage());
}
}
AqlOptions strat = new AqlOptions(options, col, env.defaults);
boolean interpret_as_algebra = (boolean) strat.getOrDefault(AqlOption.interpret_as_algebra);
boolean dont_check_closure = (boolean) strat.getOrDefault(AqlOption.import_dont_check_closure_unsafe);
if (interpret_as_algebra) {
// Direct-algebra path: generators are the carrier elements themselves.
Ctx<En, Set<Gen>> ens0x = new Ctx<>(Util.revS(col.gens.map));
Ctx<En, Collection<Gen>> ens0 = ens0x.map(x -> (Collection<Gen>) x);
if (!col.sks.isEmpty()) {
throw new RuntimeException("Cannot have generating labelled nulls with import_as_theory");
}
Ctx<Ty, Collection<Null<?>>> tys0 = new Ctx<>();
for (Ty ty : sch.typeSide.tys) {
tys0.put(ty, new HashSet<>());
}
Ctx<Gen, Ctx<Fk, Gen>> fks0 = new Ctx<>();
Ctx<Gen, Ctx<Att, Term<Ty, Void, Sym, Void, Void, Void, Null<?>>>> atts0 = new Ctx<>();
for (Gen gen : col.gens.keySet()) {
fks0.put(gen, new Ctx<>());
atts0.put(gen, new Ctx<>());
}
// In this mode every equation must directly define a fk or att value on a generator
// (gen.fk = gen, or gen.att = java object); either orientation is accepted.
for (Pair<Term<Ty, En, Sym, Fk, Att, Gen, Sk>, Term<Ty, En, Sym, Fk, Att, Gen, Sk>> e : eqs0) {
Term<Ty, En, Sym, Fk, Att, Gen, Sk> lhs = e.first;
Term<Ty, En, Sym, Fk, Att, Gen, Sk> rhs = e.second;
if (rhs.gen != null && lhs.fk != null && lhs.arg.gen != null) {
fks0.get(lhs.arg.gen).put(lhs.fk, rhs.gen);
} else if (lhs.gen != null && rhs.fk != null && rhs.arg.gen != null) {
fks0.get(rhs.arg.gen).put(rhs.fk, lhs.gen);
} else if (rhs.obj != null && lhs.att != null && lhs.arg.gen != null) {
atts0.get(lhs.arg.gen).put(lhs.att, Term.Obj(rhs.obj, rhs.ty));
} else if (lhs.obj != null && rhs.att != null && rhs.arg.gen != null) {
atts0.get(rhs.arg.gen).put(rhs.att, Term.Obj(lhs.obj, lhs.ty));
} else {
throw new RuntimeException("import_as_theory not compatible with equation " + lhs + " = " + rhs + "; each equation must be of the form gen.fk=gen or gen.att=javaobject");
}
}
// Attributes with no stated value get a fresh labelled null via objectToSk.
Ctx<Null<?>, Term<Ty, En, Sym, Fk, Att, Gen, Null<?>>> extraRepr = new Ctx<>();
for (Gen gen : col.gens.keySet()) {
for (Att att : sch.attsFrom(col.gens.get(gen))) {
if (!atts0.get(gen).containsKey(att)) {
atts0.get(gen).put(att, InstExpImport.objectToSk(sch, null, gen, att, tys0, extraRepr, false, false));
}
}
}
ImportAlgebra<Ty, En, Sym, Fk, Att, Gen, Null<?>> alg = new ImportAlgebra<Ty, En, Sym, Fk, Att, Gen, Null<?>>(sch, ens0, tys0, fks0, atts0, Object::toString, Object::toString, dont_check_closure);
// The algebra serves as its own decision procedure here.
return new SaturatedInstance(alg, alg, (Boolean) strat.getOrDefault(AqlOption.require_consistency), (Boolean) strat.getOrDefault(AqlOption.allow_java_eqs_unsafe), true, extraRepr);
}
// Default path: build the initial algebra of the presentation (generators + equations).
InitialAlgebra<Ty, En, Sym, Fk, Att, Gen, Sk, ID> initial = new InitialAlgebra<>(strat, sch, col, new It(), Object::toString, Object::toString);
return new LiteralInstance<>(sch, col.gens.map, col.sks.map, eqs0, initial.dp(), initial, (Boolean) strat.getOrDefault(AqlOption.require_consistency), (Boolean) strat.getOrDefault(AqlOption.allow_java_eqs_unsafe));
}
Use of catdata.aql.exp.SchExpRaw.En in the project fql by CategoricalData.
The class CombinatorParser, method instExp:
// Wires up the parser for instance expressions: one sub-parser per surface keyword,
// combined by ordered choice and installed into the lazy reference inst_ref.
@SuppressWarnings({ "unchecked", "rawtypes" })
private static void instExp() {
	// 'coproduct a + b + ... : S { opts }'
	Parser<InstExpCoProdFull> coprodFull = Parsers.tuple(token("coproduct"), ident.sepBy(token("+")), token(":"), sch_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpCoProdFull(x.b, x.d, Util.newIfNull(x.e)));
	// A bare identifier refers to a previously bound instance.
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> varExp = ident.map(InstExpVar::new);
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> empty = Parsers.tuple(token("empty"), token(":"), sch_ref.get()).map(x -> new InstExpEmpty<>(x.c));
	// Functorial data migrations; each takes an optional '{ options }' block.
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> pi = Parsers.tuple(token("pi"), map_ref.lazy(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpPi(x.b, x.c, x.d == null ? new HashMap<>() : Util.toMapSafely(x.d)));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> sigma = Parsers.tuple(token("sigma"), map_ref.lazy(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpSigma(x.b, x.c, x.d == null ? new HashMap<>() : Util.toMapSafely(x.d)));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> sigmaChase = Parsers.tuple(token("sigma_chase"), map_ref.lazy(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpSigmaChase(x.b, x.c, x.d == null ? new HashMap<>() : Util.toMapSafely(x.d)));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> frozen = Parsers.tuple(token("frozen"), query_ref.lazy(), ident).map(x -> new InstExpFrozen(x.b, new En(x.c)));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> delta = Parsers.tuple(token("delta"), map_ref.lazy(), inst_ref.lazy()).map(x -> new InstExpDelta(x.b, x.c));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> distinct = Parsers.tuple(token("distinct"), inst_ref.lazy()).map(x -> new InstExpDistinct(x.b));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> anon = Parsers.tuple(token("anonymize"), inst_ref.lazy()).map(x -> new InstExpAnonymize(x.b));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> eval = Parsers.tuple(token("eval"), query_ref.lazy(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpEval(x.b, x.c, x.d == null ? new LinkedList<>() : x.d));
	// 'src'/'dst' project the endpoints of a transform.
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> dom = Parsers.tuple(token("src"), trans_ref.lazy()).map(x -> new InstExpDom(x.b));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> cod = Parsers.tuple(token("dst"), trans_ref.lazy()).map(x -> new InstExpCod(x.b));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> chase = Parsers.tuple(token("chase"), edsExp(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpChase(x.b, x.c, x.d == null ? new LinkedList<>() : x.d));
	Parser<InstExp<?, ?, ?, ?, ?, ?, ?, ?, ?>> coeval = Parsers.tuple(token("coeval"), query_ref.lazy(), inst_ref.lazy(), options.between(token("{"), token("}")).optional()).map(x -> new InstExpCoEval(x.b, x.c, x.d == null ? new LinkedList<>() : x.d));
	// Ordered choice: longer/more specific keywords (e.g. sigma_chase) must precede
	// their prefixes (sigma), and the bare-identifier alternative comes late.
	Parser ret = Parsers.or(sigmaChase, coprodFull, pi, frozen, instExpCsvQuot(), instExpJdbcQuot(), instExpCoProd(), instExpRand(), instExpCoEq(), instExpJdbcAll(), chase, instExpJdbc(), empty, instExpRaw(), varExp, sigma, delta, distinct, eval, colimInstExp(), dom, anon, cod, instExpCsv(), coeval, parens(inst_ref), instExpQuotient());
	inst_ref.set(ret);
}
Use of catdata.aql.exp.SchExpRaw.En in the project fql by CategoricalData.
The class QueryExpRawSimple, method eval:
// TODO aql merge with queryexpraw
// Evaluates a single-block ("simple") query: the destination schema has exactly one
// entity, conventionally named "Q", whose attributes come from the block's return clause.
@Override
public Query<Ty, En, Sym, Fk, Att, En, Fk, Att> eval(AqlEnv env) {
	Schema<Ty, En, Sym, Fk, Att> src0 = src.eval(env);
	Collage<Ty, En, Sym, Fk, Att, Void, Void> srcCol = src0.collage();
	// Renamed from 'En' — a local variable named identically to the type it instantiates
	// obscured the type name and forced fully-qualified references below.
	En qEn = new En("Q");
	AqlOptions ops = new AqlOptions(block.options, null, env.defaults);
	boolean doNotCheckEqs = (Boolean) ops.getOrDefault(AqlOption.dont_validate_unsafe);
	boolean elimRed = (Boolean) ops.getOrDefault(AqlOption.query_remove_redundancy);
	boolean checkJava = !(Boolean) ops.getOrDefault(AqlOption.allow_java_eqs_unsafe);
	Ctx<En, Triple<Ctx<Var, En>, Collection<Eq<Ty, En, Sym, Fk, Att, Var, Var>>, AqlOptions>> ens0 = new Ctx<>();
	Ctx<Att, Term<Ty, En, Sym, Fk, Att, Var, Var>> atts0 = new Ctx<>();
	// A simple query has no destination fks, so fks0 stays empty.
	Ctx<Fk, Pair<Ctx<Var, Term<Void, En, Void, Fk, Void, Var, Void>>, Boolean>> fks0 = new Ctx<>();
	Ctx<En, Collage<Ty, En, Sym, Fk, Att, Var, Var>> cols = new Ctx<>();
	QueryExpRaw.processBlock(block.options, env, src0, ens0, cols, block, new Ctx<>());
	// Destination collage: the source's type side plus the single entity Q.
	Collage<Ty, En, Sym, Fk, Att, Void, Void> colForDst = new Collage<>(src0.typeSide.collage());
	colForDst.ens.add(qEn);
	// Type-check each return-clause term; it must land in a type, not an entity.
	for (Pair<Att, RawTerm> p : block.atts) {
		Map<String, Chc<Ty, En>> s = QueryExpRaw.unVar(cols.get(qEn).gens).<Ty>inRight().map;
		// NOTE(review): p.second is passed as both sides so infer1x type-checks a
		// single term against itself — confirm against infer1x's contract.
		Term<Ty, En, Sym, Fk, Att, Gen, Sk> term = RawTerm.infer1x(s, p.second, p.second, null, srcCol.convert(), "", src0.typeSide.js).second;
		Chc<Ty, En> ty = srcCol.type(new Ctx<>(s).map((k, v) -> new Pair<>(new Var(k), v)), term.convert());
		if (!ty.left) {
			throw new LocException(find("attributes", p), "In return clause for " + p.first + ", the type is " + ty.r + ", which is an entity.");
		}
		colForDst.atts.put(p.first, new Pair<>(qEn, ty.l));
	}
	DP<Ty, En, Sym, Fk, Att, Void, Void> dp = AqlProver.create(ops, colForDst, src0.typeSide.js);
	Schema<Ty, En, Sym, Fk, Att> dst0 = new Schema<Ty, En, Sym, Fk, Att>(src0.typeSide, colForDst.ens, colForDst.atts.map, colForDst.fks.map, new HashSet<>(), dp, checkJava);
	// Now that the destination schema exists, elaborate each return-clause attribute.
	for (Pair<Att, RawTerm> p : block.atts) {
		try {
			QueryExpRaw.processAtt(src0, dst0, ens0, atts0, cols, p, new Ctx<>());
		} catch (RuntimeException ex) {
			ex.printStackTrace();
			throw new LocException(find("attributes", p), "In return clause for " + p.first + ", " + ex.getMessage());
		}
	}
	// TODO aql
	return Query.makeQuery(ens0, atts0, fks0, src0, dst0, doNotCheckEqs, elimRed);
}
Use of catdata.aql.exp.SchExpRaw.En in the project fql by CategoricalData.
The class TransExpImport, method eval:
// Evaluates an imported transform: checks the two instances share a schema, maps each
// source labelled null to a target null of the same type, then delegates per-entity
// generator mapping to the subclass hooks start/processEn/stop.
@Override
public Transform<Ty, En, Sym, Fk, Att, Gen1, Sk1, Gen2, Sk2, X1, Y1, X2, Y2> eval(AqlEnv env) {
	Instance<Ty, En, Sym, Fk, Att, Gen1, Sk1, X1, Y1> src0 = src.eval(env);
	Instance<Ty, En, Sym, Fk, Att, Gen2, Sk2, X2, Y2> dst0 = dst.eval(env);
	if (!src0.schema().equals(dst0.schema())) {
		// Fixed: the message claims to show schemas but previously printed the
		// entire instances (src0/dst0); print the schemas themselves.
		throw new RuntimeException("Schema of instance source is " + src0.schema() + " but schema of target instance is " + dst0.schema());
	}
	Schema<Ty, En, Sym, Fk, Att> sch = src0.schema();
	// Every import key must name an entity of the shared schema.
	for (String o : map.keySet()) {
		if (!sch.ens.contains(new En(o))) {
			throw new RuntimeException("there is an import for " + o + ", which is not an entity in the schema ");
		}
	}
	// NOTE(review): gens/sks/op/dontValidateEqs appear to be instance fields
	// initialized here — confirm against the class declaration.
	gens = new Ctx<>();
	sks = new Ctx<>();
	op = new AqlOptions(options, null, env.defaults);
	dontValidateEqs = (Boolean) op.getOrDefault(AqlOption.dont_validate_unsafe);
	boolean autoMapNulls = (Boolean) op.getOrDefault(AqlOption.map_nulls_arbitrarily_unsafe);
	// Map each source null to a target null of the same type.
	for (Sk1 sk : src0.sks().keySet()) {
		Ty ty = src0.sks().get(sk);
		Set<Sk2> xxx = Util.revS(dst0.sks().map).get(ty);
		// Fixed: revS's reverse map has no entry at all when the target has no
		// nulls of this type, so guard against null as well as empty.
		if (xxx == null || xxx.isEmpty()) {
			throw new RuntimeException("Cannot map null " + sk + " to target instance because target instance has no nulls at type " + ty);
		}
		if (xxx.size() > 1) {
			if (autoMapNulls) {
				// Ambiguous target: pick arbitrarily, as explicitly opted into.
				Sk2 sk2 = Util.get0X(xxx);
				sks.put(sk, Term.Sk(sk2));
			} else {
				throw new RuntimeException("Cannot automatically map null " + sk + " to target instance because target instance has " + xxx.size() + " nulls at type " + ty + ". Possible solution: add options map_nulls_arbitrarily_unsafe = true");
			}
		} else {
			Sk2 sk2 = Util.get0(xxx);
			sks.put(sk, Term.Sk(sk2));
		}
	}
	try {
		// Subclass-specific import: open a handle, process each mapped entity, close.
		Handle h = start(sch);
		for (En en : sch.ens) {
			if (map.containsKey(en.str)) {
				processEn(en, sch, h, map.get(en.str));
			}
		}
		stop(h);
	} catch (Exception exn) {
		// .getMessage() + "\n\n" + getHelpStr());
		throw new RuntimeException(exn);
	}
	return new LiteralTransform<>(gens.map, sks.map, src0, dst0, dontValidateEqs);
}
Aggregations