Use of org.apache.calcite.adapter.jdbc.JdbcConvention in the Apache Calcite project.
From the class JdbcToSparkConverter, method implementSpark:
public SparkRel.Result implementSpark(SparkRel.Implementor implementor) {
  // Generates code of the form:
  //   ResultSetEnumerable.of(schema.getDataSource(), "select ...")
  // by translating the child JDBC relational expression to a SQL string and
  // wiring it, together with the per-field primitive types, into a call to
  // ResultSetEnumerable.of on the convention's data source.
  final BlockBuilder builder = new BlockBuilder();
  final JdbcRel input = (JdbcRel) getInput();
  final PhysType physType =
      PhysTypeImpl.of(implementor.getTypeFactory(), getRowType(), JavaRowFormat.CUSTOM);
  final JdbcConvention convention = (JdbcConvention) input.getConvention();
  String sql = generateSql(convention.dialect);
  if (CalcitePrepareImpl.DEBUG) {
    // Echo the generated SQL when debug tracing is enabled.
    System.out.println("[" + sql + "]");
  }
  final Expression sqlExpression = builder.append("sql", Expressions.constant(sql));
  // One Primitive per output field; OTHER marks non-primitive (object) columns.
  final int fieldCount = getRowType().getFieldCount();
  final Primitive[] fieldPrimitives = new Primitive[fieldCount];
  for (int i = 0; i < fieldCount; i++) {
    final Primitive boxed = Primitive.ofBoxOr(physType.fieldClass(i));
    fieldPrimitives[i] = boxed == null ? Primitive.OTHER : boxed;
  }
  final Expression primitivesExpression =
      builder.append("primitives", Expressions.constant(fieldPrimitives));
  final Expression enumerable =
      builder.append("enumerable",
          Expressions.call(BuiltInMethod.RESULT_SET_ENUMERABLE_OF.method,
              Expressions.call(
                  Expressions.convert_(convention.expression, JdbcSchema.class),
                  BuiltInMethod.JDBC_SCHEMA_DATA_SOURCE.method),
              sqlExpression,
              primitivesExpression));
  builder.add(Expressions.return_(null, enumerable));
  return implementor.result(physType, builder.toBlock());
}
Use of org.apache.calcite.adapter.jdbc.JdbcConvention in the Apache Calcite project.
From the class PlannerTest, method testPlanTransformWithDiffRuleSetAndConvention:
/**
 * Unit test that calls {@link Planner#transform} twice,
 * with different rule sets, with different conventions.
 *
 * <p>{@link org.apache.calcite.adapter.jdbc.JdbcConvention} is different
 * from the typical convention in that it is not a singleton. Switching to
 * a different instance causes problems unless planner state is wiped clean
 * between calls to {@link Planner#transform}.
 */
@Test
public void testPlanTransformWithDiffRuleSetAndConvention() throws Exception {
  // Phase 0: plan into the Enumerable convention.
  final Program enumerableProgram =
      Programs.ofRules(FilterMergeRule.INSTANCE,
          EnumerableRules.ENUMERABLE_FILTER_RULE,
          EnumerableRules.ENUMERABLE_PROJECT_RULE);
  // Phase 1: plan into a freshly created (non-singleton) JDBC convention.
  final JdbcConvention jdbcConvention = new JdbcConvention(null, null, "myjdbc");
  final Program jdbcProgram =
      Programs.ofRules(new MockJdbcProjectRule(jdbcConvention),
          new MockJdbcTableRule(jdbcConvention));
  final Planner planner = getPlanner(null, enumerableProgram, jdbcProgram);
  final SqlNode parsed = planner.parse("select T1.\"name\" from \"emps\" as T1 ");
  final SqlNode validated = planner.validate(parsed);
  final RelNode logicalRel = planner.rel(validated).project();
  final RelTraitSet enumerableTraits =
      planner.getEmptyTraitSet().replace(EnumerableConvention.INSTANCE);
  final RelTraitSet jdbcTraits = planner.getEmptyTraitSet().replace(jdbcConvention);
  // Transform with the first rule set, then again with the second; planner
  // state must be reset between the two for the convention switch to work.
  final RelNode enumerableRel = planner.transform(0, enumerableTraits, logicalRel);
  final RelNode jdbcRel = planner.transform(1, jdbcTraits, enumerableRel);
  assertThat(toString(jdbcRel),
      equalTo("JdbcProject(name=[$2])\n"
          + " MockJdbcTableScan(table=[[hr, emps]])\n"));
}
Aggregations