Use of org.spf4j.test.log.annotations.PrintLogs in project spf4j by zolyfarkas.
From the class AvroQueryTest, method testAvroSql.
@Test
@SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")
@PrintLogs(category = "org.codehaus.janino", ideMinLevel = Level.INFO, greedy = true)
public void testAvroSql() throws SqlParseException, RelConversionException, ValidationException, InstantiationException, IllegalAccessException {
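// Build the Avro schemas used by the test: RecordA, SubRecord, and RecordB (which embeds SubRecord both as an array field and as a plain field).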
Schema recASchema = SchemaBuilder.record("RecordA").fields()
    .name("id").type().intType().noDefault()
    .requiredString("name")
    .endRecord();
Schema subRecSchema = SchemaBuilder.record("SubRecord").fields()
    .name("key").type().stringType().noDefault()
    .requiredString("value")
    .endRecord();
Schema recBSchema = SchemaBuilder.record("RecordB").fields()
    .name("id").type().intType().noDefault()
    .requiredString("name")
    .requiredString("text")
    .name("adate").type(Schemas.dateString()).noDefault()
    .name("meta").type(Schema.createArray(subRecSchema)).noDefault()
    .name("meta2").type(subRecSchema).noDefault()
    .endRecord();
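// Obtain concrete SpecificRecord classes for the three schemas and use them to instantiate the test records.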
GenericRecordBuilder rb = new GenericRecordBuilder(recASchema, subRecSchema, recBSchema);
Class<? extends SpecificRecordBase> raC = rb.getRecordClass(recASchema);
Class<? extends SpecificRecordBase> rbC = rb.getRecordClass(recBSchema);
Class<? extends SpecificRecordBase> rsC = rb.getRecordClass(subRecSchema);
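// Populate one RecordA instance and two RecordB instances; both RecordB rows reference the same SubRecord.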
GenericRecord reca1 = raC.newInstance();
reca1.put("id", 1);
reca1.put("name", "Jim");
GenericRecord subRec = rsC.newInstance();
subRec.put("key", "key1");
subRec.put("value", "val1");
GenericRecord recb1 = rbC.newInstance();
recb1.put("id", 1);
recb1.put("name", "Beam");
recb1.put("text", "bla");
recb1.put("adate", LocalDate.now());
recb1.put("meta", Collections.singletonList(subRec));
recb1.put("meta2", subRec);
GenericRecord recb2 = rbC.newInstance();
recb2.put("id", 2);
recb2.put("name", "Xi");
recb2.put("text", "blabla");
recb2.put("adate", LocalDate.now());
recb2.put("meta", Collections.singletonList(subRec));
recb2.put("meta2", subRec);
SchemaPlus schema = Frameworks.createRootSchema(true);
schema.add("a", new AvroIteratorAsProjectableFilterableTable(recASchema, () -> CloseableIterator.from(Collections.singletonList(reca1).iterator())));
schema.add("b", new AvroIteratorAsProjectableFilterableTable(recBSchema, () -> CloseableIterator.from(Arrays.asList(recb1, recb2).iterator())));
SqlParser.Config cfg = SqlParser.configBuilder().setCaseSensitive(true).setIdentifierMaxLength(255).setLex(Lex.JAVA).build();
FrameworkConfig config = Frameworks.newConfigBuilder().parserConfig(cfg).defaultSchema(schema).build();
Planner planner = Frameworks.getPlanner(config);
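// Parse the join query, validate it, and convert it into a relational expression tree.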
SqlNode s = planner.parse("select a.id, a.name as n1, b.name as n2,"
    + " b.adate as adate, b.meta as firstKey, b.meta2.key as blaKey"
    + " from a"
    + " inner join b on a.id = b.id where b.text like 'bla%' or b.text like 'cucu%'");
SqlNode validated = planner.validate(s);
RelRoot rel = planner.rel(validated);
RelNode plan = rel.project();
LOG.debug("exec plan", RelOptUtil.toString(plan));
plan = PlannerUtils.pushDownPredicatesAndProjection(plan);
LOG.debug("exec plan optimized", RelOptUtil.toString(plan));
RelDataType rowType = plan.getRowType();
LOG.debug("Return row type: {}", rowType);
Schema from = Types.from(rowType);
LOG.debug("Return row schema: {}", from);
Interpreter interpreter = new Interpreter(new EmbededDataContext(new JavaTypeFactoryImpl(), null), plan);
boolean empty = true;
for (Object[] row : interpreter) {
LOG.debug("RawRow {} with schema {}", row, from);
GenericRecord record = IndexedRecords.fromRecord(from, row);
LOG.debug("Row", record);
empty = false;
}
Assert.assertFalse(empty);
}