Use of io.mycat.hbt.parser.ParseNode in project Mycat2 by MyCATApache.
In the class DrdsSqlCompiler, the method doHbt.
@SneakyThrows
public Plan doHbt(String hbtText) {
    log.debug("receive hbt");
    log.debug(hbtText);
    // Parse the HBT text into an AST and convert it to the logical Schema tree.
    HBTParser hbtParser = new HBTParser(hbtText);
    ParseNode statement = hbtParser.statement();
    SchemaConvertor schemaConvertor = new SchemaConvertor();
    Schema originSchema = schemaConvertor.transforSchema(statement);
    // Translate the Schema tree into a Calcite RelNode.
    RelOptCluster cluster = newCluster();
    RelBuilder relBuilder = MycatCalciteSupport.relBuilderFactory.create(cluster, catalogReader);
    HBTQueryConvertor hbtQueryConvertor = new HBTQueryConvertor(Collections.emptyList(), relBuilder);
    RelNode relNode = hbtQueryConvertor.complie(originSchema);
    // Rule-based optimization with Calcite's HEP planner.
    HepProgramBuilder hepProgramBuilder = new HepProgramBuilder();
    hepProgramBuilder.addRuleInstance(CoreRules.AGGREGATE_REDUCE_FUNCTIONS);
    hepProgramBuilder.addMatchLimit(512);
    HepProgram hepProgram = hepProgramBuilder.build();
    HepPlanner hepPlanner = new HepPlanner(hepProgram);
    hepPlanner.setRoot(relNode);
    RelNode bestExp = hepPlanner.findBestExp();
    // Replace scans of physical tables with transient SQL scans that carry the
    // generated SQL for the target data node.
    bestExp = bestExp.accept(new RelShuttleImpl() {
        @Override
        public RelNode visit(TableScan scan) {
            AbstractMycatTable table = scan.getTable().unwrap(AbstractMycatTable.class);
            if (table != null) {
                if (table instanceof MycatPhysicalTable) {
                    Partition partition = ((MycatPhysicalTable) table).getPartition();
                    MycatPhysicalTable mycatPhysicalTable = (MycatPhysicalTable) table;
                    SqlNode sqlNode = MycatCalciteSupport.INSTANCE.convertToSqlTemplate(scan, MycatCalciteSupport.INSTANCE.getSqlDialectByTargetName(partition.getTargetName()), false);
                    SqlDialect dialect = MycatCalciteSupport.INSTANCE.getSqlDialectByTargetName(partition.getTargetName());
                    return new MycatTransientSQLTableScan(cluster, mycatPhysicalTable.getRowType(), partition.getTargetName(), sqlNode.toSqlString(dialect).getSql());
                }
            }
            return super.visit(scan);
        }
    });
    // Run any remaining scans through the RBO view rewriter.
    bestExp = bestExp.accept(new RelShuttleImpl() {
        @Override
        public RelNode visit(TableScan scan) {
            return SQLRBORewriter.view(scan).orElse(scan);
        }
    });
    // Cost-based optimization, then build the executable plan.
    MycatRel mycatRel = optimizeWithCBO(bestExp, Collections.emptyList());
    CodeExecuterContext codeExecuterContext = getCodeExecuterContext(ImmutableMap.of(), mycatRel, false);
    return new PlanImpl(mycatRel, codeExecuterContext, mycatRel.getRowType().getFieldNames());
}
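The rule-based pass in doHbt is plain Apache Calcite. As a point of reference, a minimal sketch of the same HEP setup in isolation, applied to an already-built RelNode, could look like the following (the class and method names here are illustrative, not part of the project):

import org.apache.calcite.plan.hep.HepPlanner;
import org.apache.calcite.plan.hep.HepProgram;
import org.apache.calcite.plan.hep.HepProgramBuilder;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.rules.CoreRules;

public final class HepStageSketch {
    // Same program as doHbt builds: AGGREGATE_REDUCE_FUNCTIONS rewrites AVG/STDDEV/VAR
    // style aggregate calls into SUM/COUNT combinations, bounded to 512 rule matches.
    public static RelNode reduceAggregates(RelNode input) {
        HepProgram program = new HepProgramBuilder()
                .addRuleInstance(CoreRules.AGGREGATE_REDUCE_FUNCTIONS)
                .addMatchLimit(512)
                .build();
        HepPlanner planner = new HepPlanner(program);
        planner.setRoot(input);
        return planner.findBestExp();
    }
}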
Use of io.mycat.hbt.parser.ParseNode in project Mycat2 by MyCATApache.
In the class SchemaConvertor, the method getOrderItem.
public OrderItem getOrderItem(ParseNode parseNode) {
    CallExpr parseNode1 = (CallExpr) parseNode;
    List<ParseNode> exprs = parseNode1.getArgs().getExprs();
    String identifier = exprs.get(0).toString();
    Direction direction = Direction.parse(exprs.get(1).toString());
    return order(identifier, direction);
}
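A hedged usage sketch of getOrderItem follows. The HBT fragment and the assumption that HBTParser.statement() returns the order(...) call expression directly are illustrative; imports for SchemaConvertor, OrderItem, and Direction are omitted because their packages are not shown in this listing.

import io.mycat.hbt.parser.HBTParser;
import io.mycat.hbt.parser.ParseNode;

public class OrderItemSketch {
    public static void main(String[] args) {
        // Assumed HBT fragment: a call expression whose first argument is the column
        // identifier and whose second is the sort direction.
        ParseNode node = new HBTParser("order(id,ASC)").statement();
        OrderItem item = new SchemaConvertor().getOrderItem(node);
        System.out.println(item); // expected: column "id", parsed via Direction.parse("ASC")
    }
}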
Use of io.mycat.hbt.parser.ParseNode in project Mycat2 by MyCATApache.
In the class SchemaConvertor, the method fieldType.
public FieldType fieldType(ParseNode parseNode) {
    CallExpr parseNode1 = (CallExpr) parseNode;
    String name = parseNode1.getName();
    List<ParseNode> exprs = parseNode1.getArgs().getExprs();
    String id = exprs.get(0).toString();
    String type = exprs.get(1).toString();
    final boolean nullable = Optional.ofNullable(getArg(exprs, 2)).map(i -> Boolean.parseBoolean(i)).orElse(true);
    final Integer precision = Optional.ofNullable(getArg(exprs, 3)).map(i -> Integer.parseInt(i)).orElse(null);
    final Integer scale = Optional.ofNullable(getArg(exprs, 4)).map(i -> Integer.parseInt(i)).orElse(null);
    return new FieldType(id, type, nullable, precision, scale);
}
Use of io.mycat.hbt.parser.ParseNode in project Mycat2 by MyCATApache.
In the class SchemaConvertor, the method fields.
public List<FieldType> fields(ParseNode fields) {
    CallExpr callExpr = (CallExpr) fields;
    List<ParseNode> exprs = callExpr.getArgs().getExprs();
    return exprs.stream().map(i -> fieldType(i)).collect(Collectors.toList());
}
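Together, fields and fieldType decode a nested call expression into column metadata. A sketch under the same assumptions as above (HBT spelling inferred from the method names; SchemaConvertor and FieldType imports omitted):

import io.mycat.hbt.parser.HBTParser;
import io.mycat.hbt.parser.ParseNode;
import java.util.List;

public class FieldTypeSketch {
    public static void main(String[] args) {
        // Assumed fragment: each fieldType(...) carries id, type, and the optional
        // nullable/precision/scale arguments handled positionally by fieldType above.
        ParseNode node = new HBTParser("fields(fieldType(id,int,false,11,0),fieldType(user_id,varchar))").statement();
        List<FieldType> columns = new SchemaConvertor().fields(node);
        // Expected: two FieldType entries; the second keeps the defaults
        // nullable=true, precision=null, scale=null.
        System.out.println(columns);
    }
}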
Use of io.mycat.hbt.parser.ParseNode in project Mycat2 by MyCATApache.
In the class SchemaConvertor, the method transforSchema.
public Schema transforSchema(ParseNode parseNode) {
    if (parseNode instanceof CallExpr) {
        CallExpr node = (CallExpr) parseNode;
        String name = node.getName().toLowerCase();
        List<ParseNode> exprList = node.getArgs().getExprs();
        HBTOp op = map.get(name);
        if (op == null) {
            System.err.println(name);
            // Fail fast with the unknown operator name instead of reaching the
            // switch with a null op.
            Objects.requireNonNull(op, name);
        }
        switch (op) {
            case UNION_DISTINCT:
            case UNION_ALL:
            case EXCEPT_DISTINCT:
            case EXCEPT_ALL:
            case INTERSECT_DISTINCT:
            case INTERSECT_ALL: {
                List<Schema> collect = exprList.stream().map(expr -> transforSchema(expr)).collect(Collectors.toList());
                return set(op, collect);
            }
            case FROM_TABLE: {
                List<String> collect = exprList.stream().map(i -> i.toString()).collect(Collectors.toList());
                return fromTable(collect);
            }
            case FROM_REL_TO_SQL: {
                Schema schema = transforSchema(exprList.get(1));
                return new FromRelToSqlSchema(exprList.get(0).toString(), schema);
            }
            case FROM_SQL: {
                // Two forms: (targetName, sql) or (targetName, fields(...), sql).
                List<FieldType> fieldTypes;
                String targetName = null;
                String sql = null;
                switch (exprList.size()) {
                    case 2: {
                        targetName = exprList.get(0).toString();
                        fieldTypes = Collections.emptyList();
                        sql = exprList.get(1).toString();
                        break;
                    }
                    case 3: {
                        targetName = exprList.get(0).toString();
                        fieldTypes = fields(exprList.get(1));
                        sql = exprList.get(2).toString();
                        break;
                    }
                    default:
                        throw new IllegalArgumentException();
                }
                return new FromSqlSchema(fieldTypes, targetName, sql);
            }
            case FILTER_FROM_TABLE: {
                List<String> collect = exprList.subList(1, exprList.size()).stream().map(i -> i.toString()).collect(Collectors.toList());
                return new FilterFromTableSchema(transforExpr(exprList.get(0)), collect);
            }
            case MAP: {
                List<Expr> collect = exprList.subList(1, exprList.size()).stream().map(i -> transforExpr(i)).collect(Collectors.toList());
                Schema schema = transforSchema(exprList.get(0));
                return map(schema, collect);
            }
            case FILTER: {
                Schema schema = transforSchema(exprList.get(0));
                Expr expr = transforExpr(exprList.get(1));
                return filter(schema, expr);
            }
            case LIMIT: {
                Schema schema = transforSchema(exprList.get(0));
                Number offset = getNumber(exprList.get(1));
                Number limit = getNumber(exprList.get(2));
                return limit(schema, offset, limit);
            }
            case ORDER: {
                List<OrderItem> orderItemList = order(exprList.subList(1, exprList.size()));
                Schema schema = transforSchema(exprList.get(0));
                return orderBy(schema, orderItemList);
            }
            case GROUP: {
                int size = exprList.size();
                CallExpr source = (CallExpr) exprList.get(0);
                CallExpr keys = (CallExpr) exprList.get(1);
                Schema schema = transforSchema(source);
                List<AggregateCall> aggregating = Collections.emptyList();
                List<GroupKey> groupkeys = keys(keys);
                switch (size) {
                    case 2: {
                        break;
                    }
                    case 3: {
                        CallExpr exprs = (CallExpr) exprList.get(2);
                        aggregating = aggregating(exprs);
                        break;
                    }
                    default:
                        throw new UnsupportedOperationException();
                }
                return groupBy(schema, groupkeys, aggregating);
            }
            case TABLE: {
                List<FieldType> fields = fields(exprList.get(0));
                List<Object> values = values(exprList.get(1));
                return table(fields, values);
            }
            case DISTINCT: {
                Schema schema = transforSchema(exprList.get(0));
                return distinct(schema);
            }
            case RENAME: {
                Schema schema = transforSchema(exprList.get(0));
                List<String> iterms = exprList.subList(1, exprList.size()).stream().map(i -> i.toString()).collect(Collectors.toList());
                return new RenameSchema(schema, iterms);
            }
            case INNER_JOIN:
            case LEFT_JOIN:
            case RIGHT_JOIN:
            case FULL_JOIN:
            case SEMI_JOIN:
            case ANTI_JOIN: {
                Expr expr = transforExpr(exprList.get(0));
                Schema schema = transforSchema(exprList.get(1));
                Schema schema2 = transforSchema(exprList.get(2));
                return join(op, expr, schema, schema2);
            }
            case CORRELATE_INNER_JOIN:
            case CORRELATE_LEFT_JOIN: {
                String refName = exprList.get(0).toString();
                Schema leftschema = transforSchema(exprList.get(1));
                Schema rightschema = transforSchema(exprList.get(2));
                return correlate(op, refName, leftschema, rightschema);
            }
            case EXPLAIN: {
                Schema schema = transforSchema(((CallExpr) parseNode).getArgs().getExprs().get(0));
                return new CommandSchema(HBTOp.EXPLAIN, schema);
            }
            case EXPLAIN_SQL: {
                Schema schema = transforSchema(((CallExpr) parseNode).getArgs().getExprs().get(0));
                return new CommandSchema(HBTOp.EXPLAIN_SQL, schema);
            }
            case MODIFY_FROM_SQL: {
                return new ModifyFromSql(exprList.get(0).toString(), exprList.get(1).toString());
            }
            case MERGE_MODIFY: {
                return new MergeModify(exprList.stream().map(i -> (ModifyFromSql) transforSchema(i)).collect(Collectors.toList()));
            }
            default: {
                throw new UnsupportedOperationException();
            }
        }
    } else if (parseNode instanceof ParenthesesExpr) {
        ParenthesesExpr parseNode1 = (ParenthesesExpr) parseNode;
        if (parseNode1.getExprs().size() == 1) {
            return transforSchema(parseNode1.getExprs().get(0));
        }
    } else {
        return null;
    }
    return null;
}
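For an end-to-end view, the sketch below parses a small HBT statement and converts it with transforSchema, mirroring the first few lines of doHbt. The keyword spellings (orderBy, fromTable, order) are inferred from the HBTOp names and may not match the real grammar exactly; SchemaConvertor and Schema imports are omitted as before.

import io.mycat.hbt.parser.HBTParser;
import io.mycat.hbt.parser.ParseNode;

public class TransforSchemaSketch {
    public static void main(String[] args) {
        // Assumed HBT text: an order-by over a table scan.
        String hbt = "orderBy(fromTable(db1,travelrecord),order(id,ASC))";
        ParseNode statement = new HBTParser(hbt).statement();
        Schema schema = new SchemaConvertor().transforSchema(statement);
        // Expected shape: an order-by schema node wrapping the fromTable node,
        // per the ORDER and FROM_TABLE branches above.
        System.out.println(schema);
    }
}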