Use of io.mycat.hbt.ast.HBTOp in the Mycat2 project by MyCATApache.
Example: the transforSchema method of the SchemaConvertor class.
/**
 * Translates a parsed HBT AST node into a {@link Schema} (relational operator) tree.
 *
 * <p>A {@link CallExpr} is dispatched on its lower-cased function name through the
 * {@code map} lookup table; each operator branch recursively converts the call's
 * arguments (input schemas, expressions, field lists, …). A single-element
 * {@link ParenthesesExpr} is unwrapped transparently.
 *
 * @param parseNode the AST node to convert
 * @return the converted schema, or {@code null} when the node cannot be interpreted
 *         as a schema (e.g. a multi-element parentheses group or an unknown node kind)
 * @throws NullPointerException          if the call names an operator absent from {@code map}
 * @throws UnsupportedOperationException if the operator has no conversion branch
 * @throws IllegalArgumentException      if {@code fromSql} has a wrong argument count
 */
public Schema transforSchema(ParseNode parseNode) {
    if (parseNode instanceof CallExpr) {
        CallExpr node = (CallExpr) parseNode;
        String name = node.getName().toLowerCase();
        List<ParseNode> exprList = node.getArgs().getExprs();
        HBTOp op = map.get(name);
        // Fail fast with a descriptive message. Previously an unknown name was only
        // printed to stderr and requireNonNull was applied to the (never-null) name,
        // so switch(op) then threw a bare, message-less NPE.
        Objects.requireNonNull(op, "unknown HBT operator: " + name);
        switch (op) {
            case UNION_DISTINCT:
            case UNION_ALL:
            case EXCEPT_DISTINCT:
            case EXCEPT_ALL:
            case INTERSECT_DISTINCT:
            case INTERSECT_ALL: {
                // Set operations: every argument is itself a schema.
                List<Schema> collect = exprList.stream().map(expr -> transforSchema(expr)).collect(Collectors.toList());
                return set(op, collect);
            }
            case FROM_TABLE: {
                // Arguments form the (possibly qualified) table name parts.
                List<String> collect = exprList.stream().map(i -> i.toString()).collect(Collectors.toList());
                return fromTable(collect);
            }
            case FROM_REL_TO_SQL: {
                // arg0 = target name, arg1 = the relational tree to render as SQL.
                Schema schema = transforSchema(exprList.get(1));
                return new FromRelToSqlSchema(exprList.get(0).toString(), schema);
            }
            case FROM_SQL: {
                // Either (target, sql) or (target, fieldTypes, sql).
                List<FieldType> fieldTypes;
                String targetName;
                String sql;
                switch (exprList.size()) {
                    case 2: {
                        targetName = exprList.get(0).toString();
                        fieldTypes = Collections.emptyList();
                        sql = exprList.get(1).toString();
                        break;
                    }
                    case 3: {
                        targetName = exprList.get(0).toString();
                        fieldTypes = fields(exprList.get(1));
                        sql = exprList.get(2).toString();
                        break;
                    }
                    default:
                        throw new IllegalArgumentException("fromSql expects 2 or 3 arguments, got " + exprList.size());
                }
                return new FromSqlSchema(fieldTypes, targetName, sql);
            }
            case FILTER_FROM_TABLE: {
                // arg0 = filter expression, remaining args = table name parts.
                List<String> collect = exprList.subList(1, exprList.size()).stream().map(i -> i.toString()).collect(Collectors.toList());
                return new FilterFromTableSchema(transforExpr(exprList.get(0)), collect);
            }
            case MAP: {
                // arg0 = input schema, remaining args = projection expressions.
                List<Expr> collect = exprList.subList(1, exprList.size()).stream().map(i -> transforExpr(i)).collect(Collectors.toList());
                Schema schema = transforSchema(exprList.get(0));
                return map(schema, collect);
            }
            case FILTER: {
                Schema schema = transforSchema(exprList.get(0));
                Expr expr = transforExpr(exprList.get(1));
                return filter(schema, expr);
            }
            case LIMIT: {
                Schema schema = transforSchema(exprList.get(0));
                Number offset = getNumber(exprList.get(1));
                Number limit = getNumber(exprList.get(2));
                return limit(schema, offset, limit);
            }
            case ORDER: {
                // arg0 = input schema, remaining args = order items.
                List<OrderItem> orderItemList = order(exprList.subList(1, exprList.size()));
                Schema schema = transforSchema(exprList.get(0));
                return orderBy(schema, orderItemList);
            }
            case GROUP: {
                // (source, keys) or (source, keys, aggregates).
                int size = exprList.size();
                CallExpr source = (CallExpr) exprList.get(0);
                CallExpr keys = (CallExpr) exprList.get(1);
                Schema schema = transforSchema(source);
                List<AggregateCall> aggregating = Collections.emptyList();
                List<GroupKey> groupkeys = keys(keys);
                switch (size) {
                    case 2:
                        break; // keys only, no aggregate calls
                    case 3: {
                        CallExpr exprs = (CallExpr) exprList.get(2);
                        aggregating = aggregating(exprs);
                        break;
                    }
                    default:
                        throw new UnsupportedOperationException();
                }
                return groupBy(schema, groupkeys, aggregating);
            }
            case TABLE: {
                // Inline values table: (fieldTypes, rows).
                List<FieldType> fields = fields(exprList.get(0));
                List<Object> values = values(exprList.get(1));
                return table(fields, values);
            }
            case DISTINCT: {
                Schema schema = transforSchema(exprList.get(0));
                return distinct(schema);
            }
            case RENAME: {
                // arg0 = input schema, remaining args = new column names.
                Schema schema = transforSchema(exprList.get(0));
                List<String> items = exprList.subList(1, exprList.size()).stream().map(i -> i.toString()).collect(Collectors.toList());
                return new RenameSchema(schema, items);
            }
            case INNER_JOIN:
            case LEFT_JOIN:
            case RIGHT_JOIN:
            case FULL_JOIN:
            case SEMI_JOIN:
            case ANTI_JOIN: {
                // (condition, left, right).
                Expr expr = transforExpr(exprList.get(0));
                Schema schema = transforSchema(exprList.get(1));
                Schema schema2 = transforSchema(exprList.get(2));
                return join(op, expr, schema, schema2);
            }
            case CORRELATE_INNER_JOIN:
            case CORRELATE_LEFT_JOIN: {
                // (correlation variable name, left, right).
                String refName = exprList.get(0).toString();
                Schema leftschema = transforSchema(exprList.get(1));
                Schema rightschema = transforSchema(exprList.get(2));
                return correlate(op, refName, leftschema, rightschema);
            }
            case EXPLAIN: {
                // exprList already holds this call's arguments; the original
                // re-derived it via a redundant cast of parseNode.
                Schema schema = transforSchema(exprList.get(0));
                return new CommandSchema(HBTOp.EXPLAIN, schema);
            }
            case EXPLAIN_SQL: {
                Schema schema = transforSchema(exprList.get(0));
                return new CommandSchema(HBTOp.EXPLAIN_SQL, schema);
            }
            case MODIFY_FROM_SQL: {
                // (target, sql).
                return new ModifyFromSql(exprList.get(0).toString(), exprList.get(1).toString());
            }
            case MERGE_MODIFY: {
                // Each argument is itself a modifyFromSql call.
                return new MergeModify(exprList.stream().map(i -> (ModifyFromSql) transforSchema(i)).collect(Collectors.toList()));
            }
            default:
                throw new UnsupportedOperationException("no conversion for operator: " + op);
        }
    } else if (parseNode instanceof ParenthesesExpr) {
        ParenthesesExpr parenthesesExpr = (ParenthesesExpr) parseNode;
        // Transparently unwrap a single-element parentheses group; anything else
        // is not a schema and falls through to the null return below.
        if (parenthesesExpr.getExprs().size() == 1) {
            return transforSchema(parenthesesExpr.getExprs().get(0));
        }
    }
    return null;
}
Use of io.mycat.hbt.ast.HBTOp in the Mycat2 project by MyCATApache.
Example: the visit method of the ExplainVisitor class.
/**
 * Renders a set operation (union/except/intersect variants) as its HBT function
 * name followed by a parenthesized, comma-separated list of its input schemas,
 * one per line.
 */
@Override
public void visit(SetOpSchema setOpSchema) {
    HBTOp operator = setOpSchema.getOp();
    append(operator.getFun());
    enter();
    append("(");
    List<Schema> inputs = setOpSchema.getSchemas();
    int lastIndex = inputs.size() - 1;
    // Every input except the last is followed by a separating comma.
    for (int index = 0; index < lastIndex; index++) {
        append("\n ");
        inputs.get(index).accept(this);
        append(",");
    }
    append("\n ");
    inputs.get(lastIndex).accept(this);
    append("\n");
    append(")");
    leave();
}
Use of io.mycat.hbt.ast.HBTOp in the Mycat2 project by MyCATApache.
Example: the toRex method of the HBTQueryConvertor class.
/**
 * Converts an HBT expression node into a Calcite {@link RexNode}.
 *
 * <p>Identifiers support several addressing forms: positional references
 * ({@code $n}, and {@code $$n} for the right input of a join), lookup by field
 * name across each join input, and a join naming convention where names ending
 * in {@code '0'} are resolved against the right input. Literals, casts,
 * {@code ?} parameters and function calls are translated directly.
 *
 * @param node the expression to translate
 * @return the equivalent row expression
 * @throws UnsupportedOperationException if the operator kind is not supported
 */
private RexNode toRex(Expr node) {
    HBTOp op = node.getOp();
    switch (op) {
        case IDENTIFIER: {
            String value = ((Identifier) node).getValue();
            if (value.startsWith("$") && Character.isDigit(value.charAt(value.length() - 1))) {
                // Positional reference: "$n" indexes the current input;
                // "$$n" references field n of the second join input.
                String substring = value.substring(1);
                if (joinCount > 1) {
                    if (substring.startsWith("$")) {
                        // Removed an unused "relBuilder.fields()" local that was dead code here.
                        return relBuilder.field(2, 1, Integer.parseInt(substring.substring(1)));
                    }
                    return relBuilder.field(2, 0, Integer.parseInt(substring));
                }
                return relBuilder.field(Integer.parseInt(substring));
            }
            if (joinCount > 1) {
                try {
                    // Resolve the name against each join input's row type in turn.
                    for (int i = 0; i < joinCount; i++) {
                        List<String> fieldNames = relBuilder.peek(i).getRowType().getFieldNames();
                        int indexOf = fieldNames.indexOf(value);
                        if (indexOf > -1) {
                            try {
                                return relBuilder.field(joinCount, i, indexOf);
                            } catch (Exception e) {
                                // Best effort: log and fall through to the next resolution strategy.
                                log.warn("may be a bug");
                                log.error("", e);
                            }
                        }
                    }
                } catch (Exception e) {
                    log.warn("may be a bug");
                    log.error("", e);
                }
                try {
                    char c = value.charAt(value.length() - 1);
                    // Join naming convention: a trailing '0' marks the right input.
                    // NOTE(review): only '0' is checked — confirm whether other suffixes occur.
                    if (c == '0') {
                        return relBuilder.field(2, 1, value);
                    }
                } catch (Exception e) {
                    log.warn("may be a bug");
                    log.error("", e);
                }
                return relBuilder.field(value);
            } else {
                return relBuilder.field(value);
            }
        }
        case LITERAL: {
            Literal node1 = (Literal) node;
            return relBuilder.literal(node1.getValue());
        }
        default: {
            if (node.op == AS_COLUMN_NAME) {
                return as(node);
            } else if (node.op == REF) {
                return ref(node);
            } else if (node.op == CAST) {
                RexNode rexNode = toRex(node.getNodes().get(0));
                Identifier type = (Identifier) node.getNodes().get(1);
                return relBuilder.cast(rexNode, toType(type.getValue()).getSqlTypeName());
            } else if (node.op == PARAM) {
                // Each '?' placeholder consumes the next prepared-statement parameter.
                return relBuilder.literal(params.get(paramIndex++));
            } else if (node.op == FUN) {
                Fun node2 = (Fun) node;
                // "as" and "ref" are built-ins, not ordinary function calls.
                if ("as".equals(node2.getFunctionName())) {
                    return as(node);
                }
                if ("ref".equals(node2.getFunctionName())) {
                    return ref(node);
                }
                return this.relBuilder.call(op(node2.getFunctionName()), toRex(node.getNodes()));
            } else {
                throw new UnsupportedOperationException();
            }
        }
    }
}
Aggregations