use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral in project drill by axbaretto.
In class FieldsReWriterUtil, the method getFieldNameFromItemStarField:
/**
 * Checks if the operator call is using an item star field.
 * Returns the field name if so, null otherwise.
 *
 * @param rexCall operator call
 * @param fieldNames list of field names
 * @return field name, null otherwise
 */
public static String getFieldNameFromItemStarField(RexCall rexCall, List<String> fieldNames) {
  if (!SqlStdOperatorTable.ITEM.equals(rexCall.getOperator())) {
    return null;
  }
  if (rexCall.getOperands().size() != 2) {
    return null;
  }
  if (!(rexCall.getOperands().get(0) instanceof RexInputRef && rexCall.getOperands().get(1) instanceof RexLiteral)) {
    return null;
  }
  // get parent field reference from the first operand (ITEM($0, 'col_name') -> $0)
  // and check if it corresponds to the dynamic star
  RexInputRef rexInputRef = (RexInputRef) rexCall.getOperands().get(0);
  String parentFieldName = fieldNames.get(rexInputRef.getIndex());
  if (!SchemaPath.DYNAMIC_STAR.equals(parentFieldName)) {
    return null;
  }
  // get field name from the second operand (ITEM($0, 'col_name') -> col_name)
  RexLiteral rexLiteral = (RexLiteral) rexCall.getOperands().get(1);
  if (SqlTypeName.CHAR.equals(rexLiteral.getType().getSqlTypeName())) {
    return RexLiteral.stringValue(rexLiteral);
  }
  return null;
}
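For reference, a minimal usage sketch, not taken from the Drill source: it builds ITEM($0, 'city') over a dynamic-star column with Calcite's RexBuilder and resolves the name back. The type-factory setup, the 'city' key and the single-entry field list are illustrative assumptions; imports are omitted.

// Hypothetical usage sketch; the type factory, the "city" key and the field list are
// assumptions made for illustration, not taken from the Drill source.
RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
RexBuilder rexBuilder = new RexBuilder(typeFactory);

// $0 stands for the dynamic star column; ITEM($0, 'city') models SELECT t.city FROM t
RexNode starRef = rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.ANY), 0);
RexCall itemCall = (RexCall) rexBuilder.makeCall(SqlStdOperatorTable.ITEM, starRef, rexBuilder.makeLiteral("city"));

List<String> fieldNames = Collections.singletonList(SchemaPath.DYNAMIC_STAR);
String fieldName = FieldsReWriterUtil.getFieldNameFromItemStarField(itemCall, fieldNames);
// fieldName should resolve to "city"; a non-ITEM call, a non-star parent or a non-CHAR key yields null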
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral in project drill by axbaretto.
In class PreProcessLogicalRel, the method visit:
@Override
public RelNode visit(LogicalProject project) {
  final List<RexNode> projExpr = Lists.newArrayList();
  for (RexNode rexNode : project.getChildExps()) {
    projExpr.add(rexNode.accept(unwrappingExpressionVisitor));
  }
  project = project.copy(project.getTraitSet(), project.getInput(), projExpr, project.getRowType());
  List<RexNode> exprList = new ArrayList<>();
  boolean rewrite = false;
  for (RexNode rex : project.getChildExps()) {
    RexNode newExpr = rex;
    if (rex instanceof RexCall) {
      RexCall function = (RexCall) rex;
      String functionName = function.getOperator().getName();
      int nArgs = function.getOperands().size();
      // check if it's a convert_from or convert_to function
      if (functionName.equalsIgnoreCase("convert_from") || functionName.equalsIgnoreCase("convert_to")) {
        String literal;
        if (nArgs == 2) {
          if (function.getOperands().get(1) instanceof RexLiteral) {
            try {
              literal = ((NlsString) (((RexLiteral) function.getOperands().get(1)).getValue())).getValue();
            } catch (final ClassCastException e) {
              // Caused by the user entering a non-string literal
              throw getConvertFunctionInvalidTypeException(function);
            }
          } else {
            // Caused by the user entering a non-literal
            throw getConvertFunctionInvalidTypeException(function);
          }
        } else {
          // Second operand is missing
          throw UserException.parseError().message("'%s' expects a string literal as a second argument.", functionName).build(logger);
        }
        RexBuilder builder = new RexBuilder(factory);
        // construct the new function name based on the input argument
        String newFunctionName = functionName + literal;
        // Look up the new function name in the drill operator table
        List<SqlOperator> operatorList = table.getSqlOperator(newFunctionName);
        if (operatorList.size() == 0) {
          // User typed in an invalid type name
          throw getConvertFunctionException(functionName, literal);
        }
        SqlFunction newFunction = null;
        // Find the SqlFunction with the correct args
        for (SqlOperator op : operatorList) {
          if (op.getOperandTypeChecker().getOperandCountRange().isValidCount(nArgs - 1)) {
            newFunction = (SqlFunction) op;
            break;
          }
        }
        if (newFunction == null) {
          // we are here because we found some dummy convert function. (See DummyConvertFrom and DummyConvertTo)
          throw getConvertFunctionException(functionName, literal);
        }
        // create the new expression to be used in the rewritten project
        newExpr = builder.makeCall(newFunction, function.getOperands().subList(0, 1));
        rewrite = true;
      }
    }
    exprList.add(newExpr);
  }
  if (rewrite) {
    LogicalProject newProject = project.copy(project.getTraitSet(), project.getInput(0), exprList, project.getRowType());
    return visitChild(newProject, 0, project.getInput());
  }
  return visitChild(project, 0, project.getInput());
}
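Concretely, the rewrite only renames the call and drops the encoding literal; a hypothetical trace of that naming step (the 'JSON' encoding is an example, and the operator-table lookup is elided):

// Hypothetical trace of the renaming step for convert_from(col, 'JSON').
String functionName = "convert_from";              // operator name from the RexCall
String literal = "JSON";                           // value of the second (RexLiteral) operand
String newFunctionName = functionName + literal;   // "convert_fromJSON"
// table.getSqlOperator("convert_fromJSON") is then searched for an operator that accepts
// nArgs - 1 == 1 operand, and the call is rebuilt with only the column argument.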
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral in project drill by axbaretto.
In class DrillValuesRelBase, the method convertToJsonNode:
private static JsonNode convertToJsonNode(RelDataType rowType, ImmutableList<ImmutableList<RexLiteral>> tuples) throws IOException {
  TokenBuffer out = new TokenBuffer(MAPPER.getFactory().getCodec(), false);
  JsonOutput json = new ExtendedJsonOutput(out);
  json.writeStartArray();
  String[] fields = rowType.getFieldNames().toArray(new String[rowType.getFieldCount()]);
  for (List<RexLiteral> row : tuples) {
    json.writeStartObject();
    int i = 0;
    for (RexLiteral field : row) {
      json.writeFieldName(fields[i]);
      writeLiteral(field, json);
      i++;
    }
    json.writeEndObject();
  }
  json.writeEndArray();
  json.flush();
  return out.asParser().readValueAsTree();
}
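For context, convertToJsonNode never produces JSON text; it buffers tokens in memory and replays them as a tree. A stripped-down sketch of that TokenBuffer round trip with plain Jackson (no Drill ExtendedJsonOutput; the helper name and the "id" field are illustrative assumptions):

// Plain-Jackson sketch of the TokenBuffer round trip used above; illustrative data only.
static JsonNode tokenBufferRoundTripSketch() throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  TokenBuffer buffer = new TokenBuffer(mapper, false);
  buffer.writeStartArray();
  buffer.writeStartObject();
  buffer.writeFieldName("id");
  buffer.writeNumber(1);
  buffer.writeEndObject();
  buffer.writeEndArray();
  buffer.flush();
  // readValueAsTree() replays the buffered tokens into the tree [{"id":1}]
  return buffer.asParser().readValueAsTree();
}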
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral in project drill by axbaretto.
In class WindowPrel, the method toDrill:
protected LogicalExpression toDrill(AggregateCall call, List<String> fn) {
  DrillParseContext context = new DrillParseContext(PrelUtil.getSettings(getCluster()));
  List<LogicalExpression> args = Lists.newArrayList();
  for (Integer i : call.getArgList()) {
    final int indexInConstants = i - fn.size();
    if (i < fn.size()) {
      args.add(new FieldReference(fn.get(i)));
    } else {
      final RexLiteral constant = constants.get(indexInConstants);
      LogicalExpression expr = DrillOptiq.toDrill(context, getInput(), constant);
      args.add(expr);
    }
  }
  // for count(1).
  if (args.isEmpty()) {
    args.add(new ValueExpressions.LongExpression(1L));
  }
  return new FunctionCall(call.getAggregation().getName().toLowerCase(), args, ExpressionPosition.UNKNOWN);
}
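The branch on i < fn.size() follows Calcite's convention for window aggregates: operand ordinals below the input field count reference input columns, while larger ordinals index into the window's constants list. A hypothetical illustration with made-up numbers:

// Hypothetical numbers: an input with 3 fields and an aggregate call over args [1, 4].
int fieldCount = 3;                      // stands in for fn.size()
for (int i : new int[] {1, 4}) {
  if (i < fieldCount) {
    System.out.println("arg " + i + " -> FieldReference to fn.get(" + i + ")");
  } else {
    System.out.println("arg " + i + " -> constants.get(" + (i - fieldCount) + ")");
  }
}
// prints: arg 1 -> FieldReference to fn.get(1)
//         arg 4 -> constants.get(1)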
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rex.RexLiteral in project hive by apache.
In class ASTConverter, the method convert:
private ASTNode convert() throws CalciteSemanticException {
  /*
   * 1. Walk RelNode Graph; note from, where, gBy.. nodes.
   */
  new QBVisitor().go(root);
  /*
   * 2. Convert the from node.
   */
  QueryBlockInfo qb = convertSource(from);
  schema = qb.schema;
  hiveAST.from = ASTBuilder.construct(HiveParser.TOK_FROM, "TOK_FROM").add(qb.ast).node();
  /*
   * 3. Convert the filter node.
   */
  if (where != null) {
    ASTNode cond = where.getCondition().accept(new RexVisitor(schema, false, root.getCluster().getRexBuilder()));
    hiveAST.where = ASTBuilder.where(cond);
    planMapper.link(cond, where);
    planMapper.link(cond, RelTreeSignature.of(where));
  }
  /*
   * 4. GBy
   */
  if (groupBy != null) {
    ASTBuilder b;
    boolean groupingSetsExpression = false;
    Group aggregateType = groupBy.getGroupType();
    switch (aggregateType) {
      case SIMPLE:
        b = ASTBuilder.construct(HiveParser.TOK_GROUPBY, "TOK_GROUPBY");
        break;
      case ROLLUP:
      case CUBE:
      case OTHER:
        b = ASTBuilder.construct(HiveParser.TOK_GROUPING_SETS, "TOK_GROUPING_SETS");
        groupingSetsExpression = true;
        break;
      default:
        throw new CalciteSemanticException("Group type not recognized");
    }
    HiveAggregate hiveAgg = (HiveAggregate) groupBy;
    if (hiveAgg.getAggregateColumnsOrder() != null) {
      // Aggregation columns may have been sorted in a specific order
      for (int pos : hiveAgg.getAggregateColumnsOrder()) {
        addRefToBuilder(b, groupBy.getGroupSet().nth(pos));
      }
      for (int pos = 0; pos < groupBy.getGroupCount(); pos++) {
        if (!hiveAgg.getAggregateColumnsOrder().contains(pos)) {
          addRefToBuilder(b, groupBy.getGroupSet().nth(pos));
        }
      }
    } else {
      // Aggregation columns have not been reordered
      for (int i : groupBy.getGroupSet()) {
        addRefToBuilder(b, i);
      }
    }
    // Grouping sets expressions
    if (groupingSetsExpression) {
      for (ImmutableBitSet groupSet : groupBy.getGroupSets()) {
        ASTBuilder expression = ASTBuilder.construct(HiveParser.TOK_GROUPING_SETS_EXPRESSION, "TOK_GROUPING_SETS_EXPRESSION");
        for (int i : groupSet) {
          addRefToBuilder(expression, i);
        }
        b.add(expression);
      }
    }
    if (!groupBy.getGroupSet().isEmpty()) {
      hiveAST.groupBy = b.node();
    }
    schema = new Schema(schema, groupBy);
  }
  /*
   * 5. Having
   */
  if (having != null) {
    ASTNode cond = having.getCondition().accept(new RexVisitor(schema, false, root.getCluster().getRexBuilder()));
    hiveAST.having = ASTBuilder.having(cond);
  }
  /*
   * 6. Project
   */
  ASTBuilder b = ASTBuilder.construct(HiveParser.TOK_SELECT, "TOK_SELECT");
  if (select instanceof Project) {
    List<RexNode> childExps = ((Project) select).getProjects();
    if (childExps.isEmpty()) {
      RexLiteral r = select.getCluster().getRexBuilder().makeExactLiteral(new BigDecimal(1));
      ASTNode selectExpr = ASTBuilder.selectExpr(ASTBuilder.literal(r), "1");
      b.add(selectExpr);
    } else {
      int i = 0;
      for (RexNode r : childExps) {
        ASTNode expr = r.accept(new RexVisitor(schema, r instanceof RexLiteral, select.getCluster().getRexBuilder()));
        String alias = select.getRowType().getFieldNames().get(i++);
        ASTNode selectExpr = ASTBuilder.selectExpr(expr, alias);
        b.add(selectExpr);
      }
    }
    hiveAST.select = b.node();
  } else {
    // select is a UDTF
    HiveTableFunctionScan udtf = (HiveTableFunctionScan) select;
    List<ASTNode> children = new ArrayList<>();
    RexCall call = (RexCall) udtf.getCall();
    for (RexNode r : call.getOperands()) {
      ASTNode expr = r.accept(new RexVisitor(schema, r instanceof RexLiteral, select.getCluster().getRexBuilder()));
      children.add(expr);
    }
    ASTBuilder sel = ASTBuilder.construct(HiveParser.TOK_SELEXPR, "TOK_SELEXPR");
    ASTNode function = buildUDTFAST(call.getOperator().getName(), children);
    sel.add(function);
    for (String alias : udtf.getRowType().getFieldNames()) {
      sel.add(HiveParser.Identifier, alias);
    }
    b.add(sel);
    hiveAST.select = b.node();
  }
  /*
   * 7. Order: used in the Order By from the block above. RelNode has no pointer to
   * its parent, hence we need to go top down; but the OB at each block really belongs
   * to its src/from. Hence the need to pass in the sort for each block from its parent.
   * 8. Limit
   */
  convertOrderToASTNode(orderLimit);
  return hiveAST.getAST();
}
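Of the numbered steps above, the grouping-sets branch in step 4 is the least obvious: for ROLLUP, CUBE and OTHER group types, one TOK_GROUPING_SETS_EXPRESSION is emitted per bit set returned by Aggregate.getGroupSets(). A small hypothetical sketch of how those Calcite bit sets are walked (the bit sets shown assume GROUP BY ROLLUP(a, b); the AST-building call is only indicated as a comment):

// Assumed group sets for GROUP BY ROLLUP(a, b): {0, 1}, {0}, {} (not taken from the Hive source).
List<ImmutableBitSet> groupSets = Arrays.asList(
    ImmutableBitSet.of(0, 1), ImmutableBitSet.of(0), ImmutableBitSet.of());
for (ImmutableBitSet groupSet : groupSets) {
  // one TOK_GROUPING_SETS_EXPRESSION node is built per group set
  for (int i : groupSet) {
    // addRefToBuilder(expression, i) would add a column reference for field ordinal i
  }
}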