
Example 11 with RelDataTypeFactory

use of org.apache.calcite.rel.type.RelDataTypeFactory in project drill by apache.

the class DrillConstExecutor method reduce.

@Override
@SuppressWarnings("deprecation")
public void reduce(RexBuilder rexBuilder, List<RexNode> constExps, List<RexNode> reducedValues) {
    for (RexNode newCall : constExps) {
        LogicalExpression logEx = DrillOptiq.toDrill(new DrillParseContext(plannerSettings), (RelNode) null /* input rel */, newCall);
        ErrorCollectorImpl errors = new ErrorCollectorImpl();
        LogicalExpression materializedExpr = ExpressionTreeMaterializer.materialize(logEx, null, errors, funcImplReg);
        if (errors.getErrorCount() != 0) {
            String message = String.format("Failure while materializing expression in constant expression evaluator [%s].  Errors: %s", newCall.toString(), errors.toString());
            throw UserException.planError().message(message).build(logger);
        }
        if (NON_REDUCIBLE_TYPES.contains(materializedExpr.getMajorType().getMinorType())) {
            logger.debug("Constant expression not folded due to return type {}, complete expression: {}", materializedExpr.getMajorType(), ExpressionStringBuilder.toString(materializedExpr));
            reducedValues.add(newCall);
            continue;
        }
        ValueHolder output = InterpreterEvaluator.evaluateConstantExpr(udfUtilities, materializedExpr);
        RelDataTypeFactory typeFactory = rexBuilder.getTypeFactory();
        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL && TypeHelper.isNull(output)) {
            SqlTypeName sqlTypeName = TypeInferenceUtils.getCalciteTypeFromDrillType(materializedExpr.getMajorType().getMinorType());
            if (sqlTypeName == null) {
                String message = String.format("Error reducing constant expression, unsupported type: %s.", materializedExpr.getMajorType().getMinorType());
                throw UserException.unsupportedError().message(message).build(logger);
            }
            RelDataType type = TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, sqlTypeName, true);
            reducedValues.add(rexBuilder.makeNullLiteral(type));
            continue;
        }
        Function<ValueHolder, RexNode> literator = valueHolder -> {
            switch(materializedExpr.getMajorType().getMinorType()) {
                case INT:
                    {
                        int value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? ((NullableIntHolder) valueHolder).value : ((IntHolder) valueHolder).value;
                        return rexBuilder.makeLiteral(new BigDecimal(value), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTEGER, newCall.getType().isNullable()), false);
                    }
                case BIGINT:
                    {
                        long value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? ((NullableBigIntHolder) valueHolder).value : ((BigIntHolder) valueHolder).value;
                        return rexBuilder.makeLiteral(new BigDecimal(value), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BIGINT, newCall.getType().isNullable()), false);
                    }
                case FLOAT4:
                    {
                        float value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? ((NullableFloat4Holder) valueHolder).value : ((Float4Holder) valueHolder).value;
                        // +Infinity, -Infinity and NaN are emitted as string literals because
                        // BigDecimal cannot represent them.
                        if (!Float.isFinite(value)) {
                            return rexBuilder.makeLiteral(Float.toString(value));
                        }
                        return rexBuilder.makeLiteral(new BigDecimal(value), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.FLOAT, newCall.getType().isNullable()), false);
                    }
                case FLOAT8:
                    {
                        double value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? ((NullableFloat8Holder) valueHolder).value : ((Float8Holder) valueHolder).value;
                        // +Infinity, -Infinity and NaN are emitted as string literals because
                        // BigDecimal cannot represent them.
                        if (!Double.isFinite(value)) {
                            return rexBuilder.makeLiteral(Double.toString(value));
                        }
                        return rexBuilder.makeLiteral(new BigDecimal(value), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DOUBLE, newCall.getType().isNullable()), false);
                    }
                case VARCHAR:
                    {
                        String value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? StringFunctionHelpers.getStringFromVarCharHolder((NullableVarCharHolder) valueHolder) : StringFunctionHelpers.getStringFromVarCharHolder((VarCharHolder) valueHolder);
                        RelDataType type = typeFactory.createSqlType(SqlTypeName.VARCHAR, newCall.getType().getPrecision());
                        RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
                        return rexBuilder.makeLiteral(value, typeWithNullability, false);
                    }
                case BIT:
                    {
                        boolean value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? ((NullableBitHolder) valueHolder).value == 1 : ((BitHolder) valueHolder).value == 1;
                        return rexBuilder.makeLiteral(value, TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.BOOLEAN, newCall.getType().isNullable()), false);
                    }
                case DATE:
                    {
                        Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? new DateTime(((NullableDateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) : new DateTime(((DateHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
                        return rexBuilder.makeLiteral(DateString.fromCalendarFields(value), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DATE, newCall.getType().isNullable()), false);
                    }
                case DECIMAL9:
                    {
                        long value;
                        int scale;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableDecimal9Holder decimal9Out = (NullableDecimal9Holder) valueHolder;
                            value = decimal9Out.value;
                            scale = decimal9Out.scale;
                        } else {
                            Decimal9Holder decimal9Out = (Decimal9Holder) valueHolder;
                            value = decimal9Out.value;
                            scale = decimal9Out.scale;
                        }
                        return rexBuilder.makeLiteral(new BigDecimal(BigInteger.valueOf(value), scale), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()), false);
                    }
                case DECIMAL18:
                    {
                        long value;
                        int scale;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableDecimal18Holder decimal18Out = (NullableDecimal18Holder) valueHolder;
                            value = decimal18Out.value;
                            scale = decimal18Out.scale;
                        } else {
                            Decimal18Holder decimal18Out = (Decimal18Holder) valueHolder;
                            value = decimal18Out.value;
                            scale = decimal18Out.scale;
                        }
                        return rexBuilder.makeLiteral(new BigDecimal(BigInteger.valueOf(value), scale), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()), false);
                    }
                case VARDECIMAL:
                    {
                        DrillBuf buffer;
                        int start;
                        int end;
                        int scale;
                        int precision;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableVarDecimalHolder varDecimalHolder = (NullableVarDecimalHolder) valueHolder;
                            buffer = varDecimalHolder.buffer;
                            start = varDecimalHolder.start;
                            end = varDecimalHolder.end;
                            scale = varDecimalHolder.scale;
                            precision = varDecimalHolder.precision;
                        } else {
                            VarDecimalHolder varDecimalHolder = (VarDecimalHolder) valueHolder;
                            buffer = varDecimalHolder.buffer;
                            start = varDecimalHolder.start;
                            end = varDecimalHolder.end;
                            scale = varDecimalHolder.scale;
                            precision = varDecimalHolder.precision;
                        }
                        return rexBuilder.makeLiteral(org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromDrillBuf(buffer, start, end - start, scale), typeFactory.createSqlType(SqlTypeName.DECIMAL, precision, scale), false);
                    }
                case DECIMAL28SPARSE:
                    {
                        DrillBuf buffer;
                        int start;
                        int scale;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableDecimal28SparseHolder decimal28Out = (NullableDecimal28SparseHolder) valueHolder;
                            buffer = decimal28Out.buffer;
                            start = decimal28Out.start;
                            scale = decimal28Out.scale;
                        } else {
                            Decimal28SparseHolder decimal28Out = (Decimal28SparseHolder) valueHolder;
                            buffer = decimal28Out.buffer;
                            start = decimal28Out.start;
                            scale = decimal28Out.scale;
                        }
                        return rexBuilder.makeLiteral(org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromSparse(buffer, start * 20, 5, scale), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()), false);
                    }
                case DECIMAL38SPARSE:
                    {
                        DrillBuf buffer;
                        int start;
                        int scale;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableDecimal38SparseHolder decimal38Out = (NullableDecimal38SparseHolder) valueHolder;
                            buffer = decimal38Out.buffer;
                            start = decimal38Out.start;
                            scale = decimal38Out.scale;
                        } else {
                            Decimal38SparseHolder decimal38Out = (Decimal38SparseHolder) valueHolder;
                            buffer = decimal38Out.buffer;
                            start = decimal38Out.start;
                            scale = decimal38Out.scale;
                        }
                        return rexBuilder.makeLiteral(org.apache.drill.exec.util.DecimalUtility.getBigDecimalFromSparse(buffer, start * 24, 6, scale), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.DECIMAL, newCall.getType().isNullable()), false);
                    }
                case TIME:
                    {
                        Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? new DateTime(((NullableTimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) : new DateTime(((TimeHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
                        RelDataType type = typeFactory.createSqlType(SqlTypeName.TIME, newCall.getType().getPrecision());
                        RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
                        return rexBuilder.makeLiteral(TimeString.fromCalendarFields(value), typeWithNullability, false);
                    }
                case TIMESTAMP:
                    {
                        Calendar value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? new DateTime(((NullableTimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null) : new DateTime(((TimeStampHolder) valueHolder).value, DateTimeZone.UTC).toCalendar(null);
                        RelDataType type = typeFactory.createSqlType(SqlTypeName.TIMESTAMP, newCall.getType().getPrecision());
                        RelDataType typeWithNullability = typeFactory.createTypeWithNullability(type, newCall.getType().isNullable());
                        return rexBuilder.makeLiteral(TimestampString.fromCalendarFields(value), typeWithNullability, false);
                    }
                case INTERVALYEAR:
                    {
                        BigDecimal value = (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) ? new BigDecimal(((NullableIntervalYearHolder) valueHolder).value) : new BigDecimal(((IntervalYearHolder) valueHolder).value);
                        return rexBuilder.makeLiteral(value, TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_YEAR_MONTH, newCall.getType().isNullable()), false);
                    }
                case INTERVALDAY:
                    {
                        int days;
                        int milliseconds;
                        if (materializedExpr.getMajorType().getMode() == TypeProtos.DataMode.OPTIONAL) {
                            NullableIntervalDayHolder intervalDayOut = (NullableIntervalDayHolder) valueHolder;
                            days = intervalDayOut.days;
                            milliseconds = intervalDayOut.milliseconds;
                        } else {
                            IntervalDayHolder intervalDayOut = (IntervalDayHolder) valueHolder;
                            days = intervalDayOut.days;
                            milliseconds = intervalDayOut.milliseconds;
                        }
                        return rexBuilder.makeLiteral(new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY, newCall.getType().isNullable()), false);
                    }
                // Fall back to returning the original expression for any type not handled above,
                // as new types may be added in the future.
                default:
                    logger.debug("Constant expression not folded due to return type {}, complete expression: {}", materializedExpr.getMajorType(), ExpressionStringBuilder.toString(materializedExpr));
                    return newCall;
            }
        };
        reducedValues.add(literator.apply(output));
    }
}
Also used : RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) DateTimeZone(org.joda.time.DateTimeZone) VarDecimalHolder(org.apache.drill.exec.expr.holders.VarDecimalHolder) ExpressionTreeMaterializer(org.apache.drill.exec.expr.ExpressionTreeMaterializer) UserException(org.apache.drill.common.exceptions.UserException) TypeHelper(org.apache.drill.exec.expr.TypeHelper) VarCharHolder(org.apache.drill.exec.expr.holders.VarCharHolder) StringFunctionHelpers(org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers) NullableVarCharHolder(org.apache.drill.exec.expr.holders.NullableVarCharHolder) NullableVarDecimalHolder(org.apache.drill.exec.expr.holders.NullableVarDecimalHolder) FunctionImplementationRegistry(org.apache.drill.exec.expr.fn.FunctionImplementationRegistry) ExpressionStringBuilder(org.apache.drill.common.expression.ExpressionStringBuilder) NullableIntervalDayHolder(org.apache.drill.exec.expr.holders.NullableIntervalDayHolder) BigDecimal(java.math.BigDecimal) UdfUtilities(org.apache.drill.exec.ops.UdfUtilities) RexNode(org.apache.calcite.rex.RexNode) Decimal9Holder(org.apache.drill.exec.expr.holders.Decimal9Holder) DateHolder(org.apache.drill.exec.expr.holders.DateHolder) NullableIntervalYearHolder(org.apache.drill.exec.expr.holders.NullableIntervalYearHolder) DrillBuf(io.netty.buffer.DrillBuf) BigIntHolder(org.apache.drill.exec.expr.holders.BigIntHolder) BigInteger(java.math.BigInteger) NullableBigIntHolder(org.apache.drill.exec.expr.holders.NullableBigIntHolder) NullableDecimal9Holder(org.apache.drill.exec.expr.holders.NullableDecimal9Holder) Decimal18Holder(org.apache.drill.exec.expr.holders.Decimal18Holder) DateString(org.apache.calcite.util.DateString) Decimal28SparseHolder(org.apache.drill.exec.expr.holders.Decimal28SparseHolder) NullableFloat4Holder(org.apache.drill.exec.expr.holders.NullableFloat4Holder) TimestampString(org.apache.calcite.util.TimestampString) ErrorCollectorImpl(org.apache.drill.common.expression.ErrorCollectorImpl) LogicalExpression(org.apache.drill.common.expression.LogicalExpression) TypeProtos(org.apache.drill.common.types.TypeProtos) List(java.util.List) NullableTimeStampHolder(org.apache.drill.exec.expr.holders.NullableTimeStampHolder) ValueHolder(org.apache.drill.exec.expr.holders.ValueHolder) Decimal38SparseHolder(org.apache.drill.exec.expr.holders.Decimal38SparseHolder) BitHolder(org.apache.drill.exec.expr.holders.BitHolder) NullableFloat8Holder(org.apache.drill.exec.expr.holders.NullableFloat8Holder) IntervalYearHolder(org.apache.drill.exec.expr.holders.IntervalYearHolder) Float8Holder(org.apache.drill.exec.expr.holders.Float8Holder) Function(java.util.function.Function) Float4Holder(org.apache.drill.exec.expr.holders.Float4Holder) TimeString(org.apache.calcite.util.TimeString) Calendar(java.util.Calendar) InterpreterEvaluator(org.apache.drill.exec.expr.fn.interpreter.InterpreterEvaluator) TimeHolder(org.apache.drill.exec.expr.holders.TimeHolder) NullableBitHolder(org.apache.drill.exec.expr.holders.NullableBitHolder) IntHolder(org.apache.drill.exec.expr.holders.IntHolder) RexExecutor(org.apache.calcite.rex.RexExecutor) RelDataType(org.apache.calcite.rel.type.RelDataType) NullableDecimal18Holder(org.apache.drill.exec.expr.holders.NullableDecimal18Holder) DateUtilities(org.apache.drill.exec.vector.DateUtilities) NullableDecimal38SparseHolder(org.apache.drill.exec.expr.holders.NullableDecimal38SparseHolder) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) NullableDecimal28SparseHolder(org.apache.drill.exec.expr.holders.NullableDecimal28SparseHolder) TypeInferenceUtils(org.apache.drill.exec.planner.sql.TypeInferenceUtils) RexBuilder(org.apache.calcite.rex.RexBuilder) DateTime(org.joda.time.DateTime) RelNode(org.apache.calcite.rel.RelNode) NullableTimeHolder(org.apache.drill.exec.expr.holders.NullableTimeHolder) ImmutableList(org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList) IntervalDayHolder(org.apache.drill.exec.expr.holders.IntervalDayHolder) NullableIntHolder(org.apache.drill.exec.expr.holders.NullableIntHolder) PlannerSettings(org.apache.drill.exec.planner.physical.PlannerSettings) TimeStampHolder(org.apache.drill.exec.expr.holders.TimeStampHolder) NullableDateHolder(org.apache.drill.exec.expr.holders.NullableDateHolder)
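
For reference, the literal-building step that reduce performs per type can be reproduced with the plain Calcite API alone. The following is a minimal, self-contained sketch (the class name and the hard-coded value 42 are illustrative, not part of Drill): it obtains the RelDataTypeFactory from the RexBuilder, builds a nullable INTEGER type, and emits either a typed literal or a typed null literal, mirroring the makeLiteral/makeNullLiteral calls above.

import java.math.BigDecimal;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class ConstantLiteralSketch {
    public static void main(String[] args) {
        // Stand-alone type factory and RexBuilder; inside a planner these come from the cluster.
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder rexBuilder = new RexBuilder(typeFactory);

        // Nullable INTEGER target type, analogous to createCalciteTypeWithNullability(..., true).
        RelDataType nullableInt =
                typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.INTEGER), true);

        // A folded non-null value becomes a typed literal...
        RexNode folded = rexBuilder.makeLiteral(new BigDecimal(42), nullableInt, false);
        // ...while a null result becomes a typed null literal.
        RexNode foldedNull = rexBuilder.makeNullLiteral(nullableInt);

        System.out.println(folded + " : " + folded.getType());
        System.out.println(foldedNull + " : " + foldedNull.getType());
    }
}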

Example 12 with RelDataTypeFactory

use of org.apache.calcite.rel.type.RelDataTypeFactory in project drill by apache.

the class ProjectPrel method prepareForLateralUnnestPipeline.

@Override
public Prel prepareForLateralUnnestPipeline(List<RelNode> children) {
    RelDataTypeFactory typeFactory = this.getCluster().getTypeFactory();
    RexBuilder builder = this.getCluster().getRexBuilder();
    List<RexNode> projects = Lists.newArrayList();
    projects.add(builder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0));
    // right-shift the previous field indices to make room for the implicit field added at index 0.
    projects.addAll(DrillRelOptUtil.transformExprs(builder, this.getProjects(), DrillRelOptUtil.rightShiftColsInRowType(this.getInput().getRowType())));
    List<String> fieldNames = new ArrayList<>();
    List<RelDataType> fieldTypes = new ArrayList<>();
    fieldNames.add("$drill_implicit_field$");
    fieldTypes.add(typeFactory.createSqlType(SqlTypeName.INTEGER));
    for (RelDataTypeField field : this.rowType.getFieldList()) {
        fieldNames.add(field.getName());
        fieldTypes.add(field.getType());
    }
    RelDataType newRowType = typeFactory.createStructType(fieldTypes, fieldNames);
    return (Prel) this.copy(this.getTraitSet(), children.get(0), projects, newRowType);
}
Also used : RelDataTypeField(org.apache.calcite.rel.type.RelDataTypeField) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) ArrayList(java.util.ArrayList) RexBuilder(org.apache.calcite.rex.RexBuilder) RelDataType(org.apache.calcite.rel.type.RelDataType) RexNode(org.apache.calcite.rex.RexNode)
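
The same prepend-and-shift pattern can be sketched with stock Calcite utilities; here RexUtil.shift stands in for DrillRelOptUtil.transformExprs with rightShiftColsInRowType, and the column names are made up for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class PrependImplicitFieldSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder builder = new RexBuilder(typeFactory);

        // Pretend these were the existing project expressions: $0 and $1.
        RelDataType varchar = typeFactory.createSqlType(SqlTypeName.VARCHAR);
        List<RexNode> oldProjects =
                Arrays.asList(builder.makeInputRef(varchar, 0), builder.makeInputRef(varchar, 1));

        // The new implicit INTEGER field goes first; every old input reference is shifted right by one.
        List<RexNode> projects = new ArrayList<>();
        projects.add(builder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0));
        projects.addAll(RexUtil.shift(oldProjects, 1));

        // The new row type gains the implicit column in front, as in the method above.
        RelDataType newRowType = typeFactory.createStructType(
                Arrays.asList(typeFactory.createSqlType(SqlTypeName.INTEGER), varchar, varchar),
                Arrays.asList("$drill_implicit_field$", "a", "b"));
        System.out.println(projects + " : " + newRowType);
    }
}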

Example 13 with RelDataTypeFactory

use of org.apache.calcite.rel.type.RelDataTypeFactory in project samza by apache.

the class TranslatorContext method createExpressionCompiler.

private RexToJavaCompiler createExpressionCompiler(RelRoot relRoot) {
    RelDataTypeFactory dataTypeFactory = relRoot.project().getCluster().getTypeFactory();
    RexBuilder rexBuilder = new SamzaSqlRexBuilder(dataTypeFactory);
    return new RexToJavaCompiler(rexBuilder);
}
Also used : RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder) RexToJavaCompiler(org.apache.samza.sql.data.RexToJavaCompiler)
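
The only contract used here is that RexBuilder accepts the RelDataTypeFactory in its constructor, which is what allows a custom builder such as SamzaSqlRexBuilder to be substituted. A tiny sketch with a hypothetical subclass (CustomRexBuilder is illustrative, not Samza code):

import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;

public class RexBuilderSubclassSketch {
    // Hypothetical subclass: the point is only that RexBuilder takes the factory in its constructor.
    static class CustomRexBuilder extends RexBuilder {
        CustomRexBuilder(RelDataTypeFactory typeFactory) {
            super(typeFactory);
        }
    }

    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder rexBuilder = new CustomRexBuilder(typeFactory);
        System.out.println(rexBuilder.getTypeFactory() == typeFactory); // prints true
    }
}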

Example 14 with RelDataTypeFactory

use of org.apache.calcite.rel.type.RelDataTypeFactory in project flink by apache.

the class HiveParserCalcitePlanner method genUDTFPlan.

private RelNode genUDTFPlan(SqlOperator sqlOperator, String genericUDTFName, String outputTableAlias, List<String> colAliases, HiveParserQB qb, List<RexNode> operands, List<ColumnInfo> opColInfos, RelNode input, boolean inSelect, boolean isOuter) throws SemanticException {
    Preconditions.checkState(!isOuter || !inSelect, "OUTER is not supported for SELECT UDTF");
    // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
    HiveParserQBParseInfo qbp = qb.getParseInfo();
    if (inSelect && !qbp.getDestToGroupBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToDistributeBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToSortBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
    }
    if (inSelect && !qbp.getDestToClusterBy().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
    }
    if (inSelect && !qbp.getAliasToLateralViews().isEmpty()) {
        throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
    }
    LOG.debug("Table alias: " + outputTableAlias + " Col aliases: " + colAliases);
    // Create the object inspector for the input columns and initialize the UDTF
    RelDataType relDataType = HiveParserUtils.inferReturnTypeForOperands(sqlOperator, operands, cluster.getTypeFactory());
    DataType dataType = HiveParserUtils.toDataType(relDataType);
    StructObjectInspector outputOI = (StructObjectInspector) HiveInspectors.getObjectInspector(HiveTypeUtil.toHiveTypeInfo(dataType, false));
    // this should only happen for select udtf
    if (outputTableAlias == null) {
        Preconditions.checkState(inSelect, "Table alias not specified for lateral view");
        String prefix = "select_" + genericUDTFName + "_alias_";
        int i = 0;
        while (qb.getAliases().contains(prefix + i)) {
            i++;
        }
        outputTableAlias = prefix + i;
    }
    if (colAliases.isEmpty()) {
        // user did not specify alias names, infer names from outputOI
        for (StructField field : outputOI.getAllStructFieldRefs()) {
            colAliases.add(field.getFieldName());
        }
    }
    // Make sure that the number of column aliases in the AS clause matches the number of
    // columns output by the UDTF
    int numOutputCols = outputOI.getAllStructFieldRefs().size();
    int numSuppliedAliases = colAliases.size();
    if (numOutputCols != numSuppliedAliases) {
        throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg("expected " + numOutputCols + " aliases " + "but got " + numSuppliedAliases));
    }
    // Generate the output column info's / row resolver using internal names.
    ArrayList<ColumnInfo> udtfOutputCols = new ArrayList<>();
    Iterator<String> colAliasesIter = colAliases.iterator();
    for (StructField sf : outputOI.getAllStructFieldRefs()) {
        String colAlias = colAliasesIter.next();
        assert (colAlias != null);
        // Since the UDTF operator feeds into a LVJ operator that will rename all the internal names,
        // we can just use field name from the UDTF's OI as the internal name
        ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), outputTableAlias, false);
        udtfOutputCols.add(col);
    }
    // Create the row resolver for the table function scan
    HiveParserRowResolver udtfOutRR = new HiveParserRowResolver();
    for (int i = 0; i < udtfOutputCols.size(); i++) {
        udtfOutRR.put(outputTableAlias, colAliases.get(i), udtfOutputCols.get(i));
    }
    // Build row type from field <type, name>
    RelDataType retType = HiveParserTypeConverter.getType(cluster, udtfOutRR, null);
    List<RelDataType> argTypes = new ArrayList<>();
    RelDataTypeFactory dtFactory = cluster.getRexBuilder().getTypeFactory();
    for (ColumnInfo ci : opColInfos) {
        argTypes.add(HiveParserUtils.toRelDataType(ci.getType(), dtFactory));
    }
    SqlOperator calciteOp = HiveParserSqlFunctionConverter.getCalciteFn(genericUDTFName, argTypes, retType, false);
    RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, operands);
    // convert the rex call
    TableFunctionConverter udtfConverter = new TableFunctionConverter(cluster, input, frameworkConfig.getOperatorTable(), catalogReader.nameMatcher());
    RexCall convertedCall = (RexCall) rexNode.accept(udtfConverter);
    SqlOperator convertedOperator = convertedCall.getOperator();
    Preconditions.checkState(convertedOperator instanceof SqlUserDefinedTableFunction, "Expect operator to be " + SqlUserDefinedTableFunction.class.getSimpleName() + ", actually got " + convertedOperator.getClass().getSimpleName());
    // TODO: how to decide this?
    Type elementType = Object[].class;
    // create LogicalTableFunctionScan
    RelNode tableFunctionScan = LogicalTableFunctionScan.create(input.getCluster(), Collections.emptyList(), convertedCall, elementType, retType, null);
    // remember the table alias for the UDTF so that we can reference the cols later
    qb.addAlias(outputTableAlias);
    RelNode correlRel;
    RexBuilder rexBuilder = cluster.getRexBuilder();
    // find correlation in the converted call
    Pair<List<CorrelationId>, ImmutableBitSet> correlUse = getCorrelationUse(convertedCall);
    // create correlate node
    if (correlUse == null) {
        correlRel = plannerContext.createRelBuilder(catalogManager.getCurrentCatalog(), catalogManager.getCurrentDatabase()).push(input).push(tableFunctionScan).join(isOuter ? JoinRelType.LEFT : JoinRelType.INNER, rexBuilder.makeLiteral(true)).build();
    } else {
        if (correlUse.left.size() > 1) {
            tableFunctionScan = DeduplicateCorrelateVariables.go(rexBuilder, correlUse.left.get(0), Util.skip(correlUse.left), tableFunctionScan);
        }
        correlRel = LogicalCorrelate.create(input, tableFunctionScan, correlUse.left.get(0), correlUse.right, isOuter ? JoinRelType.LEFT : JoinRelType.INNER);
    }
    // Add new rel & its RR to the maps
    relToHiveColNameCalcitePosMap.put(tableFunctionScan, buildHiveToCalciteColumnMap(udtfOutRR));
    relToRowResolver.put(tableFunctionScan, udtfOutRR);
    HiveParserRowResolver correlRR = HiveParserRowResolver.getCombinedRR(relToRowResolver.get(input), relToRowResolver.get(tableFunctionScan));
    relToHiveColNameCalcitePosMap.put(correlRel, buildHiveToCalciteColumnMap(correlRR));
    relToRowResolver.put(correlRel, correlRR);
    if (!inSelect) {
        return correlRel;
    }
    // create project node
    List<RexNode> projects = new ArrayList<>();
    HiveParserRowResolver projectRR = new HiveParserRowResolver();
    int j = 0;
    for (int i = input.getRowType().getFieldCount(); i < correlRel.getRowType().getFieldCount(); i++) {
        projects.add(cluster.getRexBuilder().makeInputRef(correlRel, i));
        ColumnInfo inputColInfo = correlRR.getRowSchema().getSignature().get(i);
        String colAlias = inputColInfo.getAlias();
        ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(j++), inputColInfo.getObjectInspector(), null, false);
        projectRR.put(null, colAlias, colInfo);
    }
    RelNode projectNode = LogicalProject.create(correlRel, Collections.emptyList(), projects, tableFunctionScan.getRowType());
    relToHiveColNameCalcitePosMap.put(projectNode, buildHiveToCalciteColumnMap(projectRR));
    relToRowResolver.put(projectNode, projectRR);
    return projectNode;
}
Also used : ImmutableBitSet(org.apache.calcite.util.ImmutableBitSet) SqlOperator(org.apache.calcite.sql.SqlOperator) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) RelDataType(org.apache.calcite.rel.type.RelDataType) RexCall(org.apache.calcite.rex.RexCall) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) SqlUserDefinedTableFunction(org.apache.calcite.sql.validate.SqlUserDefinedTableFunction) HiveParserRowResolver(org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) DataType(org.apache.flink.table.types.DataType) RexBuilder(org.apache.calcite.rex.RexBuilder) CompositeList(org.apache.calcite.util.CompositeList) List(java.util.List) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) JoinType(org.apache.hadoop.hive.ql.parse.JoinType) JoinRelType(org.apache.calcite.rel.core.JoinRelType) HiveParserBaseSemanticAnalyzer.obtainTableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.obtainTableType) Type(java.lang.reflect.Type) TableType(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.TableType) HiveParserQBParseInfo(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo) RelNode(org.apache.calcite.rel.RelNode) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) RexNode(org.apache.calcite.rex.RexNode)
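
Two of the Calcite building blocks used above, assembling a row type from name/type pairs and wrapping operands into a RexCall, can be sketched in isolation. SqlStdOperatorTable.PLUS stands in for the converted UDTF operator, and the column names and literal operands are illustrative.

import java.math.BigDecimal;
import java.util.Arrays;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class RowTypeAndCallSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder rexBuilder = new RexBuilder(typeFactory);

        // Build an output row type from <name, type> pairs, as genUDTFPlan does for the UDTF columns.
        RelDataType retType = typeFactory.builder()
                .add("col1", SqlTypeName.INTEGER)
                .add("col2", SqlTypeName.VARCHAR, 100)
                .build();

        // Wrap operands into a RexCall; PLUS is only a placeholder operator here.
        RexNode operand0 = rexBuilder.makeExactLiteral(BigDecimal.ONE);
        RexNode operand1 = rexBuilder.makeExactLiteral(BigDecimal.TEN);
        RexNode call = rexBuilder.makeCall(SqlStdOperatorTable.PLUS, Arrays.asList(operand0, operand1));

        System.out.println(retType + " / " + call);
    }
}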

Example 15 with RelDataTypeFactory

use of org.apache.calcite.rel.type.RelDataTypeFactory in project flink by apache.

the class HiveParserDMLHelper method createInsertOperationInfo.

public Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> createInsertOperationInfo(RelNode queryRelNode, Table destTable, Map<String, String> staticPartSpec, List<String> destSchema, boolean overwrite) throws SemanticException {
    // sanity check
    Preconditions.checkArgument(queryRelNode instanceof Project || queryRelNode instanceof Sort || queryRelNode instanceof LogicalDistribution, "Expect top RelNode to be Project, Sort, or LogicalDistribution, actually got " + queryRelNode);
    if (!(queryRelNode instanceof Project)) {
        RelNode parent = ((SingleRel) queryRelNode).getInput();
        // SEL + SORT or SEL + DIST + LIMIT
        Preconditions.checkArgument(parent instanceof Project || parent instanceof LogicalDistribution, "Expect input to be a Project or LogicalDistribution, actually got " + parent);
        if (parent instanceof LogicalDistribution) {
            RelNode grandParent = ((LogicalDistribution) parent).getInput();
            Preconditions.checkArgument(grandParent instanceof Project, "Expect input of LogicalDistribution to be a Project, actually got " + grandParent);
        }
    }
    // handle dest schema, e.g. insert into dest(.,.,.) select ...
    queryRelNode = handleDestSchema((SingleRel) queryRelNode, destTable, destSchema, staticPartSpec.keySet());
    // track each target col and its expected type
    RelDataTypeFactory typeFactory = plannerContext.getTypeFactory();
    LinkedHashMap<String, RelDataType> targetColToCalcType = new LinkedHashMap<>();
    List<TypeInfo> targetHiveTypes = new ArrayList<>();
    List<FieldSchema> allCols = new ArrayList<>(destTable.getCols());
    allCols.addAll(destTable.getPartCols());
    for (FieldSchema col : allCols) {
        TypeInfo hiveType = TypeInfoUtils.getTypeInfoFromTypeString(col.getType());
        targetHiveTypes.add(hiveType);
        targetColToCalcType.put(col.getName(), HiveParserTypeConverter.convert(hiveType, typeFactory));
    }
    // add static partitions to query source
    if (!staticPartSpec.isEmpty()) {
        if (queryRelNode instanceof Project) {
            queryRelNode = replaceProjectForStaticPart((Project) queryRelNode, staticPartSpec, destTable, targetColToCalcType);
        } else if (queryRelNode instanceof Sort) {
            Sort sort = (Sort) queryRelNode;
            RelNode oldInput = sort.getInput();
            RelNode newInput;
            if (oldInput instanceof LogicalDistribution) {
                newInput = replaceDistForStaticParts((LogicalDistribution) oldInput, destTable, staticPartSpec, targetColToCalcType);
            } else {
                newInput = replaceProjectForStaticPart((Project) oldInput, staticPartSpec, destTable, targetColToCalcType);
                // we may need to shift the field collations
                final int numDynmPart = destTable.getTTable().getPartitionKeys().size() - staticPartSpec.size();
                if (!sort.getCollation().getFieldCollations().isEmpty() && numDynmPart > 0) {
                    sort.replaceInput(0, null);
                    sort = LogicalSort.create(newInput, shiftRelCollation(sort.getCollation(), (Project) oldInput, staticPartSpec.size(), numDynmPart), sort.offset, sort.fetch);
                }
            }
            sort.replaceInput(0, newInput);
            queryRelNode = sort;
        } else {
            queryRelNode = replaceDistForStaticParts((LogicalDistribution) queryRelNode, destTable, staticPartSpec, targetColToCalcType);
        }
    }
    // add type conversions
    queryRelNode = addTypeConversions(plannerContext.getCluster().getRexBuilder(), queryRelNode, new ArrayList<>(targetColToCalcType.values()), targetHiveTypes, funcConverter);
    // create identifier
    List<String> targetTablePath = Arrays.asList(destTable.getDbName(), destTable.getTableName());
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return Tuple4.of(identifier, new PlannerQueryOperation(queryRelNode), staticPartSpec, overwrite);
}
Also used : PlannerQueryOperation(org.apache.flink.table.planner.operations.PlannerQueryOperation) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) RelDataType(org.apache.calcite.rel.type.RelDataType) SingleRel(org.apache.calcite.rel.SingleRel) LogicalDistribution(org.apache.flink.table.planner.plan.nodes.hive.LogicalDistribution) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) LinkedHashMap(java.util.LinkedHashMap) Project(org.apache.calcite.rel.core.Project) LogicalProject(org.apache.calcite.rel.logical.LogicalProject) RelNode(org.apache.calcite.rel.RelNode) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) LogicalSort(org.apache.calcite.rel.logical.LogicalSort) Sort(org.apache.calcite.rel.core.Sort) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
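
The targetColToCalcType bookkeeping boils down to mapping each destination column to a nullable Calcite type obtained from the factory. A minimal sketch with made-up column names follows; the real code derives these types from Hive TypeInfo via HiveParserTypeConverter.convert, so this only approximates the simple-type cases.

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class TargetColumnTypeSketch {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

        // Hive columns are nullable, so each target type is wrapped with nullability.
        Map<String, RelDataType> targetColToCalcType = new LinkedHashMap<>();
        targetColToCalcType.put("id",
                typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BIGINT), true));
        targetColToCalcType.put("name",
                typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR, 255), true));
        targetColToCalcType.put("ds",
                typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.VARCHAR, 255), true));

        targetColToCalcType.forEach((col, type) -> System.out.println(col + " -> " + type.getFullTypeString()));
    }
}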

Aggregations

RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory) 263
RelDataType (org.apache.calcite.rel.type.RelDataType) 211
RexNode (org.apache.calcite.rex.RexNode) 78
RexBuilder (org.apache.calcite.rex.RexBuilder) 67
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField) 55
ArrayList (java.util.ArrayList) 51
SqlTypeFactoryImpl (org.apache.calcite.sql.type.SqlTypeFactoryImpl) 43
RelNode (org.apache.calcite.rel.RelNode) 40
Test (org.junit.jupiter.api.Test) 36
List (java.util.List) 27
SqlNode (org.apache.calcite.sql.SqlNode) 26
BigDecimal (java.math.BigDecimal) 21
SqlTypeName (org.apache.calcite.sql.type.SqlTypeName) 21
RelOptCluster (org.apache.calcite.plan.RelOptCluster) 19
AggregateCall (org.apache.calcite.rel.core.AggregateCall) 17
ImmutableList (com.google.common.collect.ImmutableList) 15
Map (java.util.Map) 15
JoinRelType (org.apache.calcite.rel.core.JoinRelType) 15
SqlOperator (org.apache.calcite.sql.SqlOperator) 15
SqlAggFunction (org.apache.calcite.sql.SqlAggFunction) 14