Example 71 with RelDataTypeFactory

Use of org.apache.calcite.rel.type.RelDataTypeFactory in project hazelcast by hazelcast.

The deriveType method of the class HazelcastInOperator:

@Override
public RelDataType deriveType(SqlValidator validator, SqlValidatorScope scope, SqlCall call) {
    final List<SqlNode> operands = call.getOperandList();
    assert operands.size() == 2;
    final SqlNode left = operands.get(0);
    final SqlNode right = operands.get(1);
    final RelDataTypeFactory typeFactory = validator.getTypeFactory();
    RelDataType leftType = validator.deriveType(scope, left);
    RelDataType rightType;
    // Derive type for RHS.
    if (right instanceof SqlNodeList) {
        // Handle the 'IN (expr, ...)' form.
        List<RelDataType> rightTypeList = new ArrayList<>();
        SqlNodeList nodeList = (SqlNodeList) right;
        for (SqlNode node : nodeList) {
            if (node instanceof SqlLiteral) {
                SqlLiteral lit = (SqlLiteral) node;
                // We do not support raw NULL literals in the IN right-hand-side list.
                if (lit.getValue() == null) {
                    throw validator.newValidationError(right, HZRESOURCE.noRawNullsAllowed());
                }
            }
            RelDataType nodeType = validator.deriveType(scope, node);
            rightTypeList.add(nodeType);
        }
        rightType = typeFactory.leastRestrictive(rightTypeList);
        // Same rules as the VALUES operator (per SQL:2003 Part 2 Section 8.4, <in predicate>).
        if (null == rightType && validator.config().typeCoercionEnabled()) {
            // Apply an implicit type cast if it is allowed.
            rightType = validator.getTypeCoercion().getWiderTypeFor(rightTypeList, false);
        }
        if (null == rightType) {
            throw validator.newValidationError(right, RESOURCE.incompatibleTypesInList());
        }
        // Record the RHS type for use by SqlToRelConverter.
        validator.setValidatedNodeType(nodeList, rightType);
    } else {
        // We do not support subqueries for the IN operator.
        throw validator.newValidationError(call, HZRESOURCE.noSubQueryAllowed());
    }
    HazelcastCallBinding hazelcastCallBinding = prepareBinding(new SqlCallBinding(validator, scope, call));
    // Coerce type first.
    if (hazelcastCallBinding.isTypeCoercionEnabled()) {
        boolean coerced = hazelcastCallBinding.getValidator().getTypeCoercion().inOperationCoercion(hazelcastCallBinding);
        if (coerced) {
            // Update the node data type if we coerced any type.
            leftType = validator.deriveType(scope, call.operand(0));
            rightType = validator.deriveType(scope, call.operand(1));
        }
    }
    // Now check that the left expression is compatible with the
    // type of the list. Same strategy as the '=' operator.
    // Normalize the types on both sides to be row types
    // for the purposes of compatibility-checking.
    RelDataType leftRowType = SqlTypeUtil.promoteToRowType(typeFactory, leftType, null);
    RelDataType rightRowType = SqlTypeUtil.promoteToRowType(typeFactory, rightType, null);
    final ComparableOperandTypeChecker checker = (ComparableOperandTypeChecker) OperandTypes.COMPARABLE_UNORDERED_COMPARABLE_UNORDERED;
    if (!checker.checkOperandTypes(new ExplicitOperatorBinding(hazelcastCallBinding, ImmutableList.of(leftRowType, rightRowType)), hazelcastCallBinding)) {
        throw validator.newValidationError(call, RESOURCE.incompatibleValueType(SqlStdOperatorTable.IN.getName()));
    }
    return typeFactory.createTypeWithNullability(typeFactory.createSqlType(SqlTypeName.BOOLEAN), anyNullable(leftRowType.getFieldList()) || anyNullable(rightRowType.getFieldList()));
}
Also used : ArrayList(java.util.ArrayList) RelDataType(org.apache.calcite.rel.type.RelDataType) ExplicitOperatorBinding(org.apache.calcite.sql.ExplicitOperatorBinding) ComparableOperandTypeChecker(org.apache.calcite.sql.type.ComparableOperandTypeChecker) HazelcastCallBinding(com.hazelcast.jet.sql.impl.validate.HazelcastCallBinding) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) SqlCallBinding(org.apache.calcite.sql.SqlCallBinding) SqlNodeList(org.apache.calcite.sql.SqlNodeList) SqlLiteral(org.apache.calcite.sql.SqlLiteral) SqlNode(org.apache.calcite.sql.SqlNode)
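
For context, leastRestrictive is what collapses the IN-list element types into a single comparison type; when it returns null the code above falls back to type coercion and otherwise raises incompatibleTypesInList. A minimal, self-contained sketch of that behavior against a stock Calcite type factory (the class name and the particular types are illustrative choices, not taken from the Hazelcast code):

import java.util.Arrays;

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class LeastRestrictiveDemo {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
        RelDataType bigintType = typeFactory.createSqlType(SqlTypeName.BIGINT);
        // One common type for all IN-list elements: INTEGER widens to BIGINT.
        RelDataType widest = typeFactory.leastRestrictive(Arrays.asList(intType, bigintType));
        System.out.println(widest.getSqlTypeName());   // BIGINT
        // Incompatible element types yield null, the case the validator turns
        // into a coercion attempt or an incompatibleTypesInList error.
        RelDataType none = typeFactory.leastRestrictive(
                Arrays.asList(intType, typeFactory.createSqlType(SqlTypeName.BOOLEAN)));
        System.out.println(none);                      // null
    }
}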

Example 72 with RelDataTypeFactory

Use of org.apache.calcite.rel.type.RelDataTypeFactory in project hazelcast by hazelcast.

The inferOperandTypes method of the class HazelcastOperandTypeInference:

@Override
public void inferOperandTypes(SqlCallBinding callBinding, RelDataType returnType, RelDataType[] operandTypes) {
    SqlCall call = callBinding.getCall();
    if (ValidationUtil.hasAssignment(call)) {
        RelDataTypeFactory typeFactory = callBinding.getTypeFactory();
        RelDataType[] parameterTypes = new RelDataType[parametersByName.size()];
        for (int i = 0; i < call.operandCount(); i++) {
            SqlCall assignment = call.operand(i);
            SqlIdentifier id = assignment.operand(1);
            String name = id.getSimple();
            HazelcastTableFunctionParameter parameter = parametersByName.get(name);
            if (parameter != null) {
                SqlTypeName parameterType = parameter.type();
                parameterTypes[parameter.ordinal()] = toType(parameterType, typeFactory);
            } else {
                throw SqlUtil.newContextException(id.getParserPosition(), RESOURCE.unknownArgumentName(name));
            }
        }
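        // Relies on Stream.toArray copying the filtered, non-null parameter types into the
        // array returned by the generator, which here is the caller-supplied operandTypes,
        // so the inferred types end up in its leading slots.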
        // noinspection ResultOfMethodCallIgnored
        Arrays.stream(parameterTypes).filter(Objects::nonNull).toArray(ignored -> operandTypes);
    } else {
        positionalOperandTypeInference.inferOperandTypes(callBinding, returnType, operandTypes);
    }
}
Also used : SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) SqlCall(org.apache.calcite.sql.SqlCall) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RelDataType(org.apache.calcite.rel.type.RelDataType) HazelcastTableFunctionParameter(com.hazelcast.jet.sql.impl.schema.HazelcastTableFunctionParameter) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier)

Example 73 with RelDataTypeFactory

Use of org.apache.calcite.rel.type.RelDataTypeFactory in project hive by apache.

The replaceEmptyGroupAggr method of the class PlanModifierForASTConv:

private static void replaceEmptyGroupAggr(final RelNode rel, RelNode parent) {
    // If this function is called, the parent should only include constant expressions.
    List<RexNode> exps = parent instanceof Project ? ((Project) parent).getProjects() : Collections.emptyList();
    for (RexNode rexNode : exps) {
        if (!rexNode.accept(new HiveCalciteUtil.ConstantFinder())) {
            throw new RuntimeException("We expect " + parent.toString() + " to contain only constants. However, " + rexNode.toString() + " is " + rexNode.getKind());
        }
    }
    HiveAggregate oldAggRel = (HiveAggregate) rel;
    RelDataTypeFactory typeFactory = oldAggRel.getCluster().getTypeFactory();
    RelDataType longType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, typeFactory);
    RelDataType intType = TypeConverter.convert(TypeInfoFactory.intTypeInfo, typeFactory);
    // Create the dummy aggregation.
    SqlAggFunction countFn = SqlFunctionConverter.getCalciteAggFn("count", false, ImmutableList.of(intType), longType);
    // TODO: Using 0 might be wrong; might need to walk down to find the
    // proper index of a dummy.
    List<Integer> argList = ImmutableList.of(0);
    AggregateCall dummyCall = new AggregateCall(countFn, false, argList, longType, null);
    Aggregate newAggRel = oldAggRel.copy(oldAggRel.getTraitSet(), oldAggRel.getInput(), oldAggRel.indicator, oldAggRel.getGroupSet(), oldAggRel.getGroupSets(), ImmutableList.of(dummyCall));
    RelNode select = introduceDerivedTable(newAggRel);
    parent.replaceInput(0, select);
}
Also used : RelDataType(org.apache.calcite.rel.type.RelDataType) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) AggregateCall(org.apache.calcite.rel.core.AggregateCall) Project(org.apache.calcite.rel.core.Project) HiveProject(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject) HiveAggregate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate) RelNode(org.apache.calcite.rel.RelNode) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveAggregate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate) Aggregate(org.apache.calcite.rel.core.Aggregate) RexNode(org.apache.calcite.rex.RexNode)
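
For orientation, the rewrite above swaps an aggregate over an empty group set for a dummy count aggregate wrapped in a derived table. Hive-specific helpers aside, the shape of such a dummy aggregate can be sketched with stock Calcite's RelBuilder; this is an illustrative stand-in (the class name, column name, and literal row are invented), not the Hive code path:

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RelBuilder;

public class DummyCountAggregateDemo {
    public static void main(String[] args) {
        SchemaPlus rootSchema = Frameworks.createRootSchema(true);
        RelBuilder builder = RelBuilder.create(
                Frameworks.newConfigBuilder().defaultSchema(rootSchema).build());
        // Empty group key plus a single COUNT over column 0: the same shape as the
        // dummy aggregate the Hive rewrite builds before wrapping it in a derived table.
        RelNode agg = builder
                .values(new String[] {"c0"}, 1)
                .aggregate(builder.groupKey(), builder.count(false, "dummy", builder.field(0)))
                .build();
        System.out.println(RelOptUtil.toString(agg));
    }
}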

Example 74 with RelDataTypeFactory

Use of org.apache.calcite.rel.type.RelDataTypeFactory in project hive by apache.

The convert method of the class RexNodeConverter:

protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    final RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    final PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    final RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);
    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();
    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);
    RexNode calciteLiteral = null;
    // If value is null, the type should also be VOID.
    if (value == null) {
        hiveTypeCategory = PrimitiveCategory.VOID;
    }
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch(hiveTypeCategory) {
        case BOOLEAN:
            calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
            break;
        case BYTE:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
            break;
        case SHORT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
            break;
        case INT:
            calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
            break;
        case LONG:
            calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
            break;
        case DECIMAL:
            if (value instanceof HiveDecimal) {
                value = ((HiveDecimal) value).bigDecimalValue();
            } else if (value instanceof Decimal128) {
                value = ((Decimal128) value).toBigDecimal();
            }
            if (value == null) {
                // The value could not be converted to a decimal; reject the
                // expression rather than emitting a NULL literal.
                throw new CalciteSemanticException("Expression " + literal.getExprString() + " is not a valid decimal", UnsupportedFeature.Invalid_decimal);
            // TODO: return createNullLiteral(literal);
            }
            calciteLiteral = rexBuilder.makeExactLiteral((BigDecimal) value, calciteDataType);
            break;
        case FLOAT:
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Float.toString((Float) value)), calciteDataType);
            break;
        case DOUBLE:
            // TODO: The best solution is to support NaN in expression reduction.
            if (Double.isNaN((Double) value)) {
                throw new CalciteSemanticException("NaN", UnsupportedFeature.Invalid_decimal);
            }
            calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal(Double.toString((Double) value)), calciteDataType);
            break;
        case CHAR:
            if (value instanceof HiveChar) {
                value = ((HiveChar) value).getValue();
            }
            final int lengthChar = TypeInfoUtils.getCharacterLengthForType(hiveType);
            RelDataType charType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.CHAR, lengthChar), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), charType, false);
            break;
        case VARCHAR:
            if (value instanceof HiveVarchar) {
                value = ((HiveVarchar) value).getValue();
            }
            final int lengthVarchar = TypeInfoUtils.getCharacterLengthForType(hiveType);
            RelDataType varcharType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, lengthVarchar), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), varcharType, true);
            break;
        case STRING:
            RelDataType stringType = rexBuilder.getTypeFactory().createTypeWithCharsetAndCollation(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.VARCHAR, Integer.MAX_VALUE), Charset.forName(ConversionUtil.NATIVE_UTF16_CHARSET_NAME), SqlCollation.IMPLICIT);
            calciteLiteral = rexBuilder.makeLiteral(RexNodeExprFactory.makeHiveUnicodeString((String) value), stringType, true);
            break;
        case DATE:
            final Date date = (Date) value;
            calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromDaysSinceEpoch(date.toEpochDay()));
            break;
        case TIMESTAMP:
            final TimestampString tsString;
            if (value instanceof Calendar) {
                tsString = TimestampString.fromCalendarFields((Calendar) value);
            } else {
                final Timestamp ts = (Timestamp) value;
                tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
            }
            // Must call makeLiteral, not makeTimestampLiteral
            // to have the RexBuilder.roundTime logic kick in
            calciteLiteral = rexBuilder.makeLiteral(tsString, rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP)), false);
            break;
        case TIMESTAMPLOCALTZ:
            final TimestampString tsLocalTZString;
            Instant i = ((TimestampTZ) value).getZonedDateTime().toInstant();
            tsLocalTZString = TimestampString.fromMillisSinceEpoch(i.toEpochMilli()).withNanos(i.getNano());
            calciteLiteral = rexBuilder.makeTimestampWithLocalTimeZoneLiteral(tsLocalTZString, rexBuilder.getTypeFactory().getTypeSystem().getDefaultPrecision(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE));
            break;
        case INTERVAL_YEAR_MONTH:
            // Calcite year-month literal value is months as BigDecimal
            BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
            calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
            break;
        case INTERVAL_DAY_TIME:
            // Calcite day-time interval is millis value as BigDecimal
            // Seconds converted to millis
            BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
            // Nanos converted to millis
            BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
            calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, new SqlParserPos(1, 1)));
            break;
        case VOID:
            calciteLiteral = rexBuilder.makeLiteral(null, calciteDataType, true);
            break;
        case BINARY:
        case UNKNOWN:
        default:
            throw new RuntimeException("Unsupported Literal");
    }
    return calciteLiteral;
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) RelDataType(org.apache.calcite.rel.type.RelDataType) GenericUDFTimestamp(org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) SqlParserPos(org.apache.calcite.sql.parser.SqlParserPos) SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) Calendar(java.util.Calendar) Instant(java.time.Instant) Decimal128(org.apache.hadoop.hive.common.type.Decimal128) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) BigDecimal(java.math.BigDecimal) Date(org.apache.hadoop.hive.common.type.Date) HiveFloorDate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFloorDate) HiveExtractDate(org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExtractDate) GenericUDFToDate(org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDate) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) TimestampString(org.apache.calcite.util.TimestampString) RexNode(org.apache.calcite.rex.RexNode)
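
All of the branches above ultimately go through a small set of RexBuilder literal factories. A short, self-contained sketch of a few of the same calls outside Hive (the class name and the sample values are invented for illustration):

import java.math.BigDecimal;

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.DateString;

public class RexLiteralDemo {
    public static void main(String[] args) {
        JavaTypeFactoryImpl typeFactory = new JavaTypeFactoryImpl();
        RexBuilder rexBuilder = new RexBuilder(typeFactory);

        // BOOLEAN literal, as in the BOOLEAN case above.
        RexNode boolLit = rexBuilder.makeLiteral(true);

        // Exact numeric literal with an explicit target type, as in the BYTE/SHORT/DECIMAL cases.
        RelDataType smallintType = typeFactory.createSqlType(SqlTypeName.SMALLINT);
        RexNode shortLit = rexBuilder.makeExactLiteral(new BigDecimal(42), smallintType);

        // DATE literal built from days since the epoch, as in the DATE case.
        RexNode dateLit = rexBuilder.makeDateLiteral(DateString.fromDaysSinceEpoch(19000));

        System.out.println(boolLit + " " + shortLit + " " + dateLit);
    }
}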

Example 75 with RelDataTypeFactory

Use of org.apache.calcite.rel.type.RelDataTypeFactory in project hive by apache.

The testCBOMaxNumToCNF1 method of the class TestCBOMaxNumToCNF:

@Test
public void testCBOMaxNumToCNF1() {
    // OR(=($0, 1), AND(=($0, 0), =($1, 8)))
    // the transformation creates 7 nodes: AND(OR(=($0, 1), =($0, 0)), OR(=($0, 1), =($1, 8)));
    // this stays within maxNumNodesCNF, so the conversion is applied
    final RelDataTypeFactory typeFactory = new JavaTypeFactoryImpl();
    final RexBuilder rexBuilder = new RexBuilder(typeFactory);
    final RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.OR,
            rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
                    rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0),
                    rexBuilder.makeLiteral(1, typeFactory.createSqlType(SqlTypeName.INTEGER), false)),
            rexBuilder.makeCall(SqlStdOperatorTable.AND,
                    rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
                            rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 0),
                            rexBuilder.makeLiteral(0, typeFactory.createSqlType(SqlTypeName.INTEGER), false)),
                    rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
                            rexBuilder.makeInputRef(typeFactory.createSqlType(SqlTypeName.INTEGER), 1),
                            rexBuilder.makeLiteral(8, typeFactory.createSqlType(SqlTypeName.INTEGER), false))));
    final RexNode newCond = RexUtil.toCnf(rexBuilder, maxNumNodesCNF, cond);
    assertEquals(newCond.toString(), "AND(OR(=($0, 1), =($0, 0)), OR(=($0, 1), =($1, 8)))");
}
Also used : JavaTypeFactoryImpl(org.apache.calcite.jdbc.JavaTypeFactoryImpl) RelDataTypeFactory(org.apache.calcite.rel.type.RelDataTypeFactory) RexBuilder(org.apache.calcite.rex.RexBuilder) RexNode(org.apache.calcite.rex.RexNode) Test(org.junit.Test)
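
As a companion to the test, the bounded overload used above can be contrasted with the unbounded one. A self-contained sketch under a stock Calcite setup (the class name and the budget of 2 are arbitrary; the predicate mirrors the one in the test):

import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;

public class ToCnfBudgetDemo {
    public static void main(String[] args) {
        final RexBuilder rexBuilder = new RexBuilder(new JavaTypeFactoryImpl());
        final RelDataType intType = rexBuilder.getTypeFactory().createSqlType(SqlTypeName.INTEGER);
        final RexNode ref0 = rexBuilder.makeInputRef(intType, 0);
        final RexNode ref1 = rexBuilder.makeInputRef(intType, 1);
        // OR(=($0, 1), AND(=($0, 0), =($1, 8))), the same predicate as in the test above.
        final RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.OR,
                rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, ref0,
                        rexBuilder.makeLiteral(1, intType, false)),
                rexBuilder.makeCall(SqlStdOperatorTable.AND,
                        rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, ref0,
                                rexBuilder.makeLiteral(0, intType, false)),
                        rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, ref1,
                                rexBuilder.makeLiteral(8, intType, false))));
        // Unbounded conversion: prints the CNF form asserted in the test.
        System.out.println(RexUtil.toCnf(rexBuilder, cond));
        // Bounded conversion with a tiny budget: the rewrite would exceed the node
        // count, so the original expression is returned unchanged.
        System.out.println(RexUtil.toCnf(rexBuilder, 2, cond));
    }
}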

Aggregations

RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 128
RelDataType (org.apache.calcite.rel.type.RelDataType): 97
RexNode (org.apache.calcite.rex.RexNode): 47
RexBuilder (org.apache.calcite.rex.RexBuilder): 38
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField): 30
Test (org.junit.Test): 22
ArrayList (java.util.ArrayList): 20
RelNode (org.apache.calcite.rel.RelNode): 20
AggregateCall (org.apache.calcite.rel.core.AggregateCall): 15
SqlNode (org.apache.calcite.sql.SqlNode): 14
SqlTypeFactoryImpl (org.apache.calcite.sql.type.SqlTypeFactoryImpl): 14
BigDecimal (java.math.BigDecimal): 12
List (java.util.List): 10
RelDataTypeFactory (org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.rel.type.RelDataTypeFactory): 8
SqlAggFunction (org.apache.calcite.sql.SqlAggFunction): 8
SqlTypeName (org.apache.calcite.sql.type.SqlTypeName): 8
Calendar (java.util.Calendar): 7
RexInputRef (org.apache.calcite.rex.RexInputRef): 7
Pair (org.apache.calcite.util.Pair): 7
TimestampString (org.apache.calcite.util.TimestampString): 7