Search in sources :

Example 26 with DataTypeDescriptor

use of org.apache.derby.iapi.types.DataTypeDescriptor in project derby by apache.

the class ColumnDefinitionNode method defaultTypeIsValid.

/**
 * Check the validity of the default for this node
 *
 * @param columnType TypeId of the target column.
 * @param columnDesc Description of the type of the
 *		target column.
 * @param defaultType TypeId of the default node.
 * @param defaultNode Parsed ValueNode for the default value.
 * @param defaultText Unparsed default value (as entered
 * 		by user).
 * @return True if the defaultNode abides by the restrictions
 * 	imposed by DB2 on default constants; false otherwise.
 */
boolean defaultTypeIsValid(TypeId columnType, DataTypeDescriptor columnDesc, TypeId defaultType, ValueNode defaultNode, String defaultText) throws StandardException {
    /* We can use info about the way the parser works
		 * to guide this process a little (see the getNumericNode()
		 * method in sqlgrammar.jj):
		 *
		 * 1) Tinyint and Smallints are both parsed as "INT" types,
	 	 *	  while integers larger than a basic "INT" are parsed into
		 *	  "LONGINT" or, if needed, "DECIMAL".
		 * 2) Floats, doubles, and decimals with fractional parts
		 *	  are all parsed as "DECIMAL".
		 * 3) All strings are parsed as "CHAR" constants (no varchar
		 *	  or any others; see stringLiteral() method in
		 *	  sqlgrammar.jj).
		 */
    int colType = columnType.getTypeFormatId();
    int defType = (defaultType == null ? -1 : defaultType.getTypeFormatId());
    if (!defaultNode.isConstantExpression()) {
        // then we have a built-in function, such as "user"
        // or "current schema".  If the function is a datetime
        // value function, then we don't need any special
        // action; however, if it's a "user" or "current schema"
        // function, then the column must be a char type with
        // minimum lengths matching those of DB2 (note that
        // such limits are ONLY enforced on defaults, not at
        // normal insertion time).
        boolean charCol = ((colType == StoredFormatIds.CHAR_TYPE_ID) || (colType == StoredFormatIds.VARCHAR_TYPE_ID) || (colType == StoredFormatIds.LONGVARCHAR_TYPE_ID));
        if (defaultNode instanceof SpecialFunctionNode) {
            switch(((SpecialFunctionNode) defaultNode).kind) {
                case SpecialFunctionNode.K_USER:
                case SpecialFunctionNode.K_CURRENT_USER:
                case SpecialFunctionNode.K_CURRENT_ROLE:
                case SpecialFunctionNode.K_SESSION_USER:
                case SpecialFunctionNode.K_SYSTEM_USER:
                    // Note also: any size under 30 gives a warning in DB2.
                    return (charCol && (columnDesc.getMaximumWidth() >= Limits.DB2_MIN_COL_LENGTH_FOR_CURRENT_USER));
                case SpecialFunctionNode.K_CURRENT_SCHEMA:
                    // DB2 enforces min length of 128.
                    return (charCol && (columnDesc.getMaximumWidth() >= Limits.DB2_MIN_COL_LENGTH_FOR_CURRENT_SCHEMA));
                default:
                    // else, function not allowed.
                    return false;
            }
        }
    }
    switch(colType) {
        case StoredFormatIds.BOOLEAN_TYPE_ID:
            return (defaultNode instanceof BooleanConstantNode);
        case StoredFormatIds.INT_TYPE_ID:
            // The default is only valid if the literal was parsed as an INT value.
            return (defType == StoredFormatIds.INT_TYPE_ID);
        case StoredFormatIds.BIGINT_TYPE_ID:
            // Valid if the default was parsed as an INT or a BIGINT
            // (integers too large for INT are covered by the LONG_INT type).
            return ((defType == StoredFormatIds.INT_TYPE_ID) || (defType == StoredFormatIds.BIGINT_TYPE_ID));
        case StoredFormatIds.DECIMAL_TYPE_ID:
            if (defType == StoredFormatIds.DECIMAL_TYPE_ID) {
                // only valid if scale and precision are within
                // those of the column.  Note that scale here should
                // exclude any trailing 0's after the decimal point.
                DataTypeDescriptor defDesc = defaultNode.getTypeServices();
                int len = defaultText.length();
                int precision = defDesc.getPrecision();
                int scale = defDesc.getScale();
                // Count trailing zeros (up to the declared scale) so that
                // they don't count against the column's scale and precision.
                for (int i = 1; i <= defDesc.getScale(); i++) {
                    if (defaultText.charAt(len - i) != '0')
                        break;
                    scale--;
                    precision--;
                }
                return ((scale <= columnDesc.getScale()) && ((precision - scale) <= (columnDesc.getPrecision() - columnDesc.getScale())));
            } else if ((defType == StoredFormatIds.BIGINT_TYPE_ID) || (defType == StoredFormatIds.INT_TYPE_ID)) {
                // Integer defaults are allowed for DECIMAL columns; any
                // overflow is caught later, at insertion time.
                return true;
            } else
                // no other types allowed.
                return false;
        case StoredFormatIds.CHAR_TYPE_ID:
        case StoredFormatIds.VARCHAR_TYPE_ID:
        case StoredFormatIds.LONGVARCHAR_TYPE_ID:
            // All string literals are parsed as CHAR constants, so the
            // default is only valid if it was parsed as a CHAR.
            return (defType == StoredFormatIds.CHAR_TYPE_ID);
        case StoredFormatIds.BIT_TYPE_ID:
        case StoredFormatIds.VARBIT_TYPE_ID:
        case StoredFormatIds.LONGVARBIT_TYPE_ID:
            // only valid if the default type is a BIT string.
            return (defType == StoredFormatIds.BIT_TYPE_ID);
        case StoredFormatIds.USERDEFINED_TYPE_ID_V3:
            // default is only valid if it's the same type as the column.
            return (defType == colType);
        case StoredFormatIds.BLOB_TYPE_ID:
        case StoredFormatIds.CLOB_TYPE_ID:
        case StoredFormatIds.SMALLINT_TYPE_ID:
        case StoredFormatIds.REAL_TYPE_ID:
        case StoredFormatIds.DOUBLE_TYPE_ID:
        case StoredFormatIds.DATE_TYPE_ID:
        case StoredFormatIds.TIME_TYPE_ID:
        case StoredFormatIds.TIMESTAMP_TYPE_ID:
            // No additional checking is done here for these types;
            // just assume we're okay.
            return true;
        default:
            // Any other column type does not accept a default that is
            // valid for Derby running in DB2 compatibility mode.
            return false;
    }
}
Also used : DataTypeDescriptor(org.apache.derby.iapi.types.DataTypeDescriptor)
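
To illustrate what defaultTypeIsValid enforces, here is a minimal JDBC sketch that exercises the CURRENT_USER branch above. The in-memory JDBC URL and table names are illustrative assumptions, as is the expectation that the DB2 minimum column width for a CURRENT_USER default is 8; only the general rule (a char-type column must meet Limits.DB2_MIN_COL_LENGTH_FOR_CURRENT_USER) comes from the snippet.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class DefaultWidthCheck {
    public static void main(String[] args) throws SQLException {
        // Illustrative in-memory database; any embedded Derby connection works.
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
             Statement s = conn.createStatement()) {
            // A char-type column wide enough for CURRENT_USER is accepted.
            s.executeUpdate("CREATE TABLE ok_tab (who VARCHAR(30) DEFAULT CURRENT_USER)");
            try {
                // Too narrow for a CURRENT_USER default: defaultTypeIsValid()
                // returns false and Derby rejects the column definition.
                s.executeUpdate("CREATE TABLE bad_tab (who VARCHAR(4) DEFAULT CURRENT_USER)");
            } catch (SQLException expected) {
                System.out.println("Rejected as expected: " + expected.getSQLState());
            }
        }
    }
}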

Example 27 with DataTypeDescriptor

use of org.apache.derby.iapi.types.DataTypeDescriptor in project derby by apache.

the class ConcatenationOperatorNode method bindExpression.

/**
 * Overrides BinaryOperatorNode.bindExpression because concatenation has
 * special requirements for parameter binding.
 *
 * @exception StandardException
 *                thrown on failure
 */
@Override
ValueNode bindExpression(FromList fromList, SubqueryList subqueryList, List<AggregateNode> aggregates) throws StandardException {
    // deal with binding operands
    leftOperand = leftOperand.bindExpression(fromList, subqueryList, aggregates);
    rightOperand = rightOperand.bindExpression(fromList, subqueryList, aggregates);
    if (leftOperand.requiresTypeFromContext()) {
        if (rightOperand.requiresTypeFromContext()) {
            throw StandardException.newException(SQLState.LANG_BINARY_OPERANDS_BOTH_PARMS, operator);
        }
        TypeId leftType;
        /*
         * A ? on the left gets its type from the right. There are eight
         * legal types for the concatenation operator: CHAR, VARCHAR,
         * LONG VARCHAR, CLOB, BIT, BIT VARYING, LONG BIT VARYING, and BLOB.
         * If the right type is BLOB, set the parameter type to BLOB with
         * max length. If the right type is one of the other bit types, set
         * the parameter type to BIT VARYING with maximum length.
         * If the right type is CLOB, set the parameter type to CLOB with
         * max length. If the right type is anything else, set it to VARCHAR
         * with maximum length. We count on the resolveConcatOperation
         * method to catch an illegal type.
         *
         * NOTE: When I added the long types, I could have changed the
         * resulting parameter types to LONG VARCHAR and LONG BIT VARYING,
         * but they were already VARCHAR and BIT VARYING, and it wasn't
         * clear to me what effect it would have to change it. - Jeff
         */
        if (rightOperand.getTypeId().isBitTypeId()) {
            if (rightOperand.getTypeId().isBlobTypeId())
                leftType = TypeId.getBuiltInTypeId(Types.BLOB);
            else
                leftType = TypeId.getBuiltInTypeId(Types.VARBINARY);
        } else {
            if (rightOperand.getTypeId().isClobTypeId())
                leftType = TypeId.getBuiltInTypeId(Types.CLOB);
            else
                leftType = TypeId.getBuiltInTypeId(Types.VARCHAR);
        }
        leftOperand.setType(new DataTypeDescriptor(leftType, true));
        if (rightOperand.getTypeId().isStringTypeId()) {
            // collation of ? operand should be picked from the context
            leftOperand.setCollationInfo(rightOperand.getTypeServices());
        }
    }
    /*
		 * Is there a ? parameter on the right?
		 */
    if (rightOperand.requiresTypeFromContext()) {
        TypeId rightType;
        /*
         * A ? on the right gets its type from the left. There are eight
         * legal types for the concatenation operator: CHAR, VARCHAR,
         * LONG VARCHAR, CLOB, BIT, BIT VARYING, LONG BIT VARYING, and BLOB.
         * If the left type is BLOB, set the parameter type to BLOB with
         * max length. If the left type is one of the other bit types, set
         * the parameter type to BIT VARYING with maximum length.
         * If the left type is CLOB, set the parameter type to CLOB with
         * max length. If the left type is anything else, set it to VARCHAR
         * with maximum length. We count on the resolveConcatOperation
         * method to catch an illegal type.
         *
         * NOTE: When I added the long types, I could have changed the
         * resulting parameter types to LONG VARCHAR and LONG BIT VARYING,
         * but they were already VARCHAR and BIT VARYING, and it wasn't
         * clear to me what effect it would have to change it. - Jeff
         */
        if (leftOperand.getTypeId().isBitTypeId()) {
            if (leftOperand.getTypeId().isBlobTypeId())
                rightType = TypeId.getBuiltInTypeId(Types.BLOB);
            else
                rightType = TypeId.getBuiltInTypeId(Types.VARBINARY);
        } else {
            if (leftOperand.getTypeId().isClobTypeId())
                rightType = TypeId.getBuiltInTypeId(Types.CLOB);
            else
                rightType = TypeId.getBuiltInTypeId(Types.VARCHAR);
        }
        rightOperand.setType(new DataTypeDescriptor(rightType, true));
        if (leftOperand.getTypeId().isStringTypeId()) {
            // collation of ? operand should be picked from the context
            rightOperand.setCollationInfo(leftOperand.getTypeServices());
        }
    }
    /*
		 * If the left operand is not a built-in type, then generate a bound
		 * conversion tree to a built-in type.
		 */
    if (leftOperand.getTypeId().userType()) {
        leftOperand = leftOperand.genSQLJavaSQLTree();
    }
    /*
		 * If the right operand is not a built-in type, then generate a bound
		 * conversion tree to a built-in type.
		 */
    if (rightOperand.getTypeId().userType()) {
        rightOperand = rightOperand.genSQLJavaSQLTree();
    }
    /*
		 * If either the left or right operands are non-string, non-bit types,
		 * then we generate an implicit cast to VARCHAR.
		 */
    TypeCompiler tc = leftOperand.getTypeCompiler();
    if (!(leftOperand.getTypeId().isStringTypeId() || leftOperand.getTypeId().isBitTypeId())) {
        DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, true, tc.getCastToCharWidth(leftOperand.getTypeServices()));
        leftOperand = new CastNode(leftOperand, dtd, getContextManager());
        // DERBY-2910 - Match current schema collation for implicit cast as we do for
        // explicit casts per SQL Spec 6.12 (10)
        leftOperand.setCollationUsingCompilationSchema();
        ((CastNode) leftOperand).bindCastNodeOnly();
    }
    tc = rightOperand.getTypeCompiler();
    if (!(rightOperand.getTypeId().isStringTypeId() || rightOperand.getTypeId().isBitTypeId())) {
        DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, true, tc.getCastToCharWidth(rightOperand.getTypeServices()));
        rightOperand = new CastNode(rightOperand, dtd, getContextManager());
        // DERBY-2910 - Match current schema collation for implicit cast as we do for
        // explicit casts per SQL Spec 6.12 (10)
        rightOperand.setCollationUsingCompilationSchema();
        ((CastNode) rightOperand).bindCastNodeOnly();
    }
    /*
     * Set the result type of this operator based on the operands. By
     * convention, the left operand gets to decide the result type of a
     * binary operator.
     */
    tc = leftOperand.getTypeCompiler();
    setType(resolveConcatOperation(leftOperand.getTypeServices(), rightOperand.getTypeServices()));
    /*
     * Make sure the maximum width set for the result doesn't exceed the
     * result type's maximum width.
     */
    if (SanityManager.DEBUG) {
        if (getTypeServices().getMaximumWidth() > getTypeId().getMaximumMaximumWidth()) {
            SanityManager.THROWASSERT("The maximum length " + getTypeServices().getMaximumWidth() + " for the result type " + getTypeId().getSQLTypeName() + " can't be greater than the result TypeId's maximum width of " + getTypeId().getMaximumMaximumWidth());
        }
    }
    /*
     * Now that we know the target interface type, set it. This assumes
     * that both operands have the same interface type, which is a safe
     * assumption for the concatenation operator.
     */
    this.setLeftRightInterfaceType(tc.interfaceName());
    // Fold constant sub-expressions now, so that the LIKE optimizer is
    // able to take advantage of concatenated literals like 'ab' || '%'.
    return this.evaluateConstantExpressions();
}
Also used : TypeId(org.apache.derby.iapi.types.TypeId) DataTypeDescriptor(org.apache.derby.iapi.types.DataTypeDescriptor) TypeCompiler(org.apache.derby.iapi.sql.compile.TypeCompiler)
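
The parameter typing performed above can be observed from the client side. The sketch below is a minimal, illustrative example (the in-memory JDBC URL, table, and data are assumptions): a ? on the left of || takes its type from the right operand, so it can simply be bound as a string.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ConcatParameterTyping {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true")) {
            try (Statement s = conn.createStatement()) {
                s.executeUpdate("CREATE TABLE t (name VARCHAR(20))");
                s.executeUpdate("INSERT INTO t VALUES ('derby')");
            }
            // The ? on the left of || gets its type from the right operand,
            // so bindExpression types it as a VARCHAR parameter here.
            try (PreparedStatement ps = conn.prepareStatement("SELECT ? || name FROM t")) {
                ps.setString(1, "hello ");
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        System.out.println(rs.getString(1)); // hello derby
                    }
                }
            }
        }
    }
}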

Example 28 with DataTypeDescriptor

use of org.apache.derby.iapi.types.DataTypeDescriptor in project derby by apache.

the class ConcatenationOperatorNode method resolveConcatOperation.

/**
 * Resolve a concatenation operator
 *
 * @param leftType
 *            The DataTypeDescriptor of the left operand
 * @param rightType
 *            The DataTypeDescriptor of the right operand
 *
 * @return A DataTypeDescriptor telling the result type of the concatenate
 *         operation
 *
 * @exception StandardException
 *                BinaryOperatorNotSupported Thrown when a BinaryOperator is
 *                not supported on the operand types.
 */
private DataTypeDescriptor resolveConcatOperation(DataTypeDescriptor leftType, DataTypeDescriptor rightType) throws StandardException {
    TypeId leftTypeId;
    TypeId rightTypeId;
    String higherType;
    int resultLength;
    boolean nullable;
    leftTypeId = leftType.getTypeId();
    rightTypeId = rightType.getTypeId();
    if (!leftTypeId.isConcatableTypeId() || !rightTypeId.isConcatableTypeId() || (rightTypeId.isBitTypeId() && leftTypeId.isStringTypeId()) || (leftTypeId.isBitTypeId() && rightTypeId.isStringTypeId()))
        throw StandardException.newException(SQLState.LANG_DB2_FUNCTION_INCOMPATIBLE, "||", "FUNCTION");
    /*
     * If the types aren't the same, the result of the operation is the
     * type of higher precedence.
     */
    higherType = (leftTypeId.typePrecedence() >= rightTypeId.typePrecedence()) ? leftType.getTypeName() : rightType.getTypeName();
    /* Get the length of the result */
    resultLength = leftType.getMaximumWidth() + rightType.getMaximumWidth();
    // At this point both operands are either string types or bit types;
    // any string/bit mismatch has already been handled earlier.
    if (leftTypeId.getJDBCTypeId() == Types.CHAR || leftTypeId.getJDBCTypeId() == Types.BINARY) {
        switch(rightTypeId.getJDBCTypeId()) {
            case Types.CHAR:
            case Types.BINARY:
                if (resultLength > Limits.DB2_CHAR_MAXWIDTH) {
                    if (rightTypeId.getJDBCTypeId() == Types.CHAR)
                        // operands CHAR(A) CHAR(B) and A+B>254 then result is
                        // VARCHAR(A+B)
                        higherType = TypeId.VARCHAR_NAME;
                    else
                        // operands CHAR FOR BIT DATA(A) CHAR FOR BIT DATA(B)
                        // and A+B>254 then result is VARCHAR FOR BIT DATA(A+B)
                        higherType = TypeId.VARBIT_NAME;
                }
                break;
            case Types.VARCHAR:
            case Types.VARBINARY:
                if (resultLength > Limits.DB2_CONCAT_VARCHAR_LENGTH) {
                    if (rightTypeId.getJDBCTypeId() == Types.VARCHAR)
                        // operands CHAR(A) VARCHAR(B) and A+B>4000 then result
                        // is LONG VARCHAR
                        higherType = TypeId.LONGVARCHAR_NAME;
                    else
                        // operands CHAR FOR BIT DATA(A) VARCHAR FOR BIT DATA(B)
                        // and A+B>4000 then result is LONG VARCHAR FOR BIT DATA
                        higherType = TypeId.LONGVARBIT_NAME;
                }
                break;
            case Types.CLOB:
            case Types.BLOB:
                // operands CHAR(A), CLOB(B) then result is CLOB(MIN(A+B,2G))
                // operands CHAR FOR BIT DATA(A), BLOB(B) then result is
                // BLOB(MIN(A+B,2G))
                resultLength = clobBlobHandling(rightType, leftType);
                break;
        }
    } else if (leftTypeId.getJDBCTypeId() == Types.VARCHAR) {
        switch(rightTypeId.getJDBCTypeId()) {
            case Types.CHAR:
                // operands CHAR(A) VARCHAR(B) and A+B>4000 then result is LONG VARCHAR
            case Types.VARCHAR:
                // operands VARCHAR(A) VARCHAR(B) and A+B>4000 then result is LONG VARCHAR
                if (resultLength > Limits.DB2_CONCAT_VARCHAR_LENGTH)
                    higherType = TypeId.LONGVARCHAR_NAME;
                break;
            case Types.CLOB:
                // operands VARCHAR(A), CLOB(B) then result is CLOB(MIN(A+B,2G))
                resultLength = clobBlobHandling(rightType, leftType);
                break;
        }
    } else if (leftTypeId.getJDBCTypeId() == Types.VARBINARY) {
        switch(rightTypeId.getJDBCTypeId()) {
            case Types.BINARY:
                // operands CHAR FOR BIT DATA(A) VARCHAR FOR BIT DATA(B) and A+B>4000
                // then result is LONG VARCHAR FOR BIT DATA
            case Types.VARBINARY:
                // operands VARCHAR FOR BIT DATA(A) VARCHAR FOR BIT DATA(B) and A+B>4000
                // then result is LONG VARCHAR FOR BIT DATA
                if (resultLength > Limits.DB2_CONCAT_VARCHAR_LENGTH)
                    higherType = TypeId.LONGVARBIT_NAME;
                break;
            case Types.BLOB:
                // operands VARCHAR FOR BIT DATA(A), BLOB(B) then result is
                // BLOB(MIN(A+B,2G))
                resultLength = clobBlobHandling(rightType, leftType);
                break;
        }
    } else if (leftTypeId.getJDBCTypeId() == Types.CLOB || leftTypeId.getJDBCTypeId() == Types.BLOB) {
        // operands CLOB(A), CHAR(B) then result is CLOB(MIN(A+B,2G))
        // operands CLOB(A), VARCHAR(B) then result is CLOB(MIN(A+B,2G))
        // operands CLOB(A), LONG VARCHAR then result is CLOB(MIN(A+32K,2G))
        // operands CLOB(A), CLOB(B) then result is CLOB(MIN(A+B,2G))
        // operands BLOB(A), CHAR FOR BIT DATA(B) then result is
        // BLOB(MIN(A+B,2G))
        // operands BLOB(A), VARCHAR FOR BIT DATA(B) then result is
        // BLOB(MIN(A+B,2G))
        // operands BLOB(A), LONG VARCHAR FOR BIT DATA then result is
        // BLOB(MIN(A+32K,2G))
        // operands BLOB(A), BLOB(B) then result is BLOB(MIN(A+B,2G))
        resultLength = clobBlobHandling(leftType, rightType);
    } else if (rightTypeId.getJDBCTypeId() == Types.CLOB || rightTypeId.getJDBCTypeId() == Types.BLOB) {
        // operands LONG VARCHAR, CLOB(A) then result is CLOB(MIN(A+32K,2G))
        // operands LONG VARCHAR FOR BIT DATA, BLOB(A) then result is
        // BLOB(MIN(A+32K,2G))
        resultLength = clobBlobHandling(rightType, leftType);
    }
    // The long types have a fixed maximum width; if the actual data exceeds
    // the maximum length of the concatenated string, an exception will be
    // thrown at execute time.
    if (higherType.equals(TypeId.LONGVARCHAR_NAME))
        resultLength = TypeId.LONGVARCHAR_MAXWIDTH;
    else if (higherType.equals(TypeId.LONGVARBIT_NAME))
        resultLength = TypeId.LONGVARBIT_MAXWIDTH;
    /*
     * Result length can't be negative.
     */
    if (SanityManager.DEBUG) {
        if (resultLength < 0) {
            SanityManager.THROWASSERT("There should not be an overflow of maximum length for any result type at this point. Overflow for BLOB/CLOB has already been handled earlier");
        }
    }
    /* The result is nullable if either side is nullable */
    nullable = leftType.isNullable() || rightType.isNullable();
    /*
     * Create a new DataTypeDescriptor that has the correct type and
     * nullability.
     *
     * It's OK to call the implementation of the DataTypeDescriptorFactory
     * here, because we're in the same package.
     */
    DataTypeDescriptor returnDTD = new DataTypeDescriptor(TypeId.getBuiltInTypeId(higherType), nullable, resultLength);
    // If the collation derivation or collation type of the two operands
    // differ, the collation derivation of the result will be NONE.
    if (leftType.getCollationDerivation() != rightType.getCollationDerivation() || leftType.getCollationType() != rightType.getCollationType())
        returnDTD = returnDTD.getCollatedType(returnDTD.getCollationDerivation(), StringDataValue.COLLATION_DERIVATION_NONE);
    else {
        returnDTD = returnDTD.getCollatedType(leftType.getCollationType(), leftType.getCollationDerivation());
    }
    return returnDTD;
}
Also used : TypeId(org.apache.derby.iapi.types.TypeId) DataTypeDescriptor(org.apache.derby.iapi.types.DataTypeDescriptor)
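
The result-type promotion implemented by resolveConcatOperation can be observed through ResultSetMetaData. The following sketch is illustrative: the in-memory URL and tables are assumptions, and it presumes the Limits constants referenced above are 254 (DB2_CHAR_MAXWIDTH) and 4000 (DB2_CONCAT_VARCHAR_LENGTH); the exact type names are whatever Derby reports at run time.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ConcatResultTypes {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
             Statement s = conn.createStatement()) {
            s.executeUpdate("CREATE TABLE c (a CHAR(200), b CHAR(200), v VARCHAR(3000), w VARCHAR(3000))");
            // CHAR(200) || CHAR(200): combined length 400 exceeds the CHAR limit,
            // so the result should be reported as VARCHAR(400).
            ResultSetMetaData md1 = s.executeQuery("SELECT a || b FROM c").getMetaData();
            System.out.println(md1.getColumnTypeName(1) + "(" + md1.getPrecision(1) + ")");
            // VARCHAR(3000) || VARCHAR(3000): combined length 6000 exceeds the
            // concatenated VARCHAR limit, so the result should be LONG VARCHAR.
            ResultSetMetaData md2 = s.executeQuery("SELECT v || w FROM c").getMetaData();
            System.out.println(md2.getColumnTypeName(1));
        }
    }
}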

Example 29 with DataTypeDescriptor

use of org.apache.derby.iapi.types.DataTypeDescriptor in project derby by apache.

the class ConditionalNode method bindExpression.

/**
 * Bind this expression.  This means binding the sub-expressions,
 * as well as figuring out what the return type is for this expression.
 *
 * @param fromList		The FROM list for the query this
 *				expression is in, for binding columns.
 * @param subqueryList		The subquery list being built as we find SubqueryNodes
 * @param aggregates        The aggregate list being built as we find AggregateNodes
 *
 * @return	The new top of the expression tree.
 *
 * @exception StandardException		Thrown on error
 */
@Override
ValueNode bindExpression(FromList fromList, SubqueryList subqueryList, List<AggregateNode> aggregates) throws StandardException {
    CompilerContext cc = getCompilerContext();
    int previousReliability = orReliability(CompilerContext.CONDITIONAL_RESTRICTION);
    ValueNodeList caseOperandParameters = bindCaseOperand(cc, fromList, subqueryList, aggregates);
    testConditions.bindExpression(fromList, subqueryList, aggregates);
    // If the case operand is an untyped ? parameter, find out which type
    // best describes it.
    if (caseOperandParameters != null) {
        // The dummy parameter nodes that stood in for the case operand
        // were given types when testConditions was bound.
        for (ValueNode vn : caseOperandParameters) {
            // Check that this parameter is comparable to all the other
            // parameters in the list. This indirectly checks whether
            // all when operands have compatible types.
            caseOperandParameters.comparable(vn);
            // Replace the dummy parameter node with the actual case
            // operand.
            testConditions.accept(new ReplaceNodeVisitor(vn, caseOperand));
        }
        // Finally, after we have determined that all the when operands
        // are compatible, and we have reinserted the case operand into
        // the tree, set the type of the case operand to the dominant
        // type of all the when operands.
        caseOperand.setType(caseOperandParameters.getDominantTypeServices());
    }
    thenElseList.bindExpression(fromList, subqueryList, aggregates);
    // Find the type of the first typed value in thenElseList and cast
    // all untyped NULL values to that type. We don't need to find the
    // dominant type here, since a top-level cast to that type will be
    // added later, if necessary.
    DataTypeDescriptor nullType = thenElseList.getTypeServices();
    if (nullType == null) {
        // All result expressions are untyped NULLs, so there is no type to
        // cast them to; raise an error.
        throw StandardException.newException(SQLState.LANG_ALL_RESULT_EXPRESSIONS_UNTYPED);
    } else {
        recastNullNodes(nullType, fromList, subqueryList, aggregates);
    }
    // Set the result type of this conditional to be the dominant type
    // of the result expressions.
    setType(thenElseList.getDominantTypeServices());
    /* testCondition must be a boolean expression.
		 * If it is a ? parameter on the left, then set type to boolean,
		 * otherwise verify that the result type is boolean.
		 */
    testConditions.setParameterDescriptor(new DataTypeDescriptor(TypeId.BOOLEAN_ID, true));
    for (ValueNode testCondition : testConditions) {
        if (!testCondition.getTypeServices().getTypeId().equals(TypeId.BOOLEAN_ID)) {
            throw StandardException.newException(SQLState.LANG_CONDITIONAL_NON_BOOLEAN);
        }
    }
    // Set the type of the parameters.
    thenElseList.setParameterDescriptor(getTypeServices());
    /* The then and else expressions must be type compatible */
    ClassInspector cu = getClassFactory().getClassInspector();
    /*
		** If it is comparable, then we are ok.  Note that we
		** could in fact allow any expressions that are convertible()
		** since we are going to generate a cast node, but that might
		** be confusing to users...
		*/
    for (ValueNode expr : thenElseList) {
        DataTypeDescriptor dtd = expr.getTypeServices();
        String javaTypeName = dtd.getTypeId().getCorrespondingJavaTypeName();
        String resultJavaTypeName = getTypeId().getCorrespondingJavaTypeName();
        if (!dtd.comparable(getTypeServices(), false, getClassFactory()) && !cu.assignableTo(javaTypeName, resultJavaTypeName) && !cu.assignableTo(resultJavaTypeName, javaTypeName)) {
            throw StandardException.newException(SQLState.LANG_NOT_TYPE_COMPATIBLE, dtd.getTypeId().getSQLTypeName(), getTypeId().getSQLTypeName());
        }
    }
    // The result is nullable if and only if at least one of the result
    // expressions is nullable (DERBY-6567).
    setNullability(thenElseList.isNullable());
    /*
		** Generate a CastNode if necessary and
		** stick it over the original expression
		*/
    TypeId condTypeId = getTypeId();
    for (int i = 0; i < thenElseList.size(); i++) {
        ValueNode expr = thenElseList.elementAt(i);
        if (expr.getTypeId().typePrecedence() != condTypeId.typePrecedence()) {
            // Cast to dominant type.
            ValueNode cast = new CastNode(expr, getTypeServices(), getContextManager());
            cast = cast.bindExpression(fromList, subqueryList, aggregates);
            thenElseList.setElementAt(cast, i);
        }
    }
    cc.setReliability(previousReliability);
    return this;
}
Also used : TypeId(org.apache.derby.iapi.types.TypeId) DataTypeDescriptor(org.apache.derby.iapi.types.DataTypeDescriptor) ClassInspector(org.apache.derby.iapi.services.loader.ClassInspector) CompilerContext(org.apache.derby.iapi.sql.compile.CompilerContext)
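
A small sketch of the CASE typing rules this method implements: the untyped NULL is recast to the type of the first typed result expression, the overall result type is the dominant type of the THEN/ELSE expressions, and the result is nullable because one branch can be NULL. The database URL and data are illustrative assumptions, and the reported type name (likely DECIMAL here, since a literal with a fractional part dominates INT) is whatever Derby returns at run time.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class CaseExpressionTyping {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
             Statement s = conn.createStatement()) {
            s.executeUpdate("CREATE TABLE n (i INT NOT NULL)");
            s.executeUpdate("INSERT INTO n VALUES (1), (2)");
            // One branch is an untyped NULL, one is a decimal literal, one is an
            // INT column; ConditionalNode picks the dominant type and marks the
            // result nullable because of the NULL branch.
            ResultSetMetaData md = s.executeQuery(
                "SELECT CASE WHEN i = 1 THEN 2.5 WHEN i = 2 THEN NULL ELSE i END FROM n").getMetaData();
            System.out.println(md.getColumnTypeName(1));
            System.out.println("nullable: " + (md.isNullable(1) == ResultSetMetaData.columnNullable));
        }
    }
}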

Example 30 with DataTypeDescriptor

use of org.apache.derby.iapi.types.DataTypeDescriptor in project derby by apache.

the class CreateTableNode method makeConstantAction.

/**
 * Create the Constant information that will drive the guts of Execution.
 *
 * @exception StandardException		Thrown on failure
 */
@Override
public ConstantAction makeConstantAction() throws StandardException {
    TableElementList coldefs = tableElementList;
    // for each column, stuff system.column
    ColumnInfo[] colInfos = new ColumnInfo[coldefs.countNumberOfColumns()];
    int numConstraints = coldefs.genColumnInfos(colInfos);
    /* If we've seen a constraint, then build a constraint list */
    CreateConstraintConstantAction[] conActions = null;
    SchemaDescriptor sd = getSchemaDescriptor(tableType != TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE, true);
    if (numConstraints > 0) {
        conActions = new CreateConstraintConstantAction[numConstraints];
        coldefs.genConstraintActions(true, conActions, getRelativeName(), sd, getDataDictionary());
    }
    // If any of the columns are "long" and the user has not specified a
    // page size, set the page size to 32K.
    // Also, in the case where the approximate sum of the column sizes is
    // greater than the bump threshold, bump the page size to 32K.
    boolean table_has_long_column = false;
    int approxLength = 0;
    for (int i = 0; i < colInfos.length; i++) {
        DataTypeDescriptor dts = colInfos[i].getDataType();
        if (dts.getTypeId().isLongConcatableTypeId()) {
            table_has_long_column = true;
            break;
        }
        approxLength += dts.getTypeId().getApproximateLengthInBytes(dts);
    }
    if (table_has_long_column || (approxLength > Property.TBL_PAGE_SIZE_BUMP_THRESHOLD)) {
        if (((properties == null) || (properties.get(Property.PAGE_SIZE_PARAMETER) == null)) && (PropertyUtil.getServiceProperty(getLanguageConnectionContext().getTransactionCompile(), Property.PAGE_SIZE_PARAMETER) == null)) {
            if (properties == null)
                properties = new Properties();
            properties.put(Property.PAGE_SIZE_PARAMETER, Property.PAGE_SIZE_DEFAULT_LONG);
        }
    }
    return (getGenericConstantActionFactory().getCreateTableConstantAction(sd.getSchemaName(), getRelativeName(), tableType, colInfos, conActions, properties, lockGranularity, onCommitDeleteRows, onRollbackDeleteRows));
}
Also used : SchemaDescriptor(org.apache.derby.iapi.sql.dictionary.SchemaDescriptor) DataTypeDescriptor(org.apache.derby.iapi.types.DataTypeDescriptor) ColumnInfo(org.apache.derby.impl.sql.execute.ColumnInfo) Properties(java.util.Properties) CreateConstraintConstantAction(org.apache.derby.impl.sql.execute.CreateConstraintConstantAction)
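
One way to observe the page-size bump performed by makeConstantAction is to create one table with a long column (such as CLOB) and one without, then read each table's PAGESIZE from the SYSCS_DIAG.SPACE_TABLE diagnostic table function. This is an illustrative sketch: the in-memory URL and table names are assumptions, as is the expectation that an unmodified Derby reports 32768 for the CLOB table and the 4096 default for the plain one.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class PageSizeBump {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
             Statement s = conn.createStatement()) {
            // A "long" column type (CLOB) should trigger the 32K page size,
            // since no derby.storage.pageSize property is set here.
            s.executeUpdate("CREATE TABLE with_clob (c CLOB)");
            // A small row with no long columns keeps the default page size.
            s.executeUpdate("CREATE TABLE small_tab (i INT)");
            for (String table : new String[] { "WITH_CLOB", "SMALL_TAB" }) {
                try (ResultSet rs = s.executeQuery(
                        "SELECT PAGESIZE FROM TABLE (SYSCS_DIAG.SPACE_TABLE('APP', '" + table + "')) t "
                        + "WHERE ISINDEX = 0")) {
                    while (rs.next()) {
                        System.out.println(table + " page size: " + rs.getInt(1));
                    }
                }
            }
        }
    }
}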

Aggregations

DataTypeDescriptor (org.apache.derby.iapi.types.DataTypeDescriptor) 99
TypeId (org.apache.derby.iapi.types.TypeId) 32
ColumnDescriptor (org.apache.derby.iapi.sql.dictionary.ColumnDescriptor) 14
DataValueDescriptor (org.apache.derby.iapi.types.DataValueDescriptor) 14
CompilerContext (org.apache.derby.iapi.sql.compile.CompilerContext) 9
SchemaDescriptor (org.apache.derby.iapi.sql.dictionary.SchemaDescriptor) 8
ExecRow (org.apache.derby.iapi.sql.execute.ExecRow) 8
TypeDescriptor (org.apache.derby.catalog.TypeDescriptor) 7
UUID (org.apache.derby.catalog.UUID) 5
ClassFactory (org.apache.derby.iapi.services.loader.ClassFactory) 5
ResultColumnDescriptor (org.apache.derby.iapi.sql.ResultColumnDescriptor) 5
TypeCompiler (org.apache.derby.iapi.sql.compile.TypeCompiler) 5
StandardException (org.apache.derby.shared.common.error.StandardException) 5
Properties (java.util.Properties) 4
UserDefinedTypeIdImpl (org.apache.derby.catalog.types.UserDefinedTypeIdImpl) 4
FormatableBitSet (org.apache.derby.iapi.services.io.FormatableBitSet) 4
AliasDescriptor (org.apache.derby.iapi.sql.dictionary.AliasDescriptor) 3
ColumnDescriptorList (org.apache.derby.iapi.sql.dictionary.ColumnDescriptorList) 3
DataDictionary (org.apache.derby.iapi.sql.dictionary.DataDictionary) 3
ExecIndexRow (org.apache.derby.iapi.sql.execute.ExecIndexRow) 3