
Example 96 with SqlTypeName

use of org.apache.calcite.sql.type.SqlTypeName in project calcite by apache.

the class SqlValidatorTest method testIntervalLiterals.

@Test
public void testIntervalLiterals() {
    // First check that min, max, and defaults are what we expect
    // (values used in subtests depend on these being true to
    // accurately test bounds)
    final RelDataTypeSystem typeSystem = getTester().getValidator().getTypeFactory().getTypeSystem();
    final RelDataTypeSystem defTypeSystem = RelDataTypeSystem.DEFAULT;
    for (SqlTypeName typeName : SqlTypeName.INTERVAL_TYPES) {
        assertThat(typeName.getMinPrecision(), is(1));
        assertThat(typeSystem.getMaxPrecision(typeName), is(10));
        assertThat(typeSystem.getDefaultPrecision(typeName), is(2));
        assertThat(typeName.getMinScale(), is(1));
        assertThat(typeSystem.getMaxScale(typeName), is(9));
        assertThat(typeName.getDefaultScale(), is(6));
    }
    // Tests that should pass both parser and validator
    subTestIntervalYearPositive();
    subTestIntervalYearToMonthPositive();
    subTestIntervalMonthPositive();
    subTestIntervalDayPositive();
    subTestIntervalDayToHourPositive();
    subTestIntervalDayToMinutePositive();
    subTestIntervalDayToSecondPositive();
    subTestIntervalHourPositive();
    subTestIntervalHourToMinutePositive();
    subTestIntervalHourToSecondPositive();
    subTestIntervalMinutePositive();
    subTestIntervalMinuteToSecondPositive();
    subTestIntervalSecondPositive();
    // Tests that should pass parser but fail validator
    subTestIntervalYearNegative();
    subTestIntervalYearToMonthNegative();
    subTestIntervalMonthNegative();
    subTestIntervalDayNegative();
    subTestIntervalDayToHourNegative();
    subTestIntervalDayToMinuteNegative();
    subTestIntervalDayToSecondNegative();
    subTestIntervalHourNegative();
    subTestIntervalHourToMinuteNegative();
    subTestIntervalHourToSecondNegative();
    subTestIntervalMinuteNegative();
    subTestIntervalMinuteToSecondNegative();
    subTestIntervalSecondNegative();
    // Miscellaneous
    // fractional value is not OK, even if it is 0
    checkWholeExpFails("INTERVAL '1.0' HOUR", "Illegal interval literal format '1.0' for INTERVAL HOUR");
    // only seconds are allowed to have a fractional part
    checkExpType("INTERVAL '1.0' SECOND", "INTERVAL SECOND NOT NULL");
    // leading zeroes do not cause precision to be exceeded
    checkExpType("INTERVAL '0999' MONTH(3)", "INTERVAL MONTH(3) NOT NULL");
}
Also used : RelDataTypeSystem(org.apache.calcite.rel.type.RelDataTypeSystem) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) Test(org.junit.Test)
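
A minimal standalone sketch (not part of the Calcite test suite; the class name IntervalBoundsDemo is made up for illustration) that prints the same interval precision/scale bounds the assertions above rely on, using the default type system:

import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeName;

public class IntervalBoundsDemo {
    public static void main(String[] args) {
        // Same bounds as asserted in testIntervalLiterals, read from the default type system.
        final RelDataTypeSystem typeSystem = RelDataTypeSystem.DEFAULT;
        for (SqlTypeName typeName : SqlTypeName.INTERVAL_TYPES) {
            System.out.printf("%s: precision [%d..%d], default %d; scale [%d..%d], default %d%n",
                typeName,
                typeName.getMinPrecision(), typeSystem.getMaxPrecision(typeName),
                typeSystem.getDefaultPrecision(typeName),
                typeName.getMinScale(), typeSystem.getMaxScale(typeName),
                typeName.getDefaultScale());
        }
    }
}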

Example 97 with SqlTypeName

use of org.apache.calcite.sql.type.SqlTypeName in project calcite by apache.

the class DruidQuery method toDruidColumn.

/**
 * @param rexNode    leaf input ref to a Druid column
 * @param rowType    row type
 * @param druidQuery Druid query
 *
 * @return {@link Pair} of column name and extraction function on top of the input ref,
 * or {@code Pair.of(null, null)} when the node cannot be translated to a valid Druid column
 */
protected static Pair<String, ExtractionFunction> toDruidColumn(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) {
    final String columnName;
    final ExtractionFunction extractionFunction;
    final Granularity granularity;
    switch(rexNode.getKind()) {
        case INPUT_REF:
            columnName = extractColumnName(rexNode, rowType, druidQuery);
            // @TODO we can remove this ugly check by treating druid time columns as LONG
            if (rexNode.getType().getFamily() == SqlTypeFamily.DATE || rexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP) {
                extractionFunction = TimeExtractionFunction.createDefault(druidQuery.getConnectionConfig().timeZone());
            } else {
                extractionFunction = null;
            }
            break;
        case EXTRACT:
            granularity = DruidDateTimeUtils.extractGranularity(rexNode, druidQuery.getConnectionConfig().timeZone());
            if (granularity == null) {
                // unknown Granularity
                return Pair.of(null, null);
            }
            if (!TimeExtractionFunction.isValidTimeExtract((RexCall) rexNode)) {
                return Pair.of(null, null);
            }
            extractionFunction = TimeExtractionFunction.createExtractFromGranularity(granularity, druidQuery.getConnectionConfig().timeZone());
            columnName = extractColumnName(((RexCall) rexNode).getOperands().get(1), rowType, druidQuery);
            break;
        case FLOOR:
            granularity = DruidDateTimeUtils.extractGranularity(rexNode, druidQuery.getConnectionConfig().timeZone());
            if (granularity == null) {
                // unknown Granularity
                return Pair.of(null, null);
            }
            if (!TimeExtractionFunction.isValidTimeFloor((RexCall) rexNode)) {
                return Pair.of(null, null);
            }
            extractionFunction = TimeExtractionFunction.createFloorFromGranularity(granularity, druidQuery.getConnectionConfig().timeZone());
            columnName = extractColumnName(((RexCall) rexNode).getOperands().get(0), rowType, druidQuery);
            break;
        case CAST:
            // Case: a cast over an input ref; check that the cast is valid
            if (!isValidLeafCast(rexNode)) {
                return Pair.of(null, null);
            }
            columnName = extractColumnName(((RexCall) rexNode).getOperands().get(0), rowType, druidQuery);
            // Case: a CAST to TIME/DATE; make sure we have a valid extraction function
            final SqlTypeName toTypeName = rexNode.getType().getSqlTypeName();
            if (toTypeName.getFamily() == SqlTypeFamily.TIMESTAMP || toTypeName.getFamily() == SqlTypeFamily.DATETIME) {
                extractionFunction = TimeExtractionFunction.translateCastToTimeExtract(rexNode, TimeZone.getTimeZone(druidQuery.getConnectionConfig().timeZone()));
                if (extractionFunction == null) {
                    // no extraction function means the cast is not valid, so bail out
                    return Pair.of(null, null);
                }
            } else {
                extractionFunction = null;
            }
            break;
        default:
            return Pair.of(null, null);
    }
    return Pair.of(columnName, extractionFunction);
}
Also used : RexCall(org.apache.calcite.rex.RexCall) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName)
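
A small self-contained sketch of the family check used in the CAST branch above (the class name TimeFamilyCheckSketch is hypothetical): only casts landing in the TIMESTAMP or DATETIME families require a Druid time-extraction function.

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;

public class TimeFamilyCheckSketch {
    public static void main(String[] args) {
        for (SqlTypeName toTypeName : new SqlTypeName[] { SqlTypeName.TIMESTAMP, SqlTypeName.VARCHAR }) {
            // Mirrors the CAST branch of toDruidColumn: TIMESTAMP/DATETIME families
            // need an extraction function, everything else does not.
            boolean needsTimeExtract = toTypeName.getFamily() == SqlTypeFamily.TIMESTAMP
                || toTypeName.getFamily() == SqlTypeFamily.DATETIME;
            System.out.println(toTypeName + " -> needs time extraction: " + needsTimeExtract);
        }
    }
}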

Example 98 with SqlTypeName

use of org.apache.calcite.sql.type.SqlTypeName in project calcite by apache.

the class DruidTableFactory method create.

// Creates a DruidTable from the factory operand ("dataSource", "dimensions", "metrics", etc.)
public Table create(SchemaPlus schema, String name, Map operand, RelDataType rowType) {
    final DruidSchema druidSchema = schema.unwrap(DruidSchema.class);
    // If "dataSource" operand is present it overrides the table name.
    final String dataSource = (String) operand.get("dataSource");
    final Set<String> metricNameBuilder = new LinkedHashSet<>();
    final Map<String, SqlTypeName> fieldBuilder = new LinkedHashMap<>();
    final Map<String, List<ComplexMetric>> complexMetrics = new HashMap<>();
    final String timestampColumnName;
    if (operand.get("timestampColumn") != null) {
        timestampColumnName = (String) operand.get("timestampColumn");
    } else {
        timestampColumnName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
    }
    fieldBuilder.put(timestampColumnName, SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE);
    final Object dimensionsRaw = operand.get("dimensions");
    if (dimensionsRaw instanceof List) {
        // noinspection unchecked
        final List<String> dimensions = (List<String>) dimensionsRaw;
        for (String dimension : dimensions) {
            fieldBuilder.put(dimension, SqlTypeName.VARCHAR);
        }
    }
    // init the complex metric map
    final Object complexMetricsRaw = operand.get("complexMetrics");
    if (complexMetricsRaw instanceof List) {
        // noinspection unchecked
        final List<String> complexMetricList = (List<String>) complexMetricsRaw;
        for (String metric : complexMetricList) {
            complexMetrics.put(metric, new ArrayList<ComplexMetric>());
        }
    }
    final Object metricsRaw = operand.get("metrics");
    if (metricsRaw instanceof List) {
        final List metrics = (List) metricsRaw;
        for (Object metric : metrics) {
            DruidType druidType = DruidType.LONG;
            final String metricName;
            String fieldName = null;
            if (metric instanceof Map) {
                Map map2 = (Map) metric;
                if (!(map2.get("name") instanceof String)) {
                    throw new IllegalArgumentException("metric must have name");
                }
                metricName = (String) map2.get("name");
                final String type = (String) map2.get("type");
                fieldName = (String) map2.get("fieldName");
                druidType = DruidType.getTypeFromMetric(type);
            } else {
                metricName = (String) metric;
            }
            if (!druidType.isComplex()) {
                fieldBuilder.put(metricName, druidType.sqlType);
                metricNameBuilder.add(metricName);
            } else {
                assert fieldName != null;
                // Only add the complex metric if there exists an alias for it
                if (complexMetrics.containsKey(fieldName)) {
                    SqlTypeName type = fieldBuilder.get(fieldName);
                    if (type != SqlTypeName.VARCHAR) {
                        fieldBuilder.put(fieldName, SqlTypeName.VARBINARY);
                    // else this complex metric is also a dimension, so its type should remain
                    // VARCHAR, but it will also be added as a complex metric.
                    }
                    complexMetrics.get(fieldName).add(new ComplexMetric(metricName, druidType));
                }
            }
        }
    }
    final Object interval = operand.get("interval");
    final List<Interval> intervals;
    if (interval instanceof String) {
        intervals = ImmutableList.of(new Interval((String) interval, ISOChronology.getInstanceUTC()));
    } else {
        intervals = null;
    }
    final String dataSourceName = Util.first(dataSource, name);
    if (dimensionsRaw == null || metricsRaw == null) {
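        // Dimensions or metrics were not declared, so discover them from Druid itself; the
        // coordinator URL is derived from the broker URL by swapping the default ports (8082 -> 8081).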
        DruidConnectionImpl connection = new DruidConnectionImpl(druidSchema.url, druidSchema.url.replace(":8082", ":8081"));
        return DruidTable.create(druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, connection, complexMetrics);
    } else {
        return DruidTable.create(druidSchema, dataSourceName, intervals, fieldBuilder, metricNameBuilder, timestampColumnName, complexMetrics);
    }
}
Also used : LinkedHashSet(java.util.LinkedHashSet) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) Interval(org.joda.time.Interval)
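
For context, a hedged sketch of the operand map this factory consumes: the keys mirror those read above ("dataSource", "timestampColumn", "dimensions", "metrics", "interval"), while the concrete values and the class name DruidOperandSketch are illustrative placeholders.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class DruidOperandSketch {
    public static void main(String[] args) {
        // Hypothetical values; only the keys are taken from DruidTableFactory.create above.
        Map<String, Object> operand = ImmutableMap.<String, Object>builder()
            .put("dataSource", "wikiticker")                       // overrides the table name
            .put("timestampColumn", "__time")                      // else DEFAULT_TIMESTAMP_COLUMN is used
            .put("dimensions", ImmutableList.of("countryName", "page"))
            .put("metrics", ImmutableList.of("added", "deleted"))  // plain (non-complex) metrics
            .put("interval", "2015-09-12T00:00:00.000Z/2015-09-13T00:00:00.000Z")
            .build();
        System.out.println(operand);
    }
}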

Example 99 with SqlTypeName

use of org.apache.calcite.sql.type.SqlTypeName in project calcite by apache.

the class RexUtil method canAssignFrom.

/**
 * Returns whether a value of {@code type2} can be assigned to a variable
 * of {@code type1}.
 *
 * <p>For example:
 * <ul>
 *   <li>{@code canAssignFrom(BIGINT, TINYINT)} returns {@code true}</li>
 *   <li>{@code canAssignFrom(TINYINT, BIGINT)} returns {@code false}</li>
 *   <li>{@code canAssignFrom(BIGINT, VARCHAR)} returns {@code false}</li>
 * </ul>
 */
private static boolean canAssignFrom(RelDataType type1, RelDataType type2) {
    final SqlTypeName name1 = type1.getSqlTypeName();
    final SqlTypeName name2 = type2.getSqlTypeName();
    if (name1.getFamily() == name2.getFamily()) {
        switch(name1.getFamily()) {
            case NUMERIC:
                return name1.compareTo(name2) >= 0;
            default:
                return true;
        }
    }
    return false;
}
Also used : SqlTypeName(org.apache.calcite.sql.type.SqlTypeName)
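
Since canAssignFrom is private, here is a rough re-statement of its logic in terms of SqlTypeName alone (the class name CanAssignFromSketch is made up), reproducing the examples from the javadoc:

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;

public class CanAssignFromSketch {
    // Re-states the private helper above using SqlTypeName instead of RelDataType.
    static boolean canAssignFrom(SqlTypeName name1, SqlTypeName name2) {
        if (name1.getFamily() == name2.getFamily()) {
            if (name1.getFamily() == SqlTypeFamily.NUMERIC) {
                // Enum declaration order puts wider numeric types later,
                // so compareTo acts as a width check.
                return name1.compareTo(name2) >= 0;
            }
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(canAssignFrom(SqlTypeName.BIGINT, SqlTypeName.TINYINT));  // true
        System.out.println(canAssignFrom(SqlTypeName.TINYINT, SqlTypeName.BIGINT));  // false
        System.out.println(canAssignFrom(SqlTypeName.BIGINT, SqlTypeName.VARCHAR));  // false
    }
}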

Example 100 with SqlTypeName

use of org.apache.calcite.sql.type.SqlTypeName in project calcite by apache.

the class RexUtil method isLosslessCast.

/**
 * Returns whether the input is a 'loss-less' cast, that is, a cast from which
 * the original value of the field can be certainly recovered.
 *
 * <p>For instance, int &rarr; bigint is loss-less (as you can cast back to
 * int without loss of information), but bigint &rarr; int is not loss-less.
 *
 * <p>The implementation of this method does not return false positives.
 * However, it is not complete.
 */
public static boolean isLosslessCast(RexNode node) {
    if (!node.isA(SqlKind.CAST)) {
        return false;
    }
    final RelDataType source = ((RexCall) node).getOperands().get(0).getType();
    final SqlTypeName sourceSqlTypeName = source.getSqlTypeName();
    final RelDataType target = node.getType();
    final SqlTypeName targetSqlTypeName = target.getSqlTypeName();
    // 1) Both INT numeric types
    if (SqlTypeFamily.INTEGER.getTypeNames().contains(sourceSqlTypeName) && SqlTypeFamily.INTEGER.getTypeNames().contains(targetSqlTypeName)) {
        return targetSqlTypeName.compareTo(sourceSqlTypeName) >= 0;
    }
    // 2) Both CHARACTER types: it depends on the precision (length)
    if (SqlTypeFamily.CHARACTER.getTypeNames().contains(sourceSqlTypeName) && SqlTypeFamily.CHARACTER.getTypeNames().contains(targetSqlTypeName)) {
        return targetSqlTypeName.compareTo(sourceSqlTypeName) >= 0 && source.getPrecision() <= target.getPrecision();
    }
    // 3) From NUMERIC family to CHARACTER family: it depends on the precision/scale
    if (sourceSqlTypeName.getFamily() == SqlTypeFamily.NUMERIC && targetSqlTypeName.getFamily() == SqlTypeFamily.CHARACTER) {
        // include sign
        int sourceLength = source.getPrecision() + 1;
        if (source.getScale() != -1 && source.getScale() != 0) {
            // include decimal mark
            sourceLength += source.getScale() + 1;
        }
        return target.getPrecision() >= sourceLength;
    }
    // Return FALSE by default
    return false;
}
Also used : SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) RelDataType(org.apache.calcite.rel.type.RelDataType)
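
RexUtil.isLosslessCast is public, so it can be exercised directly. A minimal sketch (the class name LosslessCastSketch is hypothetical) that builds two casts over input refs and checks rule 1 above:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public class LosslessCastSketch {
    public static void main(String[] args) {
        SqlTypeFactoryImpl typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RexBuilder rexBuilder = new RexBuilder(typeFactory);
        RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
        RelDataType bigintType = typeFactory.createSqlType(SqlTypeName.BIGINT);
        // INT -> BIGINT widens, so the cast is loss-less; BIGINT -> INT is not.
        RexNode widen = rexBuilder.makeCast(bigintType, rexBuilder.makeInputRef(intType, 0));
        RexNode narrow = rexBuilder.makeCast(intType, rexBuilder.makeInputRef(bigintType, 0));
        System.out.println(RexUtil.isLosslessCast(widen));   // expected: true
        System.out.println(RexUtil.isLosslessCast(narrow));  // expected: false
    }
}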

Aggregations

SqlTypeName (org.apache.calcite.sql.type.SqlTypeName): 117
Test (org.junit.Test): 38
RelDataType (org.apache.calcite.rel.type.RelDataType): 28
RexNode (org.apache.calcite.rex.RexNode): 18
BigDecimal (java.math.BigDecimal): 13
ArrayList (java.util.ArrayList): 13
List (java.util.List): 11
RelDataTypeField (org.apache.calcite.rel.type.RelDataTypeField): 9
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory): 8
ImmutableList (com.google.common.collect.ImmutableList): 7
Map (java.util.Map): 7
DateString (org.apache.calcite.util.DateString): 7
TimeString (org.apache.calcite.util.TimeString): 7
TimestampString (org.apache.calcite.util.TimestampString): 7
ISE (io.druid.java.util.common.ISE): 6
SqlKind (org.apache.calcite.sql.SqlKind): 6
NlsString (org.apache.calcite.util.NlsString): 6
Calendar (java.util.Calendar): 5
Nullable (javax.annotation.Nullable): 5
RexBuilder (org.apache.calcite.rex.RexBuilder): 5