
Example 11 with ColumnType

Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.

From class ArrayConcatSqlAggregator, method toDruidAggregation:

@Nullable
@Override
public Aggregation toDruidAggregation(
    PlannerContext plannerContext,
    RowSignature rowSignature,
    VirtualColumnRegistry virtualColumnRegistry,
    RexBuilder rexBuilder,
    String name,
    AggregateCall aggregateCall,
    Project project,
    List<Aggregation> existingAggregations,
    boolean finalizeAggregations
) {
    final List<RexNode> arguments = aggregateCall.getArgList()
                                                 .stream()
                                                 .map(i -> Expressions.fromFieldAccess(rowSignature, project, i))
                                                 .collect(Collectors.toList());
    Integer maxSizeBytes = null;
    if (arguments.size() > 1) {
        RexNode maxBytes = arguments.get(1);
        if (!maxBytes.isA(SqlKind.LITERAL)) {
            // maxBytes must be a literal
            return null;
        }
        maxSizeBytes = ((Number) RexLiteral.value(maxBytes)).intValue();
    }
    final DruidExpression arg = Expressions.toDruidExpression(plannerContext, rowSignature, arguments.get(0));
    final ExprMacroTable macroTable = plannerContext.getExprMacroTable();
    final String fieldName;
    final ColumnType druidType = Calcites.getValueTypeForRelDataTypeFull(aggregateCall.getType());
    if (druidType == null || !druidType.isArray()) {
        // must be an array
        return null;
    }
    final String initialvalue = ExpressionType.fromColumnTypeStrict(druidType).asTypeString() + "[]";
    if (arg.isDirectColumnAccess()) {
        fieldName = arg.getDirectColumn();
    } else {
        VirtualColumn vc = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, druidType);
        fieldName = vc.getOutputName();
    }
    if (aggregateCall.isDistinct()) {
        // DISTINCT: fold and combine with array_set_add_all so each element is kept once.
        return Aggregation.create(new ExpressionLambdaAggregatorFactory(
            name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false,
            StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", fieldName),
            StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", name),
            null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable
        ));
    } else {
        // Non-DISTINCT: fold and combine with array_concat, preserving duplicates.
        return Aggregation.create(new ExpressionLambdaAggregatorFactory(
            name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false,
            StringUtils.format("array_concat(\"__acc\", \"%s\")", fieldName),
            StringUtils.format("array_concat(\"__acc\", \"%s\")", name),
            null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable
        ));
    }
}
Also used : Project(org.apache.calcite.rel.core.Project) SqlAggregator(org.apache.druid.sql.calcite.aggregation.SqlAggregator) ReturnTypes(org.apache.calcite.sql.type.ReturnTypes) DruidExpression(org.apache.druid.sql.calcite.expression.DruidExpression) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) Optionality(org.apache.calcite.util.Optionality) RexNode(org.apache.calcite.rex.RexNode) ExpressionType(org.apache.druid.math.expr.ExpressionType) VirtualColumnRegistry(org.apache.druid.sql.calcite.rel.VirtualColumnRegistry) PlannerContext(org.apache.druid.sql.calcite.planner.PlannerContext) Nullable(javax.annotation.Nullable) ImmutableSet(com.google.common.collect.ImmutableSet) SqlKind(org.apache.calcite.sql.SqlKind) ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) InferTypes(org.apache.calcite.sql.type.InferTypes) RexBuilder(org.apache.calcite.rex.RexBuilder) RexLiteral(org.apache.calcite.rex.RexLiteral) VirtualColumn(org.apache.druid.segment.VirtualColumn) SqlFunctionCategory(org.apache.calcite.sql.SqlFunctionCategory) StringUtils(org.apache.druid.java.util.common.StringUtils) Aggregation(org.apache.druid.sql.calcite.aggregation.Aggregation) Collectors(java.util.stream.Collectors) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) List(java.util.List) RowSignature(org.apache.druid.segment.column.RowSignature) OperandTypes(org.apache.calcite.sql.type.OperandTypes) ColumnType(org.apache.druid.segment.column.ColumnType) AggregateCall(org.apache.calcite.rel.core.AggregateCall) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) Calcites(org.apache.druid.sql.calcite.planner.Calcites) Expressions(org.apache.druid.sql.calcite.expression.Expressions)
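
To make the generated fold concrete: below is a dependency-free sketch of what the array_concat and array_set_add_all fold/combine expressions compute over the "__acc" accumulator. The class and method names are illustrative stand-ins, not Druid APIs.

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;

// Dependency-free sketch of the fold/combine shape encoded by the generated
// expressions. Illustrative only; these are not Druid classes.
public class ArrayConcatFoldSketch
{
    // Per-row fold: array_concat("__acc", "<field>") appends every element of
    // the row's array to the accumulator.
    static List<String> foldConcat(List<String> acc, List<String> rowValues)
    {
        acc.addAll(rowValues);
        return acc;
    }

    // DISTINCT variant: array_set_add_all("__acc", "<field>") keeps each element once.
    static LinkedHashSet<String> foldSetAddAll(LinkedHashSet<String> acc, List<String> rowValues)
    {
        acc.addAll(rowValues);
        return acc;
    }

    public static void main(String[] args)
    {
        List<String> acc = new ArrayList<>();          // initial value: empty ARRAY<STRING>
        foldConcat(acc, List.of("a", "b"));
        foldConcat(acc, List.of("b", "c"));
        System.out.println(acc);                       // [a, b, b, c]

        LinkedHashSet<String> distinctAcc = new LinkedHashSet<>();
        foldSetAddAll(distinctAcc, List.of("a", "b"));
        foldSetAddAll(distinctAcc, List.of("b", "c"));
        System.out.println(distinctAcc);               // [a, b, c]
    }
}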

Example 12 with ColumnType

Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.

From class Expressions, method literalToDruidExpression:

@Nullable
private static DruidExpression literalToDruidExpression(final PlannerContext plannerContext, final RexNode rexNode) {
    final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName();
    // Translate literal.
    final ColumnType columnType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
    if (RexLiteral.isNullLiteral(rexNode)) {
        return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
    } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) {
        return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral((Number) RexLiteral.value(rexNode)));
    } else if (SqlTypeFamily.INTERVAL_DAY_TIME == sqlTypeName.getFamily()) {
        // Calcite represents DAY-TIME intervals in milliseconds.
        final long milliseconds = ((Number) RexLiteral.value(rexNode)).longValue();
        return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(milliseconds));
    } else if (SqlTypeFamily.INTERVAL_YEAR_MONTH == sqlTypeName.getFamily()) {
        // Calcite represents YEAR-MONTH intervals in months.
        final long months = ((Number) RexLiteral.value(rexNode)).longValue();
        return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(months));
    } else if (SqlTypeName.STRING_TYPES.contains(sqlTypeName)) {
        return DruidExpression.ofStringLiteral(RexLiteral.stringValue(rexNode));
    } else if (SqlTypeName.TIMESTAMP == sqlTypeName || SqlTypeName.DATE == sqlTypeName) {
        if (RexLiteral.isNullLiteral(rexNode)) {
            return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
        } else {
            return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(Calcites.calciteDateTimeLiteralToJoda(rexNode, plannerContext.getTimeZone()).getMillis()));
        }
    } else if (SqlTypeName.BOOLEAN == sqlTypeName) {
        return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(RexLiteral.booleanValue(rexNode) ? 1 : 0));
    } else {
        // Can't translate other literals.
        return null;
    }
}
Also used : ColumnType(org.apache.druid.segment.column.ColumnType) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) Nullable(javax.annotation.Nullable)
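
A standalone sketch of the interval and boolean branches above: Calcite hands DAY-TIME intervals to the planner already scaled to milliseconds and YEAR-MONTH intervals already scaled to months, so the translation emits the number unchanged, and booleans become 1 or 0. The enum and helper below are hypothetical stand-ins, not Calcite or Druid types.

// Standalone sketch: mirrors how interval and boolean literals are rendered as
// plain expression literals. Kind and toDruidLiteral are hypothetical.
public class LiteralMappingSketch
{
    enum Kind { NUMERIC, INTERVAL_DAY_TIME, INTERVAL_YEAR_MONTH, STRING, BOOLEAN }

    static String toDruidLiteral(Kind kind, Object value)
    {
        switch (kind) {
            case INTERVAL_DAY_TIME:   // Calcite value is already in milliseconds
            case INTERVAL_YEAR_MONTH: // Calcite value is already in months
            case NUMERIC:
                return String.valueOf(value);
            case STRING:
                return "'" + value + "'";
            case BOOLEAN:
                return ((Boolean) value) ? "1" : "0";
            default:
                return null; // untranslatable literal kinds
        }
    }

    public static void main(String[] args)
    {
        System.out.println(toDruidLiteral(Kind.INTERVAL_DAY_TIME, 86_400_000L)); // INTERVAL '1' DAY
        System.out.println(toDruidLiteral(Kind.INTERVAL_YEAR_MONTH, 14L));       // INTERVAL '1-2' YEAR TO MONTH
        System.out.println(toDruidLiteral(Kind.BOOLEAN, Boolean.TRUE));          // 1
    }
}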

Example 13 with ColumnType

Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.

From class TimeArithmeticOperatorConversion, method toDruidExpression:

@Override
public DruidExpression toDruidExpression(final PlannerContext plannerContext, final RowSignature rowSignature, final RexNode rexNode) {
    final RexCall call = (RexCall) rexNode;
    final List<RexNode> operands = call.getOperands();
    if (operands.size() != 2) {
        throw new IAE("Expected 2 args, got %s", operands.size());
    }
    final RexNode leftRexNode = operands.get(0);
    final RexNode rightRexNode = operands.get(1);
    final DruidExpression leftExpr = Expressions.toDruidExpression(plannerContext, rowSignature, leftRexNode);
    final DruidExpression rightExpr = Expressions.toDruidExpression(plannerContext, rowSignature, rightRexNode);
    if (leftExpr == null || rightExpr == null) {
        return null;
    }
    final ColumnType outputType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
    if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
        // Period is a value in months.
        return DruidExpression.ofExpression(outputType, DruidExpression.functionCall("timestamp_shift"), ImmutableList.of(leftExpr, rightExpr.map(simpleExtraction -> null, expression -> rightRexNode.isA(SqlKind.LITERAL) ? StringUtils.format("'P%sM'", RexLiteral.value(rightRexNode)) : StringUtils.format("concat('P', %s, 'M')", expression)), DruidExpression.ofLiteral(ColumnType.LONG, DruidExpression.numberLiteral(direction > 0 ? 1 : -1)), DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())));
    } else if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_DAY_TIME) {
        // Period is a value in milliseconds. Ignore time zone.
        return DruidExpression.ofExpression(
            outputType,
            (args) -> StringUtils.format("(%s %s %s)", args.get(0).getExpression(), direction > 0 ? "+" : "-", args.get(1).getExpression()),
            ImmutableList.of(leftExpr, rightExpr)
        );
    } else if ((leftRexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP || leftRexNode.getType().getFamily() == SqlTypeFamily.DATE) && (rightRexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP || rightRexNode.getType().getFamily() == SqlTypeFamily.DATE)) {
        // Calcite represents both TIMESTAMP - INTERVAL and TIMESTAMPDIFF (TIMESTAMP - TIMESTAMP)
        // with a MINUS_DATE operator, so we must tell which case we're in by checking the type of
        // the second argument.
        Preconditions.checkState(direction < 0, "Time arithmetic require direction < 0");
        if (call.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
            return DruidExpression.ofExpression(outputType, DruidExpression.functionCall("subtract_months"), ImmutableList.of(leftExpr, rightExpr, DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())));
        } else {
            return DruidExpression.ofExpression(
                outputType,
                (args) -> StringUtils.format("(%s %s %s)", args.get(0).getExpression(), "-", args.get(1).getExpression()),
                ImmutableList.of(leftExpr, rightExpr)
            );
        }
    } else {
        // Shouldn't happen if subclasses are behaving.
        throw new ISE("Got unexpected type period type family[%s]", rightRexNode.getType().getFamily());
    }
}
Also used : RexCall(org.apache.calcite.rex.RexCall) SqlKind(org.apache.calcite.sql.SqlKind) SqlTypeFamily(org.apache.calcite.sql.type.SqlTypeFamily) SqlOperatorConversion(org.apache.druid.sql.calcite.expression.SqlOperatorConversion) RexLiteral(org.apache.calcite.rex.RexLiteral) StringUtils(org.apache.druid.java.util.common.StringUtils) ISE(org.apache.druid.java.util.common.ISE) DruidExpression(org.apache.druid.sql.calcite.expression.DruidExpression) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) SqlStdOperatorTable(org.apache.calcite.sql.fun.SqlStdOperatorTable) RexNode(org.apache.calcite.rex.RexNode) RowSignature(org.apache.druid.segment.column.RowSignature) PlannerContext(org.apache.druid.sql.calcite.planner.PlannerContext) ColumnType(org.apache.druid.segment.column.ColumnType) Preconditions(com.google.common.base.Preconditions) SqlOperator(org.apache.calcite.sql.SqlOperator) IAE(org.apache.druid.java.util.common.IAE) Calcites(org.apache.druid.sql.calcite.planner.Calcites) Expressions(org.apache.druid.sql.calcite.expression.Expressions)
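
The month-interval branch above builds an ISO-8601 period string for timestamp_shift: a literal interval becomes a fixed string such as 'P3M', while a non-literal operand is wrapped in concat() so the period is assembled at query time. A small standalone sketch of that rendering (the helper name is hypothetical, not a Druid API):

// Dependency-free sketch of how the month-interval operand is rendered.
public class PeriodOperandSketch
{
    static String periodOperand(boolean isLiteral, String operandExpression)
    {
        return isLiteral
               ? String.format("'P%sM'", operandExpression)                    // e.g. 'P3M'
               : String.format("concat('P', %s, 'M')", operandExpression);     // built per row
    }

    public static void main(String[] args)
    {
        // TIMESTAMP + INTERVAL '3' MONTH  ->  timestamp_shift(t, 'P3M', 1, <tz>)
        System.out.println(periodOperand(true, "3"));
        // TIMESTAMP + (n months, non-literal)  ->  timestamp_shift(t, concat('P', "n", 'M'), 1, <tz>)
        System.out.println(periodOperand(false, "\"n\""));
    }
}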

Example 14 with ColumnType

Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.

From class InputRowSerde, method fromBytes:

public static InputRow fromBytes(final Map<String, IndexSerdeTypeHelper> typeHelperMap, byte[] data, AggregatorFactory[] aggs) {
    try {
        ByteArrayDataInput in = ByteStreams.newDataInput(data);
        // Read timestamp
        long timestamp = in.readLong();
        Map<String, Object> event = new HashMap<>();
        // Read dimensions
        List<String> dimensions = new ArrayList<>();
        int dimNum = WritableUtils.readVInt(in);
        for (int i = 0; i < dimNum; i++) {
            String dimension = readString(in);
            dimensions.add(dimension);
            IndexSerdeTypeHelper typeHelper = typeHelperMap.get(dimension);
            if (typeHelper == null) {
                typeHelper = STRING_HELPER;
            }
            Object dimValues = typeHelper.deserialize(in);
            if (dimValues == null) {
                continue;
            }
            if (typeHelper.getType() == ValueType.STRING) {
                List<String> dimensionValues = (List<String>) dimValues;
                if (dimensionValues.size() == 1) {
                    event.put(dimension, dimensionValues.get(0));
                } else {
                    event.put(dimension, dimensionValues);
                }
            } else {
                event.put(dimension, dimValues);
            }
        }
        // Read metrics
        int metricSize = WritableUtils.readVInt(in);
        for (int i = 0; i < metricSize; i++) {
            final String metric = readString(in);
            final AggregatorFactory agg = getAggregator(metric, aggs, i);
            final ColumnType type = agg.getIntermediateType();
            final byte metricNullability = in.readByte();
            if (metricNullability == NullHandling.IS_NULL_BYTE) {
                // metric value is null.
                continue;
            }
            if (type.is(ValueType.FLOAT)) {
                event.put(metric, in.readFloat());
            } else if (type.is(ValueType.LONG)) {
                event.put(metric, WritableUtils.readVLong(in));
            } else if (type.is(ValueType.DOUBLE)) {
                event.put(metric, in.readDouble());
            } else {
                ComplexMetricSerde serde = getComplexMetricSerde(agg.getIntermediateType().getComplexTypeName());
                byte[] value = readBytes(in);
                event.put(metric, serde.fromBytes(value, 0, value.length));
            }
        }
        return new MapBasedInputRow(timestamp, dimensions, event);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
Also used : ColumnType(org.apache.druid.segment.column.ColumnType) ComplexMetricSerde(org.apache.druid.segment.serde.ComplexMetricSerde) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) IOException(java.io.IOException) ByteArrayDataInput(com.google.common.io.ByteArrayDataInput) AggregatorFactory(org.apache.druid.query.aggregation.AggregatorFactory) List(java.util.List) MapBasedInputRow(org.apache.druid.data.input.MapBasedInputRow)
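
A simplified, dependency-free round trip that mirrors the field ordering fromBytes reads: timestamp, then dimension count and values, then metric count and, for each metric, a nullability byte followed by the typed value. The real code uses Hadoop WritableUtils vints and Druid's NullHandling constants; plain DataInput/DataOutput fields stand in for them here, so this is illustrative only.

import java.io.*;

// Illustrative sketch of the wire layout read by fromBytes(); not the actual
// Druid serde format (no vints, no NullHandling constants).
public class RowSerdeSketch
{
    public static void main(String[] args) throws IOException
    {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);

        out.writeLong(1_700_000_000_000L);   // timestamp
        out.writeInt(1);                     // dimension count
        out.writeUTF("country");             // dimension name
        out.writeUTF("US");                  // single string value
        out.writeInt(1);                     // metric count
        out.writeUTF("clicks");              // metric name
        out.writeByte(0);                    // 0 = not null
        out.writeLong(42L);                  // LONG metric value

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        long timestamp = in.readLong();
        int dimCount = in.readInt();
        String dim = in.readUTF();
        String dimValue = in.readUTF();
        int metricCount = in.readInt();
        String metric = in.readUTF();
        byte nullability = in.readByte();
        long metricValue = nullability == 0 ? in.readLong() : 0L;

        System.out.printf("%d dims=%d metrics=%d %s=%s %s=%d%n",
                          timestamp, dimCount, metricCount, dim, dimValue, metric, metricValue);
    }
}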

Example 15 with ColumnType

Use of org.apache.druid.segment.column.ColumnType in project druid by druid-io.

From class InlineDataSource, method fromJson:

/**
 * Factory method for Jackson. Used for inline datasources that were originally encoded as JSON. Private because
 * non-Jackson callers should use {@link #fromIterable}.
 */
@JsonCreator
private static InlineDataSource fromJson(
    @JsonProperty("columnNames") List<String> columnNames,
    @JsonProperty("columnTypes") List<ColumnType> columnTypes,
    @JsonProperty("rows") List<Object[]> rows
) {
    Preconditions.checkNotNull(columnNames, "'columnNames' must be nonnull");
    if (columnTypes != null && columnNames.size() != columnTypes.size()) {
        throw new IAE("columnNames and columnTypes must be the same length");
    }
    final RowSignature.Builder builder = RowSignature.builder();
    for (int i = 0; i < columnNames.size(); i++) {
        final String name = columnNames.get(i);
        final ColumnType type = columnTypes != null ? columnTypes.get(i) : null;
        builder.add(name, type);
    }
    return new InlineDataSource(rows, builder.build());
}
Also used : ColumnType(org.apache.druid.segment.column.ColumnType) IAE(org.apache.druid.java.util.common.IAE) RowSignature(org.apache.druid.segment.column.RowSignature) JsonCreator(com.fasterxml.jackson.annotation.JsonCreator)
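
For context, this factory is what Jackson invokes when a native query carries an inline datasource, whose JSON holds parallel columnNames, columnTypes, and rows fields. A minimal sketch of reading such a payload with plain Jackson; the JSON below is a hand-written approximation (the exact type spellings in columnTypes are an assumption), and only the columnNames/columnTypes pairing that fromJson validates is shown.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Sketch of the JSON shape consumed by the @JsonCreator above. The payload is
// an approximation for illustration, not output captured from Druid.
public class InlineDataSourceJsonSketch
{
    public static void main(String[] args) throws Exception
    {
        String json = "{"
                      + "\"type\": \"inline\","
                      + "\"columnNames\": [\"country\", \"clicks\"],"
                      + "\"columnTypes\": [\"STRING\", \"LONG\"],"
                      + "\"rows\": [[\"US\", 10], [\"CA\", 7]]"
                      + "}";

        JsonNode node = new ObjectMapper().readTree(json);
        // fromJson pairs columnNames[i] with columnTypes[i]; both lists must be the same length.
        for (int i = 0; i < node.get("columnNames").size(); i++) {
            System.out.println(node.get("columnNames").get(i).asText()
                               + " : " + node.get("columnTypes").get(i).asText());
        }
    }
}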

Aggregations

ColumnType (org.apache.druid.segment.column.ColumnType): 43
Nullable (javax.annotation.Nullable): 16
ISE (org.apache.druid.java.util.common.ISE): 15
RowSignature (org.apache.druid.segment.column.RowSignature): 14
AggregatorFactory (org.apache.druid.query.aggregation.AggregatorFactory): 13
RexNode (org.apache.calcite.rex.RexNode): 12
DruidExpression (org.apache.druid.sql.calcite.expression.DruidExpression): 12
List (java.util.List): 11
IAE (org.apache.druid.java.util.common.IAE): 11
RelDataType (org.apache.calcite.rel.type.RelDataType): 9
DimensionSpec (org.apache.druid.query.dimension.DimensionSpec): 9
Collectors (java.util.stream.Collectors): 8
ArrayList (java.util.ArrayList): 7
SqlAggFunction (org.apache.calcite.sql.SqlAggFunction): 5
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec): 5
StringComparator (org.apache.druid.query.ordering.StringComparator): 5
Aggregation (org.apache.druid.sql.calcite.aggregation.Aggregation): 5
JsonCreator (com.fasterxml.jackson.annotation.JsonCreator): 4
Preconditions (com.google.common.base.Preconditions): 4
Collections (java.util.Collections): 4