
Example 36 with HumanReadableBytes

Use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.

From the class UserCompactionTaskQueryTuningConfigTest, method testSerde:

@Test
public void testSerde() throws IOException {
    final UserCompactionTaskQueryTuningConfig tuningConfig = new UserCompactionTaskQueryTuningConfig(40000, 2000L, null, new SegmentsSplitHintSpec(new HumanReadableBytes(42L), null), new DynamicPartitionsSpec(1000, 20000L), new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS), new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO), 2, 1000L, TmpFileSegmentWriteOutMediumFactory.instance(), 100, 5, 1000L, new Duration(3000L), 7, 1000, 100);
    final String json = OBJECT_MAPPER.writeValueAsString(tuningConfig);
    final UserCompactionTaskQueryTuningConfig fromJson = OBJECT_MAPPER.readValue(json, UserCompactionTaskQueryTuningConfig.class);
    Assert.assertEquals(tuningConfig, fromJson);
}
Also used : SegmentsSplitHintSpec(org.apache.druid.data.input.SegmentsSplitHintSpec) IndexSpec(org.apache.druid.segment.IndexSpec) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) DefaultBitmapSerdeFactory(org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory) Duration(org.joda.time.Duration) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) Test(org.junit.Test)
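
For orientation, a minimal sketch of constructing HumanReadableBytes directly. The long constructor and getBytes() accessor are the ones exercised in these examples; the String constructor accepting human-readable values such as "1KiB" is an assumption about current Druid versions.

// Exact byte count, as in the test above.
HumanReadableBytes fromLong = new HumanReadableBytes(42L);
long bytes = fromLong.getBytes(); // 42

// Assumption: the class also parses human-readable strings ("1KiB", "500M", ...).
HumanReadableBytes fromString = new HumanReadableBytes("1KiB"); // 1024 bytes, assuming binary-suffix parsing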

Example 37 with HumanReadableBytes

Use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.

From the class DataSourceCompactionConfigTest, method testSerdeUserCompactionTuningConfig:

@Test
public void testSerdeUserCompactionTuningConfig() throws IOException {
    final UserCompactionTaskQueryTuningConfig tuningConfig = new UserCompactionTaskQueryTuningConfig(40000, 2000L, null, new SegmentsSplitHintSpec(new HumanReadableBytes(100000L), null), new DynamicPartitionsSpec(1000, 20000L), new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.LZF, LongEncodingStrategy.LONGS), new IndexSpec(new DefaultBitmapSerdeFactory(), CompressionStrategy.LZ4, CompressionStrategy.UNCOMPRESSED, LongEncodingStrategy.AUTO), 2, 1000L, TmpFileSegmentWriteOutMediumFactory.instance(), 100, 5, 1000L, new Duration(3000L), 7, 1000, 100);
    final String json = OBJECT_MAPPER.writeValueAsString(tuningConfig);
    final UserCompactionTaskQueryTuningConfig fromJson = OBJECT_MAPPER.readValue(json, UserCompactionTaskQueryTuningConfig.class);
    Assert.assertEquals(tuningConfig, fromJson);
}
Also used : SegmentsSplitHintSpec(org.apache.druid.data.input.SegmentsSplitHintSpec) IndexSpec(org.apache.druid.segment.IndexSpec) DynamicPartitionsSpec(org.apache.druid.indexer.partitions.DynamicPartitionsSpec) DefaultBitmapSerdeFactory(org.apache.druid.segment.data.BitmapSerde.DefaultBitmapSerdeFactory) Duration(org.joda.time.Duration) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
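
A narrower round trip of just the byte-size value can be sketched as follows. This is a hypothetical variant, assuming (as suggested by the nested field surviving the config round trip above) that HumanReadableBytes serializes as its raw byte count and deserializes from either a number or a human-readable string.

// OBJECT_MAPPER stands for a Jackson ObjectMapper configured as in the tests above.
String json = OBJECT_MAPPER.writeValueAsString(new HumanReadableBytes(100000L)); // assumed to produce "100000"
HumanReadableBytes roundTripped = OBJECT_MAPPER.readValue(json, HumanReadableBytes.class);
Assert.assertEquals(new HumanReadableBytes(100000L), roundTripped);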

Example 38 with HumanReadableBytes

Use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.

From the class StringSqlAggregator, method toDruidAggregation:

@Nullable
@Override
public Aggregation toDruidAggregation(PlannerContext plannerContext, RowSignature rowSignature, VirtualColumnRegistry virtualColumnRegistry, RexBuilder rexBuilder, String name, AggregateCall aggregateCall, Project project, List<Aggregation> existingAggregations, boolean finalizeAggregations) {
    final List<DruidExpression> arguments = aggregateCall.getArgList().stream().map(i -> Expressions.fromFieldAccess(rowSignature, project, i)).map(rexNode -> Expressions.toDruidExpression(plannerContext, rowSignature, rexNode)).collect(Collectors.toList());
    if (arguments.stream().anyMatch(Objects::isNull)) {
        return null;
    }
    RexNode separatorNode = Expressions.fromFieldAccess(rowSignature, project, aggregateCall.getArgList().get(1));
    if (!separatorNode.isA(SqlKind.LITERAL)) {
        // separator must be a literal
        return null;
    }
    String separator = RexLiteral.stringValue(separatorNode);
    if (separator == null) {
        // separator must not be null
        return null;
    }
    Integer maxSizeBytes = null;
    if (arguments.size() > 2) {
        RexNode maxBytes = Expressions.fromFieldAccess(rowSignature, project, aggregateCall.getArgList().get(2));
        if (!maxBytes.isA(SqlKind.LITERAL)) {
            // maxBytes must be a literal
            return null;
        }
        maxSizeBytes = ((Number) RexLiteral.value(maxBytes)).intValue();
    }
    final DruidExpression arg = arguments.get(0);
    final ExprMacroTable macroTable = plannerContext.getExprMacroTable();
    final String initialvalue = "[]";
    final ColumnType elementType = ColumnType.STRING;
    final String fieldName;
    if (arg.isDirectColumnAccess()) {
        fieldName = arg.getDirectColumn();
    } else {
        fieldName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, elementType);
    }
    final String finalizer = StringUtils.format("if(array_length(o) == 0, null, array_to_string(o, '%s'))", separator);
    final NotDimFilter dimFilter = new NotDimFilter(new SelectorDimFilter(fieldName, null, null));
    if (aggregateCall.isDistinct()) {
        // string_agg ignores nulls: wrap the factory in a filter that excludes rows where the input column is null
        return Aggregation.create(new FilteredAggregatorFactory(new ExpressionLambdaAggregatorFactory(name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false, StringUtils.format("array_set_add(\"__acc\", \"%s\")", fieldName), StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", name), null, finalizer, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable), dimFilter));
    } else {
        // string_agg ignores nulls: wrap the factory in the same null-excluding filter
        return Aggregation.create(new FilteredAggregatorFactory(new ExpressionLambdaAggregatorFactory(name, ImmutableSet.of(fieldName), null, initialvalue, null, true, false, false, StringUtils.format("array_append(\"__acc\", \"%s\")", fieldName), StringUtils.format("array_concat(\"__acc\", \"%s\")", name), null, finalizer, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable), dimFilter));
    }
}
Also used : Project(org.apache.calcite.rel.core.Project) SqlAggregator(org.apache.druid.sql.calcite.aggregation.SqlAggregator) FilteredAggregatorFactory(org.apache.druid.query.aggregation.FilteredAggregatorFactory) RowSignatures(org.apache.druid.sql.calcite.table.RowSignatures) UnsupportedSQLQueryException(org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException) DruidExpression(org.apache.druid.sql.calcite.expression.DruidExpression) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) Optionality(org.apache.calcite.util.Optionality) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) RexNode(org.apache.calcite.rex.RexNode) VirtualColumnRegistry(org.apache.druid.sql.calcite.rel.VirtualColumnRegistry) PlannerContext(org.apache.druid.sql.calcite.planner.PlannerContext) Nullable(javax.annotation.Nullable) SqlOperatorBinding(org.apache.calcite.sql.SqlOperatorBinding) RelDataType(org.apache.calcite.rel.type.RelDataType) ImmutableSet(com.google.common.collect.ImmutableSet) SqlKind(org.apache.calcite.sql.SqlKind) SqlTypeFamily(org.apache.calcite.sql.type.SqlTypeFamily) ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) NotDimFilter(org.apache.druid.query.filter.NotDimFilter) SqlTypeName(org.apache.calcite.sql.type.SqlTypeName) InferTypes(org.apache.calcite.sql.type.InferTypes) RexBuilder(org.apache.calcite.rex.RexBuilder) RexLiteral(org.apache.calcite.rex.RexLiteral) SqlFunctionCategory(org.apache.calcite.sql.SqlFunctionCategory) StringUtils(org.apache.druid.java.util.common.StringUtils) Aggregation(org.apache.druid.sql.calcite.aggregation.Aggregation) Collectors(java.util.stream.Collectors) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) Objects(java.util.Objects) SqlReturnTypeInference(org.apache.calcite.sql.type.SqlReturnTypeInference) List(java.util.List) RowSignature(org.apache.druid.segment.column.RowSignature) OperandTypes(org.apache.calcite.sql.type.OperandTypes) ColumnType(org.apache.druid.segment.column.ColumnType) AggregateCall(org.apache.calcite.rel.core.AggregateCall) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) Calcites(org.apache.druid.sql.calcite.planner.Calcites) Expressions(org.apache.druid.sql.calcite.expression.Expressions)
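
For context, a minimal standalone sketch of the two pieces above that involve HumanReadableBytes and null handling: the optional size cap taken from the third SQL argument, and the null-excluding filter. The column name "page" and the literal 100000 are hypothetical; the classes and constructors are the same ones used in the snippet.

// Hypothetical stand-ins for the planner-derived values above.
String fieldName = "page"; // hypothetical column name
Integer maxSizeBytes = 100000; // hypothetical third SQL argument; null when omitted

// Assumption: passing null lets ExpressionLambdaAggregatorFactory fall back to its default buffer size.
HumanReadableBytes maxBuffer = maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null;

// NOT(column IS NULL): rows whose input value is null never reach the fold expression.
NotDimFilter ignoreNulls = new NotDimFilter(new SelectorDimFilter(fieldName, null, null));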

Example 39 with HumanReadableBytes

Use of org.apache.druid.java.util.common.HumanReadableBytes in project druid by druid-io.

From the class ArraySqlAggregator, method toDruidAggregation:

@Nullable
@Override
public Aggregation toDruidAggregation(PlannerContext plannerContext, RowSignature rowSignature, VirtualColumnRegistry virtualColumnRegistry, RexBuilder rexBuilder, String name, AggregateCall aggregateCall, Project project, List<Aggregation> existingAggregations, boolean finalizeAggregations) {
    final List<RexNode> arguments = aggregateCall.getArgList().stream().map(i -> Expressions.fromFieldAccess(rowSignature, project, i)).collect(Collectors.toList());
    Integer maxSizeBytes = null;
    if (arguments.size() > 1) {
        RexNode maxBytes = arguments.get(1);
        if (!maxBytes.isA(SqlKind.LITERAL)) {
            // maxBytes must be a literal
            return null;
        }
        maxSizeBytes = ((Number) RexLiteral.value(maxBytes)).intValue();
    }
    final DruidExpression arg = Expressions.toDruidExpression(plannerContext, rowSignature, arguments.get(0));
    if (arg == null) {
        // can't translate argument
        return null;
    }
    final ExprMacroTable macroTable = plannerContext.getExprMacroTable();
    final String fieldName;
    final String initialvalue;
    final ColumnType druidType = Calcites.getValueTypeForRelDataTypeFull(aggregateCall.getType());
    final ColumnType elementType;
    if (druidType == null || !druidType.isArray()) {
        initialvalue = "[]";
        elementType = ColumnType.STRING;
    } else {
        initialvalue = ExpressionType.fromColumnTypeStrict(druidType).asTypeString() + "[]";
        elementType = (ColumnType) druidType.getElementType();
    }
    if (arg.isDirectColumnAccess()) {
        fieldName = arg.getDirectColumn();
    } else {
        fieldName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, elementType);
    }
    if (aggregateCall.isDistinct()) {
        return Aggregation.create(new ExpressionLambdaAggregatorFactory(name, ImmutableSet.of(fieldName), null, initialvalue, null, true, true, false, StringUtils.format("array_set_add(\"__acc\", \"%s\")", fieldName), StringUtils.format("array_set_add_all(\"__acc\", \"%s\")", name), null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable));
    } else {
        return Aggregation.create(new ExpressionLambdaAggregatorFactory(name, ImmutableSet.of(fieldName), null, initialvalue, null, true, true, false, StringUtils.format("array_append(\"__acc\", \"%s\")", fieldName), StringUtils.format("array_concat(\"__acc\", \"%s\")", name), null, null, maxSizeBytes != null ? new HumanReadableBytes(maxSizeBytes) : null, macroTable));
    }
}
Also used : Project(org.apache.calcite.rel.core.Project) SqlAggregator(org.apache.druid.sql.calcite.aggregation.SqlAggregator) RowSignatures(org.apache.druid.sql.calcite.table.RowSignatures) UnsupportedSQLQueryException(org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException) DruidExpression(org.apache.druid.sql.calcite.expression.DruidExpression) HumanReadableBytes(org.apache.druid.java.util.common.HumanReadableBytes) Optionality(org.apache.calcite.util.Optionality) RexNode(org.apache.calcite.rex.RexNode) ExpressionType(org.apache.druid.math.expr.ExpressionType) VirtualColumnRegistry(org.apache.druid.sql.calcite.rel.VirtualColumnRegistry) PlannerContext(org.apache.druid.sql.calcite.planner.PlannerContext) Nullable(javax.annotation.Nullable) SqlOperatorBinding(org.apache.calcite.sql.SqlOperatorBinding) RelDataType(org.apache.calcite.rel.type.RelDataType) ImmutableSet(com.google.common.collect.ImmutableSet) SqlKind(org.apache.calcite.sql.SqlKind) SqlTypeFamily(org.apache.calcite.sql.type.SqlTypeFamily) ExpressionLambdaAggregatorFactory(org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory) InferTypes(org.apache.calcite.sql.type.InferTypes) RexBuilder(org.apache.calcite.rex.RexBuilder) RexLiteral(org.apache.calcite.rex.RexLiteral) SqlFunctionCategory(org.apache.calcite.sql.SqlFunctionCategory) StringUtils(org.apache.druid.java.util.common.StringUtils) Aggregation(org.apache.druid.sql.calcite.aggregation.Aggregation) Collectors(java.util.stream.Collectors) ExprMacroTable(org.apache.druid.math.expr.ExprMacroTable) SqlReturnTypeInference(org.apache.calcite.sql.type.SqlReturnTypeInference) List(java.util.List) RowSignature(org.apache.druid.segment.column.RowSignature) OperandTypes(org.apache.calcite.sql.type.OperandTypes) ColumnType(org.apache.druid.segment.column.ColumnType) AggregateCall(org.apache.calcite.rel.core.AggregateCall) SqlAggFunction(org.apache.calcite.sql.SqlAggFunction) Calcites(org.apache.druid.sql.calcite.planner.Calcites) Expressions(org.apache.druid.sql.calcite.expression.Expressions)
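
As a side note, a minimal sketch of the initial-value computation in the array branch above, assuming a LONG array result type; the exact literal produced (something like "ARRAY<LONG>[]") is an assumption about ExpressionType.asTypeString().

// Hypothetical: ARRAY_AGG over a LONG column, so the planner reports a LONG array result type.
ColumnType druidType = ColumnType.LONG_ARRAY;

// Empty typed-array literal used as the accumulator's starting value,
// e.g. "ARRAY<LONG>[]" (exact form is an assumption about ExpressionType.asTypeString()).
String typedInitialValue = ExpressionType.fromColumnTypeStrict(druidType).asTypeString() + "[]";
String untypedInitialValue = "[]"; // fallback used above when the result type is not an array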

Aggregations

HumanReadableBytes (org.apache.druid.java.util.common.HumanReadableBytes): 39
Test (org.junit.Test): 35
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest): 19
List (java.util.List): 7
MaxSizeSplitHintSpec (org.apache.druid.data.input.MaxSizeSplitHintSpec): 6
ExpressionLambdaAggregatorFactory (org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory): 6
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 4
InputSplit (org.apache.druid.data.input.InputSplit): 4
SegmentsSplitHintSpec (org.apache.druid.data.input.SegmentsSplitHintSpec): 4
DynamicPartitionsSpec (org.apache.druid.indexer.partitions.DynamicPartitionsSpec): 4
ImmutableSet (com.google.common.collect.ImmutableSet): 3
File (java.io.File): 3
Collectors (java.util.stream.Collectors): 3
Nullable (javax.annotation.Nullable): 3
AggregateCall (org.apache.calcite.rel.core.AggregateCall): 3
Project (org.apache.calcite.rel.core.Project): 3
RexBuilder (org.apache.calcite.rex.RexBuilder): 3
RexLiteral (org.apache.calcite.rex.RexLiteral): 3
RexNode (org.apache.calcite.rex.RexNode): 3
SqlAggFunction (org.apache.calcite.sql.SqlAggFunction): 3