Example 26 with PeriodGranularity

use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

the class GroupByQueryRunnerTest method testDimFilterHavingSpec.

@Test
public void testDimFilterHavingSpec() {
    final DimFilterHavingSpec havingSpec = new DimFilterHavingSpec(
        new AndDimFilter(ImmutableList.of(
            new OrDimFilter(ImmutableList.of(
                new BoundDimFilter("rows", "2", null, true, false, null, null, StringComparators.NUMERIC),
                new SelectorDimFilter("idx", "217", null)
            )),
            new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null)
        )),
        null
    );
    GroupByQuery.Builder builder = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setInterval("2011-04-02/2011-04-04")
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(
            QueryRunnerTestHelper.ROWS_COUNT,
            new LongSumAggregatorFactory("idx", "index"),
            QueryRunnerTestHelper.INDEX_LONG_MIN, QueryRunnerTestHelper.INDEX_LONG_MAX,
            QueryRunnerTestHelper.INDEX_DOUBLE_MIN, QueryRunnerTestHelper.INDEX_DOUBLE_MAX,
            QueryRunnerTestHelper.INDEX_FLOAT_MIN, QueryRunnerTestHelper.INDEX_FLOAT_MAX
        )
        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
        .setHavingSpec(havingSpec);
    final GroupByQuery fullQuery = builder.build();
    List<ResultRow> expectedResults = Arrays.asList(
        makeRow(fullQuery, "2011-04-01", "alias", "business", "rows", 2L, "idx", 217L,
            QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 105L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 112L,
            QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 105.735462D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 112.987027D,
            QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 105.73546F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 112.98703F),
        makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L,
            QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 107L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1193L,
            QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 107.047773D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1193.556278D,
            QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 107.047775F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1193.5563F),
        makeRow(fullQuery, "2011-04-01", "alias", "premium", "rows", 6L, "idx", 4416L,
            QueryRunnerTestHelper.LONG_MIN_INDEX_METRIC, 122L, QueryRunnerTestHelper.LONG_MAX_INDEX_METRIC, 1321L,
            QueryRunnerTestHelper.DOUBLE_MIN_INDEX_METRIC, 122.141707D, QueryRunnerTestHelper.DOUBLE_MAX_INDEX_METRIC, 1321.375057D,
            QueryRunnerTestHelper.FLOAT_MIN_INDEX_METRIC, 122.14171F, QueryRunnerTestHelper.FLOAT_MAX_INDEX_METRIC, 1321.375F)
    );
    TestHelper.assertExpectedObjects(expectedResults, GroupByQueryRunnerTestHelper.runQuery(factory, runner, fullQuery), "dimfilter-havingspec");
}
Also used : DimFilterHavingSpec(org.apache.druid.query.groupby.having.DimFilterHavingSpec) BoundDimFilter(org.apache.druid.query.filter.BoundDimFilter) AndDimFilter(org.apache.druid.query.filter.AndDimFilter) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) OrDimFilter(org.apache.druid.query.filter.OrDimFilter) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
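
For reference, the PeriodGranularity(new Period("P1M"), null, null) used above buckets timestamps into calendar months in UTC. A minimal standalone sketch (not part of the test; variable names are hypothetical) showing why the expected rows carry the "2011-04-01" timestamp even though the query interval starts at 2011-04-02:

PeriodGranularity monthly = new PeriodGranularity(new Period("P1M"), null, null);
// With a null origin and null time zone, buckets align to calendar-month starts in UTC.
DateTime bucketStart = monthly.bucketStart(DateTimes.of("2011-04-02T00:00:00Z"));
// bucketStart should be 2011-04-01T00:00:00.000Z, the month bucket that all
// three expected result rows fall into.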

Example 27 with PeriodGranularity

use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

the class GroupByQueryRunnerTest method testBySegmentResults.

@Test
public void testBySegmentResults() {
    GroupByQuery.Builder builder = makeQueryBuilder()
        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .setInterval("2011-04-02/2011-04-04")
        .setDimensions(new DefaultDimensionSpec("quality", "alias"))
        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"))
        .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
        .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
        .setContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true));
    final GroupByQuery fullQuery = builder.build();
    int segmentCount = 32;
    Result<BySegmentResultValue> singleSegmentResult = new Result<>(
        DateTimes.of("2011-01-12T00:00:00.000Z"),
        new BySegmentResultValueClass<>(
            Collections.singletonList(makeRow(fullQuery, "2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L)),
            QueryRunnerTestHelper.SEGMENT_ID.toString(),
            Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z")
        )
    );
    List<Result> bySegmentResults = new ArrayList<>();
    for (int i = 0; i < segmentCount; i++) {
        bySegmentResults.add(singleSegmentResult);
    }
    QueryToolChest toolChest = factory.getToolchest();
    List<QueryRunner<ResultRow>> singleSegmentRunners = new ArrayList<>();
    for (int i = 0; i < segmentCount; i++) {
        singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner));
    }
    ExecutorService exec = Executors.newCachedThreadPool();
    // Reuse the exec created above so the later shutdownNow() actually stops the pool used here.
    QueryRunner theRunner = toolChest.postMergeQueryDecoration(
        new FinalizeResultsQueryRunner<>(
            toolChest.mergeResults(factory.mergeRunners(exec, singleSegmentRunners)),
            toolChest
        )
    );
    TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery)), "bySegment");
    exec.shutdownNow();
}
Also used : BySegmentResultValue(org.apache.druid.query.BySegmentResultValue) LongSumAggregatorFactory(org.apache.druid.query.aggregation.LongSumAggregatorFactory) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) ArrayList(java.util.ArrayList) Period(org.joda.time.Period) QueryToolChest(org.apache.druid.query.QueryToolChest) DefaultDimensionSpec(org.apache.druid.query.dimension.DefaultDimensionSpec) FinalizeResultsQueryRunner(org.apache.druid.query.FinalizeResultsQueryRunner) ChainedExecutionQueryRunner(org.apache.druid.query.ChainedExecutionQueryRunner) QueryRunner(org.apache.druid.query.QueryRunner) Result(org.apache.druid.query.Result) SelectorDimFilter(org.apache.druid.query.filter.SelectorDimFilter) ExecutorService(java.util.concurrent.ExecutorService) InitializedNullHandlingTest(org.apache.druid.testing.InitializedNullHandlingTest) Test(org.junit.Test)
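
The runner pipeline assembled above is easier to follow with one decoration per step. A restatement of the same composition (variable names as in the test; no new API is introduced):

QueryRunner<ResultRow> fannedOut = factory.mergeRunners(exec, singleSegmentRunners); // fan out across the 32 per-segment runners
QueryRunner<ResultRow> merged = toolChest.mergeResults(fannedOut);                   // merge the per-runner result sequences
QueryRunner<ResultRow> finalized = new FinalizeResultsQueryRunner<>(merged, toolChest); // finalize aggregator values
QueryRunner theRunner = toolChest.postMergeQueryDecoration(finalized);               // outermost decoration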

Example 28 with PeriodGranularity

use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

the class DruidSqlParserUtils method convertSqlNodeToGranularity.

/**
 * This method is used to extract the granularity from a SqlNode representing the following function calls:
 * 1. FLOOR(__time TO TimeUnit)
 * 2. TIME_FLOOR(__time, 'PT1H')
 *
 * Validation of the sqlNode is contingent on the following conditions:
 * 1. sqlNode is an instance of SqlCall
 * 2. The operator is either TIME_FLOOR or FLOOR
 * 3. The number of operands in the call is 2
 * 4. The first operand is a simple identifier representing __time
 * 5. If the operator is TIME_FLOOR, the second operand is a literal that can be converted to the Granularity class
 * 6. If the operator is FLOOR, the second operand is a TimeUnit that can be mapped using {@link TimeUnits}
 *
 * Since it is primarily used while parsing a SqlNode, it is wrapped in {@code convertSqlNodeToGranularityThrowingParseExceptions}
 *
 * @param sqlNode SqlNode representing a call to a function
 * @return Granularity as intended by the function call
 * @throws ParseException if the SqlNode cannot be converted to a granularity
 */
public static Granularity convertSqlNodeToGranularity(SqlNode sqlNode) throws ParseException {
    final String genericParseFailedMessageFormatString = "Encountered %s after PARTITIONED BY. " + "Expected HOUR, DAY, MONTH, YEAR, ALL TIME, FLOOR function or %s function";
    if (!(sqlNode instanceof SqlCall)) {
        throw new ParseException(StringUtils.format(genericParseFailedMessageFormatString, sqlNode.toString(), TimeFloorOperatorConversion.SQL_FUNCTION_NAME));
    }
    SqlCall sqlCall = (SqlCall) sqlNode;
    String operatorName = sqlCall.getOperator().getName();
    Preconditions.checkArgument("FLOOR".equalsIgnoreCase(operatorName) || TimeFloorOperatorConversion.SQL_FUNCTION_NAME.equalsIgnoreCase(operatorName), StringUtils.format("PARTITIONED BY clause only supports FLOOR(__time TO <unit> and %s(__time, period) functions", TimeFloorOperatorConversion.SQL_FUNCTION_NAME));
    List<SqlNode> operandList = sqlCall.getOperandList();
    Preconditions.checkArgument(operandList.size() == 2, StringUtils.format("%s in PARTITIONED BY clause must have two arguments", operatorName));
    // Check if the first argument passed in the floor function is __time
    SqlNode timeOperandSqlNode = operandList.get(0);
    Preconditions.checkArgument(timeOperandSqlNode.getKind().equals(SqlKind.IDENTIFIER), StringUtils.format("First argument to %s in PARTITIONED BY clause can only be __time", operatorName));
    SqlIdentifier timeOperandSqlIdentifier = (SqlIdentifier) timeOperandSqlNode;
    Preconditions.checkArgument(timeOperandSqlIdentifier.getSimple().equals(ColumnHolder.TIME_COLUMN_NAME), StringUtils.format("First argument to %s in PARTITIONED BY clause can only be __time", operatorName));
    // If the floor function is of form TIME_FLOOR(__time, 'PT1H')
    if (operatorName.equalsIgnoreCase(TimeFloorOperatorConversion.SQL_FUNCTION_NAME)) {
        SqlNode granularitySqlNode = operandList.get(1);
        Preconditions.checkArgument(granularitySqlNode.getKind().equals(SqlKind.LITERAL), "Second argument to TIME_FLOOR in PARTITIONED BY clause must be a period like 'PT1H'");
        String granularityString = SqlLiteral.unchain(granularitySqlNode).toValue();
        Period period;
        try {
            period = new Period(granularityString);
        } catch (IllegalArgumentException e) {
            throw new ParseException(StringUtils.format("%s is an invalid period string", granularitySqlNode.toString()));
        }
        return new PeriodGranularity(period, null, null);
    } else if ("FLOOR".equalsIgnoreCase(operatorName)) {
        // If the floor function is of form FLOOR(__time TO DAY)
        SqlNode granularitySqlNode = operandList.get(1);
        // In future versions of Calcite, this can be checked via
        // granularitySqlNode.getKind().equals(SqlKind.INTERVAL_QUALIFIER)
        Preconditions.checkArgument(granularitySqlNode instanceof SqlIntervalQualifier, "Second argument to the FLOOR function in PARTITIONED BY clause is not a valid granularity. " + "Please refer to the documentation of FLOOR function");
        SqlIntervalQualifier granularityIntervalQualifier = (SqlIntervalQualifier) granularitySqlNode;
        Period period = TimeUnits.toPeriod(granularityIntervalQualifier.timeUnitRange);
        Preconditions.checkNotNull(period, StringUtils.format("%s is not a valid granularity for ingestion", granularityIntervalQualifier.timeUnitRange.toString()));
        return new PeriodGranularity(period, null, null);
    }
    // Shouldn't reach here
    throw new ParseException(StringUtils.format(genericParseFailedMessageFormatString, sqlNode.toString(), TimeFloorOperatorConversion.SQL_FUNCTION_NAME));
}
Also used : SqlIntervalQualifier(org.apache.calcite.sql.SqlIntervalQualifier) SqlCall(org.apache.calcite.sql.SqlCall) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) SqlNode(org.apache.calcite.sql.SqlNode)
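
Since convertSqlNodeToGranularity only inspects an already-parsed SqlNode, it can be exercised directly. A hypothetical caller-side sketch (not from the Druid source), assuming a recent Calcite where SqlParser.config() is available; the casing override exists only so the stock parser keeps the lower-case __time identifier intact (Druid's own SQL parser handles this itself):

import org.apache.calcite.avatica.util.Casing;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.druid.java.util.common.granularity.Granularity;

public static void demo() throws Exception {
    // Parse a FLOOR expression, keeping unquoted identifiers as written.
    SqlNode node = SqlParser.create(
        "FLOOR(__time TO DAY)",
        SqlParser.config().withUnquotedCasing(Casing.UNCHANGED)
    ).parseExpression();
    Granularity granularity = DruidSqlParserUtils.convertSqlNodeToGranularity(node);
    // Expect a PeriodGranularity equivalent to P1D.
}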

Example 29 with PeriodGranularity

use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

the class HadoopIngestionSpecTest method testPeriodSegmentGranularitySpec.

@Test
public void testPeriodSegmentGranularitySpec() {
    final HadoopIngestionSpec schema;
    try {
        schema = jsonReadWriteRead(
            "{\n"
            + "    \"dataSchema\": {\n"
            + "     \"dataSource\": \"foo\",\n"
            + "     \"metricsSpec\": [],\n"
            + "        \"granularitySpec\": {\n"
            + "                \"type\": \"uniform\",\n"
            + "                \"segmentGranularity\": {\"type\": \"period\", \"period\":\"PT1H\", \"timeZone\":\"America/Los_Angeles\"},\n"
            + "                \"intervals\": [\"2012-01-01/P1D\"]\n"
            + "        }\n"
            + "    }\n"
            + "}",
            HadoopIngestionSpec.class
        );
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    final UniformGranularitySpec granularitySpec = (UniformGranularitySpec) schema.getDataSchema().getGranularitySpec();
    Assert.assertEquals(
        "getSegmentGranularity",
        new PeriodGranularity(new Period("PT1H"), null, DateTimes.inferTzFromString("America/Los_Angeles")),
        granularitySpec.getSegmentGranularity()
    );
}
Also used : UniformGranularitySpec(org.apache.druid.segment.indexing.granularity.UniformGranularitySpec) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Period(org.joda.time.Period) Test(org.junit.Test)
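
The assertion compares granularities by value: PeriodGranularity implements equals() over its period, origin, and time zone, so the deserialized instance matches a directly constructed one. A minimal standalone sketch of that equality (not part of the test):

PeriodGranularity a = new PeriodGranularity(new Period("PT1H"), null, DateTimes.inferTzFromString("America/Los_Angeles"));
PeriodGranularity b = new PeriodGranularity(new Period("PT1H"), null, DateTimes.inferTzFromString("America/Los_Angeles"));
// Equal period, origin, and time zone imply equals() returns true,
// which is what the assertEquals above relies on.
Assert.assertEquals(a, b);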

Example 30 with PeriodGranularity

use of org.apache.druid.java.util.common.granularity.PeriodGranularity in project druid by druid-io.

the class SegmentAllocateActionTest method testSameIntervalWithSegmentGranularity.

@Test
public void testSameIntervalWithSegmentGranularity() {
    final Task task = NoopTask.create();
    taskActionTestKit.getTaskLockbox().add(task);
    Granularity segmentGranularity = new PeriodGranularity(Period.hours(1), null, DateTimes.inferTzFromString("Asia/Shanghai"));
    final SegmentIdWithShardSpec id1 = allocate(task, PARTY_TIME, Granularities.MINUTE, segmentGranularity, "s1", null);
    final SegmentIdWithShardSpec id2 = allocate(task, PARTY_TIME, Granularities.MINUTE, segmentGranularity, "s2", null);
    Assert.assertNotNull(id1);
    Assert.assertNotNull(id2);
}
Also used : Task(org.apache.druid.indexing.common.task.Task) NoopTask(org.apache.druid.indexing.common.task.NoopTask) PeriodGranularity(org.apache.druid.java.util.common.granularity.PeriodGranularity) Granularity(org.apache.druid.java.util.common.granularity.Granularity) LockGranularity(org.apache.druid.indexing.common.LockGranularity) SegmentIdWithShardSpec(org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec) Test(org.junit.Test)
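
Because the granularity above carries the Asia/Shanghai zone, both allocations target the same hour-long bucket expressed in +08:00. A standalone sketch (not part of the test, using a hypothetical instant) of the bucket such a granularity produces:

Granularity hourly = new PeriodGranularity(Period.hours(1), null, DateTimes.inferTzFromString("Asia/Shanghai"));
Interval bucket = hourly.bucket(DateTimes.of("2011-04-01T05:30:00Z"));
// 05:30Z is 13:30 in +08:00, so the bucket should be
// 2011-04-01T13:00:00.000+08:00/2011-04-01T14:00:00.000+08:00.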

Aggregations

PeriodGranularity (org.apache.druid.java.util.common.granularity.PeriodGranularity) 65
Test (org.junit.Test) 57
Period (org.joda.time.Period) 52
LongSumAggregatorFactory (org.apache.druid.query.aggregation.LongSumAggregatorFactory) 33
InitializedNullHandlingTest (org.apache.druid.testing.InitializedNullHandlingTest) 27
DefaultDimensionSpec (org.apache.druid.query.dimension.DefaultDimensionSpec) 24
DateTime (org.joda.time.DateTime) 13
QueryRunner (org.apache.druid.query.QueryRunner) 11
Result (org.apache.druid.query.Result) 11
ArrayList (java.util.ArrayList) 10
FinalizeResultsQueryRunner (org.apache.druid.query.FinalizeResultsQueryRunner) 10
CountAggregatorFactory (org.apache.druid.query.aggregation.CountAggregatorFactory) 10
ResponseContext (org.apache.druid.query.context.ResponseContext) 9
DateTimeZone (org.joda.time.DateTimeZone) 9
ChainedExecutionQueryRunner (org.apache.druid.query.ChainedExecutionQueryRunner) 8
MultipleIntervalSegmentSpec (org.apache.druid.query.spec.MultipleIntervalSegmentSpec) 8
Sequence (org.apache.druid.java.util.common.guava.Sequence) 7
QueryPlus (org.apache.druid.query.QueryPlus) 7
Interval (org.joda.time.Interval) 7
QueryDataSource (org.apache.druid.query.QueryDataSource) 6