Search in sources :

Example 1 with OverlordActionBasedUsedSegmentLister

Use of io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister in the druid project (druid-io).

The run method of the HadoopIndexTask class.

@SuppressWarnings("unchecked")
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception {
    // Isolated classloader in which the Hadoop phases are executed.
    final ClassLoader classLoader = buildClassLoader(toolbox);
    // Capture whether intervals are missing from the spec BEFORE the spec is
    // rewritten below, since the rewrite may alter the granularity spec.
    final boolean intervalsUnknown = !spec.getDataSchema().getGranularitySpec().bucketIntervals().isPresent();
    spec = HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(
        spec,
        jsonMapper,
        new OverlordActionBasedUsedSegmentLister(toolbox)
    );
    // Phase 1: resolve the full ingestion config (intervals, partitions) inside
    // the foreign classloader; the result comes back as serialized JSON.
    final String resolvedConfigJson = invokeForeignLoader(
        "io.druid.indexing.common.task.HadoopIndexTask$HadoopDetermineConfigInnerProcessing",
        new String[] {
            toolbox.getObjectMapper().writeValueAsString(spec),
            toolbox.getConfig().getHadoopWorkingPath(),
            toolbox.getSegmentPusher().getPathForHadoop()
        },
        classLoader
    );
    final HadoopIngestionSpec resolvedSchema =
        toolbox.getObjectMapper().readValue(resolvedConfigJson, HadoopIngestionSpec.class);
    // We should have a lock from before we started running only if interval was specified
    String version;
    if (intervalsUnknown) {
        // No pre-existing lock: condense the now-known bucket intervals into one
        // umbrella interval and acquire a lock over it.
        final Interval umbrella = JodaUtils.umbrellaInterval(
            JodaUtils.condenseIntervals(resolvedSchema.getDataSchema().getGranularitySpec().bucketIntervals().get())
        );
        final TaskLock acquired = toolbox.getTaskActionClient().submit(new LockAcquireAction(umbrella));
        version = acquired.getVersion();
    } else {
        // Interval was specified up front, so exactly one lock must already be held.
        version = Iterables.getOnlyElement(getTaskLocks(toolbox)).getVersion();
    }
    final String explicitVersion = resolvedSchema.getTuningConfig().getVersion();
    if (resolvedSchema.getTuningConfig().isUseExplicitVersion()) {
        // An explicit spec version is only honored when strictly older than the
        // lock version; otherwise the task fails fast.
        if (explicitVersion.compareTo(version) >= 0) {
            log.error("Spec version can not be greater than or equal to the lock version, Spec version: [%s] Lock version: [%s].", explicitVersion, version);
            return TaskStatus.failure(getId());
        }
        version = explicitVersion;
    }
    log.info("Setting version to: %s", version);
    // Phase 2: run segment generation in the foreign classloader; a null result
    // signals failure, otherwise it is the JSON list of produced segments.
    final String segmentsJson = invokeForeignLoader(
        "io.druid.indexing.common.task.HadoopIndexTask$HadoopIndexGeneratorInnerProcessing",
        new String[] { toolbox.getObjectMapper().writeValueAsString(resolvedSchema), version },
        classLoader
    );
    if (segmentsJson == null) {
        return TaskStatus.failure(getId());
    }
    final List<DataSegment> producedSegments =
        toolbox.getObjectMapper().readValue(segmentsJson, new TypeReference<List<DataSegment>>() {
        });
    toolbox.publishSegments(producedSegments);
    return TaskStatus.success(getId());
}
Also used : HadoopIngestionSpec(io.druid.indexer.HadoopIngestionSpec) OverlordActionBasedUsedSegmentLister(io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister) DataSegment(io.druid.timeline.DataSegment) TaskLock(io.druid.indexing.common.TaskLock) LockAcquireAction(io.druid.indexing.common.actions.LockAcquireAction) ImmutableList(com.google.common.collect.ImmutableList) List(java.util.List) Interval(org.joda.time.Interval)

Aggregations

ImmutableList (com.google.common.collect.ImmutableList)1 HadoopIngestionSpec (io.druid.indexer.HadoopIngestionSpec)1 TaskLock (io.druid.indexing.common.TaskLock)1 LockAcquireAction (io.druid.indexing.common.actions.LockAcquireAction)1 OverlordActionBasedUsedSegmentLister (io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister)1 DataSegment (io.druid.timeline.DataSegment)1 List (java.util.List)1 Interval (org.joda.time.Interval)1