
Example 1 with DataSegmentPusher

Use of io.druid.segment.loading.DataSegmentPusher in project druid by druid-io.
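For orientation, this is the shape of the DataSegmentPusher interface that all five examples implement or mock. It is reconstructed here from the methods the examples override, so treat it as a sketch of the 0.9.x/0.10.x-era API rather than the authoritative declaration:

    package io.druid.segment.loading;

    import io.druid.timeline.DataSegment;
    import java.io.File;
    import java.io.IOException;

    public interface DataSegmentPusher {

        // legacy variant; the implementations below simply delegate to getPathForHadoop()
        @Deprecated
        String getPathForHadoop(String dataSource);

        String getPathForHadoop();

        // pushes the segment files under `file` to deep storage and returns the
        // segment descriptor, typically updated with the storage location
        DataSegment push(File file, DataSegment segment) throws IOException;
    }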

From the class SameIntervalMergeTaskTest, method runTask:

private List<DataSegment> runTask(final SameIntervalMergeTask mergeTask, final String version) throws Exception {
    boolean isReady = mergeTask.isReady(new TaskActionClient() {

        @Override
        public <RetType> RetType submit(TaskAction<RetType> taskAction) throws IOException {
            if (taskAction instanceof LockTryAcquireAction) {
                // the lock of this interval is required
                Assert.assertEquals(mergeTask.getInterval(), ((LockTryAcquireAction) taskAction).getInterval());
                isRedayCountDown.countDown();
                taskLock = new TaskLock(mergeTask.getGroupId(), mergeTask.getDataSource(), mergeTask.getInterval(), version);
                return (RetType) taskLock;
            }
            return null;
        }
    });
    // ensure LockTryAcquireAction is submitted
    Assert.assertTrue(isReady);
    final List<DataSegment> segments = Lists.newArrayList();
    mergeTask.run(new TaskToolbox(null, null, new TaskActionClient() {

        @Override
        public <RetType> RetType submit(TaskAction<RetType> taskAction) throws IOException {
            if (taskAction instanceof LockListAction) {
                Assert.assertNotNull("taskLock should be acquired before list", taskLock);
                return (RetType) Arrays.asList(taskLock);
            }
            if (taskAction instanceof SegmentListUsedAction) {
                List<DataSegment> segments = ImmutableList.of(
                        DataSegment.builder().dataSource(mergeTask.getDataSource()).interval(new Interval("2010-01-01/PT1H")).version("oldVersion").shardSpec(new LinearShardSpec(0)).build(),
                        DataSegment.builder().dataSource(mergeTask.getDataSource()).interval(new Interval("2010-01-01/PT1H")).version("oldVersion").shardSpec(new LinearShardSpec(0)).build(),
                        DataSegment.builder().dataSource(mergeTask.getDataSource()).interval(new Interval("2010-01-01/PT2H")).version("oldVersion").shardSpec(new LinearShardSpec(0)).build());
                return (RetType) segments;
            }
            if (taskAction instanceof SegmentInsertAction) {
                publishCountDown.countDown();
                return null;
            }
            return null;
        }
    }, new NoopServiceEmitter(), new DataSegmentPusher() {

        @Deprecated
        @Override
        public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
        }

        @Override
        public String getPathForHadoop() {
            return null;
        }

        @Override
        public DataSegment push(File file, DataSegment segment) throws IOException {
            // the merged segment is pushed to storage
            segments.add(segment);
            return segment;
        }
    }, null, null, null, null, null, null, null, null, new SegmentLoader() { // the eight nulls stub the killer, mover, archiver, announcer, handoff notifier factory, conglomerate, query executor and monitor scheduler

        @Override
        public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException {
            return false;
        }

        @Override
        public Segment getSegment(DataSegment segment) throws SegmentLoadingException {
            return null;
        }

        @Override
        public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException {
            // dummy file to represent the downloaded segment's dir
            return new File("" + segment.getShardSpec().getPartitionNum());
        }

        @Override
        public void cleanup(DataSegment segment) throws SegmentLoadingException {
        }
    }, jsonMapper, temporaryFolder.newFolder(), EasyMock.createMock(IndexMerger.class), indexIO, null, null, EasyMock.createMock(IndexMergerV9.class)));
    return segments;
}
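The segments list returned by runTask is what the surrounding test asserts against. A minimal, hypothetical assertion fragment (the expected count and version are illustrative, not taken from the real test):

    final List<DataSegment> pushed = runTask(mergeTask, "newVersion");
    // hypothetical expectation: the merge produced a single segment at the new version
    Assert.assertEquals(1, pushed.size());
    Assert.assertEquals("newVersion", pushed.get(0).getVersion());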

Example 2 with DataSegmentPusher

Use of io.druid.segment.loading.DataSegmentPusher in project druid by druid-io.

From the class IndexTaskTest, method runTask:

private final List<DataSegment> runTask(final IndexTask indexTask) throws Exception {
    final List<DataSegment> segments = Lists.newArrayList();
    indexTask.run(new TaskToolbox(null, null, new TaskActionClient() {

        @Override
        public <RetType> RetType submit(TaskAction<RetType> taskAction) throws IOException {
            if (taskAction instanceof LockListAction) {
                return (RetType) Arrays.asList(new TaskLock("", "", null, new DateTime().toString()));
            }
            if (taskAction instanceof LockAcquireAction) {
                return (RetType) new TaskLock("groupId", "test", ((LockAcquireAction) taskAction).getInterval(), new DateTime().toString());
            }
            if (taskAction instanceof SegmentTransactionalInsertAction) {
                return (RetType) new SegmentPublishResult(((SegmentTransactionalInsertAction) taskAction).getSegments(), true);
            }
            if (taskAction instanceof SegmentAllocateAction) {
                SegmentAllocateAction action = (SegmentAllocateAction) taskAction;
                Interval interval = action.getPreferredSegmentGranularity().bucket(action.getTimestamp());
                ShardSpec shardSpec = new NumberedShardSpec(segmentAllocatePartitionCounter++, 0);
                return (RetType) new SegmentIdentifier(action.getDataSource(), interval, "latestVersion", shardSpec);
            }
            return null;
        }
    }, null, new DataSegmentPusher() { // the null stubs the service emitter

        @Deprecated
        @Override
        public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
        }

        @Override
        public String getPathForHadoop() {
            return null;
        }

        @Override
        public DataSegment push(File file, DataSegment segment) throws IOException {
            segments.add(segment);
            return segment;
        }
    }, null, null, null, null, null, null, null, null, null, jsonMapper, temporaryFolder.newFolder(), indexMerger, indexIO, null, null, indexMergerV9)); // the nine leading nulls stub the killer, mover, archiver, announcer, handoff notifier factory, conglomerate, query executor, monitor scheduler and segment loader
    Collections.sort(segments);
    return segments;
}
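Examples 1 and 2 (and Example 5 below) each inline the same capture-only pusher. A reusable test double along these lines (a hypothetical helper, not part of the Druid codebase) would remove the duplication:

    class CapturingDataSegmentPusher implements DataSegmentPusher {

        private final List<DataSegment> pushed = Lists.newArrayList();

        @Deprecated
        @Override
        public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
        }

        @Override
        public String getPathForHadoop() {
            return null; // no Hadoop path is needed in these tests
        }

        @Override
        public DataSegment push(File file, DataSegment segment) throws IOException {
            pushed.add(segment); // record instead of writing to deep storage
            return segment;
        }

        public List<DataSegment> getPushed() {
            return pushed;
        }
    }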

Example 3 with DataSegmentPusher

Use of io.druid.segment.loading.DataSegmentPusher in project druid by druid-io.

From the class RealtimePlumberSchoolTest, method setUp:

@Before
public void setUp() throws Exception {
    tmpDir = Files.createTempDir();
    ObjectMapper jsonMapper = new DefaultObjectMapper();
    schema = new DataSchema(
        "test",
        jsonMapper.convertValue(
            new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null),
            Map.class),
        new AggregatorFactory[] { new CountAggregatorFactory("rows") },
        new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
        jsonMapper);
    // schema2 is identical to schema except for its YEAR segment granularity
    schema2 = new DataSchema(
        "test",
        jsonMapper.convertValue(
            new StringInputRowParser(new JSONParseSpec(new TimestampSpec("timestamp", "auto", null), new DimensionsSpec(null, null, null), null, null), null),
            Map.class),
        new AggregatorFactory[] { new CountAggregatorFactory("rows") },
        new UniformGranularitySpec(Granularities.YEAR, Granularities.NONE, null),
        jsonMapper);
    announcer = EasyMock.createMock(DataSegmentAnnouncer.class);
    announcer.announceSegment(EasyMock.<DataSegment>anyObject());
    EasyMock.expectLastCall().anyTimes();
    segmentPublisher = EasyMock.createNiceMock(SegmentPublisher.class);
    dataSegmentPusher = EasyMock.createNiceMock(DataSegmentPusher.class);
    handoffNotifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    handoffNotifier = EasyMock.createNiceMock(SegmentHandoffNotifier.class);
    EasyMock.expect(handoffNotifierFactory.createSegmentHandoffNotifier(EasyMock.anyString())).andReturn(handoffNotifier).anyTimes();
    EasyMock.expect(handoffNotifier.registerSegmentHandoffCallback(EasyMock.<SegmentDescriptor>anyObject(), EasyMock.<Executor>anyObject(), EasyMock.<Runnable>anyObject())).andReturn(true).anyTimes();
    emitter = EasyMock.createMock(ServiceEmitter.class);
    EasyMock.replay(announcer, segmentPublisher, dataSegmentPusher, handoffNotifierFactory, handoffNotifier, emitter);
    tuningConfig = new RealtimeTuningConfig(1, null, null, null, new IntervalStartVersioningPolicy(), rejectionPolicy, null, null, null, buildV9Directly, 0, 0, false, null);
    realtimePlumberSchool = new RealtimePlumberSchool(
        emitter,
        new DefaultQueryRunnerFactoryConglomerate(Maps.<Class<? extends Query>, QueryRunnerFactory>newHashMap()),
        dataSegmentPusher,
        announcer,
        segmentPublisher,
        handoffNotifierFactory,
        MoreExecutors.sameThreadExecutor(),
        TestHelper.getTestIndexMerger(),
        TestHelper.getTestIndexMergerV9(),
        TestHelper.getTestIndexIO(),
        MapCache.create(0),
        FireDepartmentTest.NO_CACHE_CONFIG,
        TestHelper.getObjectMapper());
    metrics = new FireDepartmentMetrics();
    plumber = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema, tuningConfig, metrics);
}
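dataSegmentPusher is created as a nice mock with no recorded expectations, so its push(...) returns null by default. A test that needs the pushed segment echoed back could record an expectation before the EasyMock.replay(...) call, roughly as follows (a sketch assuming EasyMock 3.4+ for newCapture(), with imports of org.easymock.Capture and org.easymock.IAnswer):

    final Capture<DataSegment> pushed = EasyMock.newCapture();
    EasyMock.expect(dataSegmentPusher.push(EasyMock.<File>anyObject(), EasyMock.capture(pushed)))
            .andAnswer(new IAnswer<DataSegment>() {
                @Override
                public DataSegment answer() throws Throwable {
                    return pushed.getValue(); // echo the captured segment back to the caller
                }
            })
            .anyTimes();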

Example 4 with DataSegmentPusher

Use of io.druid.segment.loading.DataSegmentPusher in project druid by druid-io.

From the class KafkaIndexTaskTest, method makeToolboxFactory:

private void makeToolboxFactory() throws IOException {
    directory = tempFolder.newFolder();
    final TestUtils testUtils = new TestUtils();
    final ObjectMapper objectMapper = testUtils.getTestObjectMapper();
    for (Module module : new KafkaIndexTaskModule().getJacksonModules()) {
        objectMapper.registerModule(module);
    }
    final TaskConfig taskConfig = new TaskConfig(new File(directory, "taskBaseDir").getPath(), null, null, 50000, null, false, null, null);
    final TestDerbyConnector derbyConnector = derby.getConnector();
    derbyConnector.createDataSourceTable();
    derbyConnector.createPendingSegmentsTable();
    derbyConnector.createSegmentTable();
    derbyConnector.createRulesTable();
    derbyConnector.createConfigTable();
    derbyConnector.createTaskTables();
    derbyConnector.createAuditTable();
    taskStorage = new MetadataTaskStorage(derbyConnector, new TaskStorageConfig(null), new SQLMetadataStorageActionHandlerFactory(derbyConnector, derby.metadataTablesConfigSupplier().get(), objectMapper));
    metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(testUtils.getTestObjectMapper(), derby.metadataTablesConfigSupplier().get(), derbyConnector);
    taskLockbox = new TaskLockbox(taskStorage);
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, metadataStorageCoordinator, emitter, new SupervisorManager(null));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage, taskActionToolbox);
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {

        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {

                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec, Runnable handOffRunnable) {
                    if (doHandoff) {
                        // Simulate immediate handoff
                        exec.execute(handOffRunnable);
                    }
                    return true;
                }

                @Override
                public void start() {
                    // no-op
                }

                @Override
                public void close() {
                    // no-op
                }
            };
        }
    };
    final LocalDataSegmentPusherConfig dataSegmentPusherConfig = new LocalDataSegmentPusherConfig();
    dataSegmentPusherConfig.storageDirectory = getSegmentDirectory();
    final DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(dataSegmentPusherConfig, objectMapper);
    toolboxFactory = new TaskToolboxFactory(
        taskConfig,
        taskActionClientFactory,
        emitter,
        dataSegmentPusher,
        new TestDataSegmentKiller(),
        null, // DataSegmentMover
        null, // DataSegmentArchiver
        new TestDataSegmentAnnouncer(),
        handoffNotifierFactory,
        makeTimeseriesOnlyConglomerate(),
        MoreExecutors.sameThreadExecutor(), // queryExecutorService
        EasyMock.createMock(MonitorScheduler.class),
        new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

            @Override
            public List<StorageLocationConfig> getLocations() {
                return Lists.newArrayList();
            }
        }, testUtils.getTestObjectMapper())),
        testUtils.getTestObjectMapper(),
        testUtils.getTestIndexMerger(),
        testUtils.getTestIndexIO(),
        MapCache.create(1024),
        new CacheConfig(),
        testUtils.getTestIndexMergerV9());
}
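Unlike the stubbed pushers in the earlier examples, this toolbox wires in a real LocalDataSegmentPusher, so pushed segments are actually written beneath the configured storage directory. A minimal standalone sketch of that pusher in isolation (the path and the segmentDir/segment variables are illustrative):

    LocalDataSegmentPusherConfig config = new LocalDataSegmentPusherConfig();
    config.storageDirectory = new File("/tmp/druid/localStorage"); // illustrative location
    DataSegmentPusher pusher = new LocalDataSegmentPusher(config, objectMapper);
    // copies the segment directory into storageDirectory and returns the descriptor,
    // with its load spec pointing at the local copy
    DataSegment pushed = pusher.push(segmentDir, segment);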

Example 5 with DataSegmentPusher

Use of io.druid.segment.loading.DataSegmentPusher in project druid by druid-io.

From the class IngestSegmentFirehoseFactoryTest, method constructorFeeder:

@Parameterized.Parameters(name = "{1}")
public static Collection<Object[]> constructorFeeder() throws IOException {
    final IndexSpec indexSpec = new IndexSpec();
    final HeapMemoryTaskStorage ts = new HeapMemoryTaskStorage(new TaskStorageConfig(null) {
    });
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withQueryGranularity(Granularities.NONE)
        .withMinTimestamp(JodaUtils.MIN_INSTANT)
        .withDimensionsSpec(ROW_PARSER)
        .withMetrics(new AggregatorFactory[] {
            new LongSumAggregatorFactory(METRIC_LONG_NAME, DIM_LONG_NAME),
            new DoubleSumAggregatorFactory(METRIC_FLOAT_NAME, DIM_FLOAT_NAME)
        })
        .build();
    final OnheapIncrementalIndex index = new OnheapIncrementalIndex(schema, true, MAX_ROWS * MAX_SHARD_NUMBER);
    for (Integer i = 0; i < MAX_ROWS; ++i) {
        index.add(ROW_PARSER.parse(buildRow(i.longValue())));
    }
    if (!persistDir.mkdirs() && !persistDir.exists()) {
        throw new IOException(String.format("Could not create directory at [%s]", persistDir.getAbsolutePath()));
    }
    INDEX_MERGER.persist(index, persistDir, indexSpec);
    final TaskLockbox tl = new TaskLockbox(ts);
    final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null) {

        private final Set<DataSegment> published = Sets.newHashSet();

        private final Set<DataSegment> nuked = Sets.newHashSet();

        @Override
        public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException {
            return ImmutableList.copyOf(segmentSet);
        }

        @Override
        public List<DataSegment> getUsedSegmentsForIntervals(String dataSource, List<Interval> interval) throws IOException {
            return ImmutableList.copyOf(segmentSet);
        }

        @Override
        public List<DataSegment> getUnusedSegmentsForInterval(String dataSource, Interval interval) {
            return ImmutableList.of();
        }

        @Override
        public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) {
            Set<DataSegment> added = Sets.newHashSet();
            for (final DataSegment segment : segments) {
                if (published.add(segment)) {
                    added.add(segment);
                }
            }
            return ImmutableSet.copyOf(added);
        }

        @Override
        public void deleteSegments(Set<DataSegment> segments) {
            nuked.addAll(segments);
        }
    };
    final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory(ts, new TaskActionToolbox(tl, mdc, newMockEmitter(), EasyMock.createMock(SupervisorManager.class)));
    SegmentHandoffNotifierFactory notifierFactory = EasyMock.createNiceMock(SegmentHandoffNotifierFactory.class);
    EasyMock.replay(notifierFactory);
    final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory(
    new TaskConfig(tmpDir.getAbsolutePath(), null, null, 50000, null, false, null, null),
    tac,
    newMockEmitter(),
    new DataSegmentPusher() {

        @Deprecated
        @Override
        public String getPathForHadoop(String dataSource) {
            return getPathForHadoop();
        }

        @Override
        public String getPathForHadoop() {
            throw new UnsupportedOperationException();
        }

        @Override
        public DataSegment push(File file, DataSegment segment) throws IOException {
            return segment;
        }
    }, new DataSegmentKiller() {

        @Override
        public void kill(DataSegment segments) throws SegmentLoadingException {
        }

        @Override
        public void killAll() throws IOException {
            throw new UnsupportedOperationException("not implemented");
        }
    }, new DataSegmentMover() {

        @Override
        public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException {
            return dataSegment;
        }
    }, new DataSegmentArchiver() {

        @Override
        public DataSegment archive(DataSegment segment) throws SegmentLoadingException {
            return segment;
        }

        @Override
        public DataSegment restore(DataSegment segment) throws SegmentLoadingException {
            return segment;
        }
    },
    null, // segment announcer
    notifierFactory,
    null, // query runner factory conglomerate corporation unionized collective
    null, // query executor service
    null, // monitor scheduler
    new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {

        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    }, MAPPER)), MAPPER, INDEX_MERGER, INDEX_IO, null, null, INDEX_MERGER_V9);
    Collection<Object[]> values = new LinkedList<>();
    for (InputRowParser parser : Arrays.<InputRowParser>asList(
        ROW_PARSER,
        new MapInputRowParser(new JSONParseSpec(
            new TimestampSpec(TIME_COLUMN, "auto", null),
            new DimensionsSpec(
                DimensionsSpec.getDefaultSchemas(ImmutableList.<String>of()),
                ImmutableList.of(DIM_FLOAT_NAME, DIM_LONG_NAME),
                ImmutableList.<SpatialDimensionSchema>of()),
            null,
            null)))) {
        for (List<String> dim_names : Arrays.<List<String>>asList(null, ImmutableList.of(DIM_NAME))) {
            for (List<String> metric_names : Arrays.<List<String>>asList(null, ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME))) {
                values.add(new Object[] {
                    new IngestSegmentFirehoseFactory(
                        DATA_SOURCE_NAME,
                        FOREVER,
                        new SelectorDimFilter(DIM_NAME, DIM_VALUE, null),
                        dim_names,
                        metric_names,
                        Guice.createInjector(new Module() {

                            @Override
                            public void configure(Binder binder) {
                                binder.bind(TaskToolboxFactory.class).toInstance(taskToolboxFactory);
                            }
                        }),
                        INDEX_IO),
                    String.format(
                        "DimNames[%s]MetricNames[%s]ParserDimNames[%s]",
                        dim_names == null ? "null" : "dims",
                        metric_names == null ? "null" : "metrics",
                        parser == ROW_PARSER ? "dims" : "null"),
                    parser
                });
            }
        }
    }
    return values;
}
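Each Object[] emitted above supplies a firehose factory, a display name (surfaced by @Parameterized.Parameters(name = "{1}")), and a parser. The receiving test constructor would therefore have roughly this shape (hypothetical, inferred from the parameter order):

    public IngestSegmentFirehoseFactoryTest(
        IngestSegmentFirehoseFactory factory,
        String testName,
        InputRowParser parser) {
        this.factory = factory;
        this.parser = parser;
    }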

Aggregations

Classes most often used together with DataSegmentPusher across the indexed examples, with usage counts:

DataSegmentPusher (io.druid.segment.loading.DataSegmentPusher): 7
DataSegment (io.druid.timeline.DataSegment): 5
File (java.io.File): 5
IOException (java.io.IOException): 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 2
DimensionsSpec (io.druid.data.input.impl.DimensionsSpec): 2
JSONParseSpec (io.druid.data.input.impl.JSONParseSpec): 2
TimestampSpec (io.druid.data.input.impl.TimestampSpec): 2
SegmentLoaderFactory (io.druid.indexing.common.SegmentLoaderFactory): 2
TaskLock (io.druid.indexing.common.TaskLock): 2
TaskToolbox (io.druid.indexing.common.TaskToolbox): 2
TaskToolboxFactory (io.druid.indexing.common.TaskToolboxFactory): 2
LocalTaskActionClientFactory (io.druid.indexing.common.actions.LocalTaskActionClientFactory): 2
LockListAction (io.druid.indexing.common.actions.LockListAction): 2
TaskAction (io.druid.indexing.common.actions.TaskAction): 2
TaskActionClient (io.druid.indexing.common.actions.TaskActionClient): 2
TaskActionToolbox (io.druid.indexing.common.actions.TaskActionToolbox): 2
TaskConfig (io.druid.indexing.common.config.TaskConfig): 2
TaskStorageConfig (io.druid.indexing.common.config.TaskStorageConfig): 2
TaskLockbox (io.druid.indexing.overlord.TaskLockbox): 2