Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class ImmutableWorkerInfoTest, method testSerde.
@Test
public void testSerde() throws Exception {
  ImmutableWorkerInfo workerInfo = new ImmutableWorkerInfo(
      new Worker("testWorker", "192.0.0.1", 10, "v1"),
      2,
      ImmutableSet.of("grp1", "grp2"),
      ImmutableSet.of("task1", "task2"),
      new DateTime("2015-01-01T01:01:01Z")
  );
  ObjectMapper mapper = new DefaultObjectMapper();
  final ImmutableWorkerInfo serde = mapper.readValue(mapper.writeValueAsString(workerInfo), ImmutableWorkerInfo.class);
  Assert.assertEquals(workerInfo, serde);
}
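The same round-trip pattern (serialize to JSON, deserialize, compare with equals()) works for any Jackson-serializable value class. Below is a self-contained sketch of that pattern using plain Jackson; the Endpoint class is a hypothetical stand-in for illustration only, not a druid type.

import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTripSketch {
  // Hypothetical value class used only to illustrate the serde round trip.
  public static class Endpoint {
    public String host;
    public int capacity;

    public Endpoint() {}

    public Endpoint(String host, int capacity) {
      this.host = host;
      this.capacity = capacity;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Endpoint)) {
        return false;
      }
      Endpoint that = (Endpoint) o;
      return capacity == that.capacity && host.equals(that.host);
    }

    @Override
    public int hashCode() {
      return host.hashCode() * 31 + capacity;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Endpoint original = new Endpoint("192.0.0.1", 10);
    // Write to JSON and read it back; equality of the two objects verifies the mapping.
    Endpoint roundTripped = mapper.readValue(mapper.writeValueAsString(original), Endpoint.class);
    if (!original.equals(roundTripped)) {
      throw new AssertionError("serde round trip lost information");
    }
  }
}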
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class TaskLifecycleTest, method runTask.
private TaskStatus runTask(final Task task) throws Exception {
  final Task dummyTask = new DefaultObjectMapper().readValue(
      "{\"type\":\"noop\", \"isReadyResult\":\"exception\"}",
      Task.class
  );
  final long startTime = System.currentTimeMillis();
  Preconditions.checkArgument(!task.getId().equals(dummyTask.getId()));
  // Multiple tasks can be run in a single unit test via runTask(), hence this check and synchronization.
  synchronized (this) {
    if (!taskQueue.isActive()) {
      taskQueue.start();
    }
  }
  taskQueue.add(dummyTask);
  taskQueue.add(task);
  TaskStatus retVal = null;
  for (final String taskId : ImmutableList.of(dummyTask.getId(), task.getId())) {
    try {
      TaskStatus status;
      while ((status = tsqa.getStatus(taskId).get()).isRunnable()) {
        if (System.currentTimeMillis() > startTime + 10 * 1000) {
          throw new ISE("Where did the task go?!: %s", task.getId());
        }
        Thread.sleep(100);
      }
      if (taskId.equals(task.getId())) {
        retVal = status;
      }
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }
  return retVal;
}
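The inner loop above is a plain poll-until-terminal pattern: check the task's status, give up after a deadline, otherwise sleep and retry. A standalone sketch of that pattern follows; the condition supplier, timeout, and poll interval are chosen for illustration only.

import java.util.function.Supplier;

public class PollUntilDone {
  // Polls isDone until it returns true or the timeout elapses.
  // Returns true if the condition was met, false if it timed out.
  public static boolean waitFor(Supplier<Boolean> isDone, long timeoutMillis, long pollMillis)
      throws InterruptedException {
    final long deadline = System.currentTimeMillis() + timeoutMillis;
    while (!isDone.get()) {
      if (System.currentTimeMillis() > deadline) {
        return false;
      }
      Thread.sleep(pollMillis);
    }
    return true;
  }

  public static void main(String[] args) throws InterruptedException {
    final long start = System.currentTimeMillis();
    // Illustrative condition: "done" once two seconds have passed.
    boolean done = waitFor(() -> System.currentTimeMillis() - start > 2000, 10 * 1000, 100);
    System.out.println("condition met before timeout: " + done);
  }
}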
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class TaskLifecycleTest, method setUpTaskToolboxFactory.
private TaskToolboxFactory setUpTaskToolboxFactory(
    DataSegmentPusher dataSegmentPusher,
    SegmentHandoffNotifierFactory handoffNotifierFactory,
    TestIndexerMetadataStorageCoordinator mdc
) throws IOException {
  Preconditions.checkNotNull(queryRunnerFactoryConglomerate);
  Preconditions.checkNotNull(monitorScheduler);
  Preconditions.checkNotNull(taskStorage);
  Preconditions.checkNotNull(emitter);
  taskLockbox = new TaskLockbox(taskStorage);
  tac = new LocalTaskActionClientFactory(
      taskStorage,
      new TaskActionToolbox(taskLockbox, mdc, emitter, EasyMock.createMock(SupervisorManager.class))
  );
  File tmpDir = temporaryFolder.newFolder();
  taskConfig = new TaskConfig(tmpDir.toString(), null, null, 50000, null, false, null, null);
  return new TaskToolboxFactory(
      taskConfig,
      tac,
      emitter,
      dataSegmentPusher,
      new LocalDataSegmentKiller(new LocalDataSegmentPusherConfig()),
      new DataSegmentMover() {
        @Override
        public DataSegment move(DataSegment dataSegment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException {
          return dataSegment;
        }
      },
      new DataSegmentArchiver() {
        @Override
        public DataSegment archive(DataSegment segment) throws SegmentLoadingException {
          return segment;
        }

        @Override
        public DataSegment restore(DataSegment segment) throws SegmentLoadingException {
          return segment;
        }
      },
      new DataSegmentAnnouncer() {
        @Override
        public void announceSegment(DataSegment segment) throws IOException {
          announcedSinks++;
        }

        @Override
        public void unannounceSegment(DataSegment segment) throws IOException {
        }

        @Override
        public void announceSegments(Iterable<DataSegment> segments) throws IOException {
        }

        @Override
        public void unannounceSegments(Iterable<DataSegment> segments) throws IOException {
        }

        @Override
        public boolean isAnnounced(DataSegment segment) {
          return false;
        }
      }, // segment announcer
      handoffNotifierFactory,
      // query runner factory conglomerate corporation unionized collective
      queryRunnerFactoryConglomerate,
      // query executor service
      MoreExecutors.sameThreadExecutor(),
      // monitor scheduler
      monitorScheduler,
      new SegmentLoaderFactory(
          new SegmentLoaderLocalCacheManager(null, new SegmentLoaderConfig() {
            @Override
            public List<StorageLocationConfig> getLocations() {
              return Lists.newArrayList();
            }
          }, new DefaultObjectMapper())
      ),
      MAPPER,
      INDEX_MERGER,
      INDEX_IO,
      MapCache.create(0),
      FireDepartmentTest.NO_CACHE_CONFIG,
      INDEX_MERGER_V9
  );
}
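Most of the constructor arguments above are inline no-op stubs: each collaborator interface is satisfied by an anonymous class that does only the minimum the test needs. A minimal, self-contained sketch of that wiring style follows; SegmentNotifier and ToolFactory are hypothetical names for illustration, not druid APIs.

public class StubWiringSketch {
  // Hypothetical collaborator interface and factory, used only to illustrate
  // the "anonymous no-op stub" wiring style seen above.
  interface SegmentNotifier {
    void notifySegment(String segmentId);
  }

  static class ToolFactory {
    private final SegmentNotifier notifier;

    ToolFactory(SegmentNotifier notifier) {
      this.notifier = notifier;
    }

    void publish(String segmentId) {
      notifier.notifySegment(segmentId);
    }
  }

  public static void main(String[] args) {
    // The test only needs the factory to be constructible, so the notifier
    // is an inline stub that records calls instead of doing real work.
    final int[] calls = {0};
    ToolFactory factory = new ToolFactory(new SegmentNotifier() {
      @Override
      public void notifySegment(String segmentId) {
        calls[0]++;
      }
    });
    factory.publish("segment-1");
    System.out.println("notifications recorded: " + calls[0]);
  }
}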
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class TaskLifecycleTest, method testNoopTask.
@Test
public void testNoopTask() throws Exception {
  final Task noopTask = new DefaultObjectMapper().readValue(
      "{\"type\":\"noop\", \"runTime\":\"100\"}",
      Task.class
  );
  final TaskStatus status = runTask(noopTask);
  Assert.assertEquals("statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode());
  Assert.assertEquals("num segments published", 0, mdc.getPublished().size());
  Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size());
}
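The task is built from a JSON spec whose "type" field selects the concrete class, which is standard Jackson polymorphic deserialization. A minimal sketch of that mechanism follows, using plain Jackson annotations; the Spec and NoopSpec classes are illustrative stand-ins, not druid's task hierarchy.

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class PolymorphicTaskSketch {
  // The "type" field in the JSON picks the concrete subclass to instantiate.
  @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
  @JsonSubTypes({@JsonSubTypes.Type(name = "noop", value = NoopSpec.class)})
  interface Spec {
  }

  static class NoopSpec implements Spec {
    @JsonProperty
    long runTime;
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Spec spec = mapper.readValue("{\"type\":\"noop\", \"runTime\":100}", Spec.class);
    System.out.println(spec.getClass().getSimpleName()); // prints NoopSpec
  }
}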
Use of io.druid.jackson.DefaultObjectMapper in project druid by druid-io.
Class JavaScriptWorkerSelectStrategyTest, method testDisabled.
@Test
public void testDisabled() throws Exception {
  ObjectMapper mapper = new DefaultObjectMapper();
  mapper.setInjectableValues(
      new InjectableValues.Std().addValue(JavaScriptConfig.class, new JavaScriptConfig(false))
  );
  final String strategyString = mapper.writeValueAsString(STRATEGY);
  expectedException.expect(JsonMappingException.class);
  expectedException.expectCause(CoreMatchers.<Throwable>instanceOf(IllegalStateException.class));
  expectedException.expectMessage("JavaScript is disabled");
  mapper.readValue(strategyString, JavaScriptWorkerSelectStrategy.class);
}
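Here the mapper's injectable values supply a JavaScriptConfig to the strategy during deserialization, and the strategy refuses to be built when JavaScript is disabled. A minimal sketch of that injection mechanism with plain Jackson follows; FeatureConfig and Strategy are hypothetical stand-ins, not druid types.

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectableValuesSketch {
  // Hypothetical feature flag injected at deserialization time.
  static class FeatureConfig {
    final boolean enabled;

    FeatureConfig(boolean enabled) {
      this.enabled = enabled;
    }
  }

  static class Strategy {
    final String name;

    @JsonCreator
    Strategy(@JsonProperty("name") String name, @JacksonInject FeatureConfig config) {
      if (!config.enabled) {
        // Mirrors the "JavaScript is disabled" failure mode: reject the spec when the feature is off.
        throw new IllegalStateException("feature is disabled");
      }
      this.name = name;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.setInjectableValues(
        new InjectableValues.Std().addValue(FeatureConfig.class, new FeatureConfig(false))
    );
    try {
      mapper.readValue("{\"name\":\"affinity\"}", Strategy.class);
    } catch (Exception e) {
      // Jackson wraps the IllegalStateException thrown by the constructor.
      System.out.println("deserialization rejected: " + e.getMessage());
    }
  }
}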