Usage of org.apache.druid.indexing.overlord.supervisor.SupervisorSpec in the druid-io/druid project: class SupervisorResourceFilterTest, method testSupervisorNotFound.
@Test
public void testSupervisorNotFound() {
    // Requesting a supervisor that does not exist must surface a 404 from the filter.
    String dataSource = "not_exist_data_source";
    expect(containerRequest.getPathSegments())
        .andReturn(getPathSegments("/druid/indexer/v1/supervisor/" + dataSource))
        .anyTimes();
    expect(containerRequest.getMethod()).andReturn("POST").anyTimes();
    // An absent spec from the manager is what should trigger the NOT_FOUND response.
    // (The previously created SupervisorSpec mock was never replayed, verified, or
    // consulted by the filter — dead copy-paste code — so it has been removed.)
    expect(supervisorManager.getSupervisorSpec(dataSource)).andReturn(Optional.absent()).atLeastOnce();
    EasyMock.replay(containerRequest);
    EasyMock.replay(supervisorManager);

    WebApplicationException expected = null;
    try {
        resourceFilter.filter(containerRequest);
    } catch (WebApplicationException e) {
        expected = e;
    }
    Assert.assertNotNull(expected);
    // JUnit convention: expected value first, actual second, so failure messages read correctly.
    Assert.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), expected.getResponse().getStatus());
    EasyMock.verify(containerRequest);
    EasyMock.verify(supervisorManager);
}
Usage of org.apache.druid.indexing.overlord.supervisor.SupervisorSpec in the druid-io/druid project: class OverlordSecurityResourceFilterTest, method setUp.
@Before
public void setUp() {
    // The mocked TaskStorageQueryAdapter is a singleton served by Guice, so its
    // expectations are recorded and it is switched to replay mode exactly once across
    // all parameterized runs; calling expect/replay again on an already-replayed
    // EasyMock object would throw.
    if (resourceFilter instanceof TaskResourceFilter && !mockedOnceTsqa) {
        tsqa = injector.getInstance(TaskStorageQueryAdapter.class);
        EasyMock.expect(tsqa.getTask(EasyMock.anyString())).andReturn(Optional.of(noopTask)).anyTimes();
        EasyMock.replay(tsqa);
        mockedOnceTsqa = true;
    }
    // Same one-time treatment for the SupervisorManager singleton: every supervisor
    // lookup resolves to a minimal stub spec whose only meaningful piece of state is
    // the datasource list used by the authorization check.
    if (resourceFilter instanceof SupervisorResourceFilter && !mockedOnceSM) {
        supervisorManager = injector.getInstance(SupervisorManager.class);
        SupervisorSpec stubSpec = new SupervisorSpec() {
            @Override
            public String getId() {
                return "id";
            }

            @Override
            public Supervisor createSupervisor() {
                return null;
            }

            @Override
            public SupervisorTaskAutoScaler createAutoscaler(Supervisor supervisor) {
                return new NoopTaskAutoScaler();
            }

            @Override
            public List<String> getDataSources() {
                return ImmutableList.of("test");
            }

            @Override
            public SupervisorSpec createSuspendedSpec() {
                return null;
            }

            @Override
            public SupervisorSpec createRunningSpec() {
                return null;
            }

            @Override
            public boolean isSuspended() {
                return false;
            }

            @Override
            public String getType() {
                return null;
            }

            @Override
            public String getSource() {
                return null;
            }
        };
        EasyMock.expect(supervisorManager.getSupervisorSpec(EasyMock.anyString()))
                .andReturn(Optional.of(stubSpec))
                .anyTimes();
        EasyMock.replay(supervisorManager);
        mockedOnceSM = true;
    }
    setUp(resourceFilter);
}
Usage of org.apache.druid.indexing.overlord.supervisor.SupervisorSpec in the druid-io/druid project: class SQLMetadataSupervisorManager, method removeTerminatedSupervisorsOlderThan.
@Override
public int removeTerminatedSupervisorsOlderThan(long timestamp) {
    // Cutoff instant: every terminated supervisor row created before this date is deleted.
    DateTime dateTime = DateTimes.utc(timestamp);
    Map<String, SupervisorSpec> terminatedSupervisors = getLatestTerminatedOnly();
    return dbi.withHandle(handle -> {
        // Bind the cutoff as a statement parameter instead of splicing it into the SQL
        // text: the driver handles quoting/escaping and the statement text stays constant.
        // Only the table name (not user data) is interpolated.
        final PreparedBatch batch = handle.prepareBatch(
            StringUtils.format(
                "DELETE FROM %1$s WHERE spec_id = :spec_id AND created_date < :created_date",
                getSupervisorsTable()
            )
        );
        for (Map.Entry<String, SupervisorSpec> supervisor : terminatedSupervisors.entrySet()) {
            batch.bind("spec_id", supervisor.getKey())
                 .bind("created_date", dateTime.toString())
                 .add();
        }
        // execute() reports one row count per batch entry; the duty's result is the total.
        int[] result = batch.execute();
        return IntStream.of(result).sum();
    });
}
Usage of org.apache.druid.indexing.overlord.supervisor.SupervisorSpec in the druid-io/druid project: class TaskResourceFilterTest, method testTaskNotFound.
@Test
public void testTaskNotFound() {
    // Requesting a task that does not exist must surface a 404 from the filter.
    String taskId = "not_exist_task_id";
    expect(containerRequest.getPathSegments())
        .andReturn(getPathSegments("/task/" + taskId))
        .anyTimes();
    expect(containerRequest.getMethod()).andReturn("POST").anyTimes();
    // An absent task from the storage adapter is what should trigger the NOT_FOUND
    // response. (The previously created SupervisorSpec mock was copy-paste cruft from a
    // supervisor test — never replayed, verified, or consulted here — so it has been removed.)
    expect(taskStorageQueryAdapter.getTask(taskId)).andReturn(Optional.absent()).atLeastOnce();
    EasyMock.replay(containerRequest);
    EasyMock.replay(taskStorageQueryAdapter);

    WebApplicationException expected = null;
    try {
        resourceFilter.filter(containerRequest);
    } catch (WebApplicationException e) {
        expected = e;
    }
    Assert.assertNotNull(expected);
    // JUnit convention: expected value first, actual second, so failure messages read correctly.
    Assert.assertEquals(Response.Status.NOT_FOUND.getStatusCode(), expected.getResponse().getStatus());
    EasyMock.verify(containerRequest);
    EasyMock.verify(taskStorageQueryAdapter);
}
Usage of org.apache.druid.indexing.overlord.supervisor.SupervisorSpec in the druid-io/druid project: class KillDatasourceMetadata, method run.
@Override
public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params) {
    final long now = System.currentTimeMillis();
    // Throttle: only run once the configured period has elapsed since the last kill.
    if ((lastKillTime + period) < now) {
        lastKillTime = now;
        final long cutoffTimestamp = now - retainDuration;
        try {
            // Datasource metadata only exists for datasources driven by a supervisor.
            // An entry is considered active iff some active supervisor still references
            // its datasource, so gather those datasource names first.
            Map<String, SupervisorSpec> activeSupervisors = metadataSupervisorManager.getLatestActiveOnly();
            Set<String> datasourcesWithActiveSupervisor =
                activeSupervisors.values()
                                 .stream()
                                 .map(SupervisorSpec::getDataSources)
                                 .flatMap(Collection::stream)
                                 .filter(datasource -> !Strings.isNullOrEmpty(datasource))
                                 .collect(Collectors.toSet());
            // Metadata belonging to an active supervisor is excluded from removal.
            int datasourceMetadataRemovedCount = indexerMetadataStorageCoordinator.removeDataSourceMetadataOlderThan(
                cutoffTimestamp,
                datasourcesWithActiveSupervisor
            );
            params.getEmitter().emit(
                new ServiceMetricEvent.Builder().build("metadata/kill/datasource/count", datasourceMetadataRemovedCount)
            );
            log.info("Finished running KillDatasourceMetadata duty. Removed %,d datasource metadata", datasourceMetadataRemovedCount);
        } catch (Exception e) {
            // Best-effort duty: log and fall through so the coordinator keeps running.
            log.error(e, "Failed to kill datasource metadata");
        }
    }
    return params;
}
Aggregations