Use of org.apache.kafka.trogdor.rest.TasksResponse in project kafka by apache.
Example from the class CoordinatorRestResource, method tasks.
@GET
@Path("/tasks/")
public Response tasks(@QueryParam("taskId") List<String> taskId,
        @DefaultValue("0") @QueryParam("firstStartMs") long firstStartMs,
        @DefaultValue("0") @QueryParam("lastStartMs") long lastStartMs,
        @DefaultValue("0") @QueryParam("firstEndMs") long firstEndMs,
        @DefaultValue("0") @QueryParam("lastEndMs") long lastEndMs,
        @DefaultValue("") @QueryParam("state") String state) throws Throwable {
    // Reject state values that are not valid TaskStateType names.
    boolean isEmptyState = state.equals("");
    if (!isEmptyState && !TaskStateType.Constants.VALUES.contains(state)) {
        return Response.status(400).entity(
            String.format("State %s is invalid. Must be one of %s", state, TaskStateType.Constants.VALUES)).build();
    }
    Optional<TaskStateType> givenState = Optional.ofNullable(isEmptyState ? null : TaskStateType.valueOf(state));
    TasksResponse resp = coordinator().tasks(new TasksRequest(taskId, firstStartMs, lastStartMs, firstEndMs, lastEndMs, givenState));
    return Response.status(200).entity(resp).build();
}
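For comparison, here is a minimal sketch of building the same TasksRequest outside the REST layer, using only the constructor and validation shown above. The task ids, timestamps, and the "RUNNING" state value are illustrative assumptions, not taken from the project, and the import package for TaskStateType (org.apache.kafka.trogdor.task) is assumed.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import org.apache.kafka.trogdor.rest.TasksRequest;
import org.apache.kafka.trogdor.task.TaskStateType;

public class TasksRequestExample {
    public static void main(String[] args) {
        // Same check the endpoint performs before converting the raw string
        // into an enum value; "RUNNING" is assumed to be a valid state name.
        String state = "RUNNING";
        if (!TaskStateType.Constants.VALUES.contains(state)) {
            throw new IllegalArgumentException("State " + state + " is invalid. Must be one of "
                + TaskStateType.Constants.VALUES);
        }
        Optional<TaskStateType> givenState = Optional.of(TaskStateType.valueOf(state));

        // The zero timestamps mirror the @DefaultValue("0") defaults of the resource method;
        // the task ids are hypothetical.
        List<String> taskIds = Arrays.asList("produce_bench_1", "round_trip_2");
        TasksRequest request = new TasksRequest(taskIds, 1000, 0, 0, 0, givenState);
        System.out.println(request);
    }
}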
Use of org.apache.kafka.trogdor.rest.TasksResponse in project kafka by apache.
Example from the class JsonSerializationTest, method testDeserializationDoesNotProduceNulls.
@Test
public void testDeserializationDoesNotProduceNulls() throws Exception {
    // Each object is constructed with null fields; verify (not shown in this excerpt)
    // checks that deserialization does not produce nulls.
    verify(new FilesUnreadableFaultSpec(0, 0, null, null, null, 0));
    verify(new Kibosh.KiboshControlFile(null));
    verify(new NetworkPartitionFaultSpec(0, 0, null));
    verify(new ProcessStopFaultSpec(0, 0, null, null));
    verify(new AgentStatusResponse(0, null));
    verify(new TasksResponse(null));
    verify(new WorkerDone(null, null, 0, 0, null, null));
    verify(new WorkerRunning(null, null, 0, null));
    verify(new WorkerStopping(null, null, 0, null));
    verify(new ProduceBenchSpec(0, 0, null, null, 0, 0, null, null, Optional.empty(), null, null, null, null, null, false, false));
    verify(new RoundTripWorkloadSpec(0, 0, null, null, null, null, null, null, 0, null, null, 0));
    verify(new TopicsSpec());
    verify(new PartitionsSpec(0, (short) 0, null, null));
    Map<Integer, List<Integer>> partitionAssignments = new HashMap<Integer, List<Integer>>();
    partitionAssignments.put(0, Arrays.asList(1, 2, 3));
    partitionAssignments.put(1, Arrays.asList(1, 2, 3));
    verify(new PartitionsSpec(0, (short) 0, partitionAssignments, null));
    verify(new PartitionsSpec(0, (short) 0, null, null));
}
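The verify helper itself is not part of this excerpt. Below is a minimal sketch of what such a round-trip check could look like, assuming Jackson's ObjectMapper; the class and method names (RoundTripVerifier, verifyNoNullFields) are hypothetical and this is not the project's actual implementation.

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

import com.fasterxml.jackson.databind.ObjectMapper;

public final class RoundTripVerifier {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Serialize the object to JSON, deserialize it back, and assert that no
    // instance field of the result is null, i.e. null constructor arguments
    // were replaced by non-null defaults during deserialization.
    static void verifyNoNullFields(Object original) throws Exception {
        String json = MAPPER.writeValueAsString(original);
        Object roundTripped = MAPPER.readValue(json, original.getClass());
        for (Field field : roundTripped.getClass().getDeclaredFields()) {
            if (Modifier.isStatic(field.getModifiers())) {
                continue;
            }
            field.setAccessible(true);
            if (field.get(roundTripped) == null) {
                throw new AssertionError("Field " + field.getName() + " of "
                    + roundTripped.getClass().getSimpleName() + " deserialized to null");
            }
        }
    }
}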