Use of com.sun.jersey.api.client.Client in project hadoop by apache: the class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowsNotPresent.
@Test
public void testGetFlowsNotPresent() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster2/flows");
    ClientResponse resp = getResponse(client, uri);
    Set<FlowActivityEntity> entities =
        resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(0, entities.size());
  } finally {
    client.destroy();
  }
}
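The createClient() and getResponse(client, uri) helpers used throughout these snippets are defined elsewhere in the test class and are not shown on this page. A minimal sketch of what they might look like with the Jersey 1.x client API, assuming JSON responses and a fail-fast check for non-OK status codes (the class name, provider registration, and error handling here are illustrative assumptions, not the actual Hadoop code):

import java.io.IOException;
import java.net.URI;
import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;

// Hypothetical stand-in for the helpers assumed by the tests above; the real
// implementations live in TestTimelineReaderWebServicesHBaseStorage and may differ.
final class TimelineReaderTestSupport {

  private TimelineReaderTestSupport() {
  }

  static Client createClient() {
    // Register the YARN JSON provider so timeline entities can be
    // unmarshalled from the JSON responses.
    ClientConfig cfg = new DefaultClientConfig();
    cfg.getClasses().add(YarnJacksonJaxbJsonProvider.class);
    return Client.create(cfg);
  }

  static ClientResponse getResponse(Client client, URI uri)
      throws IOException {
    // Issue a GET that accepts JSON and fail fast on any non-OK status.
    ClientResponse resp = client.resource(uri)
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    if (resp == null ||
        resp.getClientResponseStatus() != ClientResponse.Status.OK) {
      throw new IOException("Incorrect response from " + uri);
    }
    return resp;
  }
}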
Use of com.sun.jersey.api.client.Client in project hadoop by apache: the class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowRunAppsNotPresent.
@Test
public void testGetFlowRunAppsNotPresent() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster2/users/user1/flows/flow_name/runs/" +
        "1002345678919/apps");
    ClientResponse resp = getResponse(client, uri);
    Set<TimelineEntity> entities =
        resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(0, entities.size());
  } finally {
    client.destroy();
  }
}
Use of com.sun.jersey.api.client.Client in project hadoop by apache: the class TestTimelineReaderWebServicesHBaseStorage, method testGetEntityDataToRetrieve.
/**
 * Tests if specific configs and metrics are retrieved for the getEntity call.
 */
@Test
public void testGetEntityDataToRetrieve() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1/entity2?confstoretrieve=cfg_,configuration_");
    ClientResponse resp = getResponse(client, uri);
    TimelineEntity entity = resp.getEntity(TimelineEntity.class);
    assertNotNull(entity);
    assertEquals("entity2", entity.getId());
    assertEquals("type1", entity.getType());
    assertEquals(2, entity.getConfigs().size());
    for (String configKey : entity.getConfigs().keySet()) {
      assertTrue(configKey.startsWith("configuration_") ||
          configKey.startsWith("cfg_"));
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1/entity2?confstoretrieve=!(cfg_,configuration_)");
    resp = getResponse(client, uri);
    entity = resp.getEntity(TimelineEntity.class);
    assertNotNull(entity);
    assertEquals("entity2", entity.getId());
    assertEquals("type1", entity.getType());
    assertEquals(1, entity.getConfigs().size());
    for (String configKey : entity.getConfigs().keySet()) {
      assertTrue(configKey.startsWith("config_"));
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1/entity2?metricstoretrieve=MAP1_,HDFS_");
    resp = getResponse(client, uri);
    entity = resp.getEntity(TimelineEntity.class);
    assertNotNull(entity);
    assertEquals("entity2", entity.getId());
    assertEquals("type1", entity.getType());
    assertEquals(2, entity.getMetrics().size());
    for (TimelineMetric metric : entity.getMetrics()) {
      assertTrue(metric.getId().startsWith("MAP1_") ||
          metric.getId().startsWith("HDFS_"));
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1/entity2?metricstoretrieve=!(MAP1_,HDFS_)");
    resp = getResponse(client, uri);
    entity = resp.getEntity(TimelineEntity.class);
    assertNotNull(entity);
    assertEquals("entity2", entity.getId());
    assertEquals("type1", entity.getType());
    assertEquals(1, entity.getMetrics().size());
    for (TimelineMetric metric : entity.getMetrics()) {
      assertTrue(metric.getId().startsWith("MAP11_"));
      assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
      assertEquals(1, metric.getValues().size());
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1/entity2?metricstoretrieve=!(MAP1_,HDFS_)&" +
        "metricslimit=5");
    resp = getResponse(client, uri);
    entity = resp.getEntity(TimelineEntity.class);
    assertNotNull(entity);
    assertEquals("entity2", entity.getId());
    assertEquals("type1", entity.getType());
    assertEquals(1, entity.getMetrics().size());
    for (TimelineMetric metric : entity.getMetrics()) {
      assertTrue(metric.getId().startsWith("MAP11_"));
      assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
    }
  } finally {
    client.destroy();
  }
}
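The confstoretrieve and metricstoretrieve parameters exercised above take a comma-separated list of config or metric prefixes, and wrapping the list in !(...) inverts it so that everything except the listed prefixes is returned; the queries that omit metricslimit get a single value per metric (SINGLE_VALUE), while metricslimit=5 asks for up to five values per metric. As an illustration only, not part of the Hadoop test, a hypothetical helper for assembling such a filter value could look like this:

// Hypothetical helper (not part of the Hadoop test) for building a
// confstoretrieve/metricstoretrieve value: a comma-separated prefix list,
// optionally wrapped in !(...) to exclude the listed prefixes instead of
// selecting them.
static String prefixFilter(boolean exclude, String... prefixes) {
  String list = String.join(",", prefixes);
  return exclude ? "!(" + list + ")" : list;
}

// For example:
//   prefixFilter(false, "cfg_", "configuration_")  -> "cfg_,configuration_"
//   prefixFilter(true, "MAP1_", "HDFS_")           -> "!(MAP1_,HDFS_)"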
Use of com.sun.jersey.api.client.Client in project hadoop by apache: the class TestTimelineReaderWebServicesHBaseStorage, method testGetFlowRunsMetricsToRetrieve.
@Test
public void testGetFlowRunsMetricsToRetrieve() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "metricstoretrieve=MAP_,HDFS_");
    ClientResponse resp = getResponse(client, uri);
    Set<FlowRunEntity> entities =
        resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
        });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(2, entities.size());
    int metricCnt = 0;
    for (FlowRunEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
      for (TimelineMetric metric : entity.getMetrics()) {
        assertTrue(metric.getId().startsWith("MAP_") ||
            metric.getId().startsWith("HDFS_"));
      }
    }
    assertEquals(3, metricCnt);
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/users/user1/flows/flow_name/runs?" +
        "metricstoretrieve=!(MAP_,HDFS_)");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<FlowRunEntity>>() {
    });
    assertEquals(MediaType.APPLICATION_JSON_TYPE + "; charset=utf-8",
        resp.getType().toString());
    assertNotNull(entities);
    assertEquals(2, entities.size());
    metricCnt = 0;
    for (FlowRunEntity entity : entities) {
      metricCnt += entity.getMetrics().size();
      for (TimelineMetric metric : entity.getMetrics()) {
        assertTrue(metric.getId().startsWith("MAP1_"));
      }
    }
    assertEquals(1, metricCnt);
  } finally {
    client.destroy();
  }
}
Use of com.sun.jersey.api.client.Client in project hadoop by apache: the class TestTimelineReaderWebServicesHBaseStorage, method testGetEntitiesEventFilters.
@Test
public void testGetEntitiesEventFilters() throws Exception {
  Client client = createClient();
  try {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=event1,event3");
    ClientResponse resp = getResponse(client, uri);
    Set<TimelineEntity> entities =
        resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity1") ||
          entity.getId().equals("entity2"));
    }
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=!(event1,event3)");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(0, entities.size());
    // eventfilters=!(event1,event3) OR event5,event6
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=!(event1,event3)%20OR%20event5,event6");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(1, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity2"));
    }
    // eventfilters=(!(event1,event3) OR event5,event6) OR
    // (event1,event2 AND (event3,event4))
    uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
        "timeline/clusters/cluster1/apps/application_1111111111_1111/" +
        "entities/type1?eventfilters=(!(event1,event3)%20OR%20event5," +
        "event6)%20OR%20(event1,event2%20AND%20(event3,event4))");
    resp = getResponse(client, uri);
    entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
    });
    assertNotNull(entities);
    assertEquals(2, entities.size());
    for (TimelineEntity entity : entities) {
      assertTrue(entity.getId().equals("entity1") ||
          entity.getId().equals("entity2"));
    }
  } finally {
    client.destroy();
  }
}
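The eventfilters expressions above are percent-encoded by hand (%20 for each space around the OR/AND operators). As an alternative sketch, not how the Hadoop test builds its URIs, the multi-argument java.net.URI constructor can produce the same encoding from a readable expression, since it escapes characters that are illegal in the query component (such as spaces) while leaving '!', '(', ')' and ',' intact:

// Assumes the same serverPort variable used above; the constructor throws
// URISyntaxException, which the surrounding test methods already cover via
// "throws Exception".
URI uri = new URI("http", null, "localhost", serverPort,
    "/ws/v2/timeline/clusters/cluster1/apps/application_1111111111_1111"
        + "/entities/type1",
    "eventfilters=!(event1,event3) OR event5,event6", null);
// uri.toString() ends with
// "?eventfilters=!(event1,event3)%20OR%20event5,event6"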