Search in sources :

Example 1 with GenericType

use of com.sun.jersey.api.client.GenericType in project hadoop by apache.

the class TestNMWebServices method testContainerLogs.
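
In the Jersey 1.x client API, GenericType captures a parameterized type (for example List<ContainerLogsInfo>) in an anonymous subclass so that ClientResponse.getEntity can deserialize a typed collection despite type erasure. A minimal, self-contained sketch of the pattern against a hypothetical JSON endpoint (the URL and payload are illustrative only; the Hadoop test below exercises the real NodeManager web services):

import java.util.List;
import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.GenericType;

public class GenericTypeSketch {
    public static void main(String[] args) {
        Client client = Client.create();
        // Hypothetical endpoint returning a JSON array of strings.
        ClientResponse response = client.resource("http://localhost:8080/items").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        // The anonymous subclass of GenericType preserves List<String> at runtime,
        // which a plain Class literal cannot express.
        List<String> items = response.getEntity(new GenericType<List<String>>() {
        });
        System.out.println(items);
        client.destroy();
    }
}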

private void testContainerLogs(WebResource r, ContainerId containerId) throws IOException {
    final String containerIdStr = containerId.toString();
    final ApplicationAttemptId appAttemptId = containerId.getApplicationAttemptId();
    final ApplicationId appId = appAttemptId.getApplicationId();
    final String appIdStr = appId.toString();
    final String filename = "logfile1";
    final String logMessage = "log message\n";
    nmContext.getApplications().put(appId, new ApplicationImpl(null, "user", appId, null, nmContext));
    MockContainer container = new MockContainer(appAttemptId, new AsyncDispatcher(), new Configuration(), "user", appId, 1);
    container.setState(ContainerState.RUNNING);
    nmContext.getContainers().put(containerId, container);
    // write out log file
    Path path = dirsHandler.getLogPathForWrite(ContainerLaunch.getRelativeContainerLogDir(appIdStr, containerIdStr) + "/" + filename, false);
    File logFile = new File(path.toUri().getPath());
    logFile.deleteOnExit();
    assertTrue("Failed to create log dir", logFile.getParentFile().mkdirs());
    PrintWriter pw = new PrintWriter(logFile);
    pw.print(logMessage);
    pw.close();
    // ask for it
    ClientResponse response = r.path(filename).accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    String responseText = response.getEntity(String.class);
    String responseLogMessage = getLogContext(responseText);
    assertEquals(logMessage, responseLogMessage);
    int fullTextSize = responseLogMessage.getBytes().length;
    // specify how many bytes we should get from the logs
    // a positive number returns the first n bytes of the
    // container log
    response = r.path(filename).queryParam("size", "5").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    responseLogMessage = getLogContext(responseText);
    assertEquals(5, responseLogMessage.getBytes().length);
    assertEquals(new String(logMessage.getBytes(), 0, 5), responseLogMessage);
    assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
    // specify a byte count larger than the actual file size;
    // we should get the full log
    response = r.path(filename).queryParam("size", "10000").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    responseLogMessage = getLogContext(responseText);
    assertEquals(fullTextSize, responseLogMessage.getBytes().length);
    assertEquals(logMessage, responseLogMessage);
    // specify a negative number to get the last n bytes from the
    // container log
    response = r.path(filename).queryParam("size", "-5").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    responseLogMessage = getLogContext(responseText);
    assertEquals(5, responseLogMessage.getBytes().length);
    assertEquals(new String(logMessage.getBytes(), logMessage.getBytes().length - 5, 5), responseLogMessage);
    assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
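    // a negative size larger than the file length should return the full log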
    response = r.path(filename).queryParam("size", "-10000").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    responseLogMessage = getLogContext(responseText);
    assertEquals("text/plain; charset=utf-8", response.getType().toString());
    assertEquals(fullTextSize, responseLogMessage.getBytes().length);
    assertEquals(logMessage, responseLogMessage);
    // ask and download it
    response = r.path(filename).queryParam("format", "octet-stream").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    responseLogMessage = getLogContext(responseText);
    assertEquals(logMessage, responseLogMessage);
    assertEquals(200, response.getStatus());
    assertEquals("application/octet-stream; charset=utf-8", response.getType().toString());
    // specify an invalid format value
    response = r.path(filename).queryParam("format", "123").accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    assertEquals("The valid values for the parameter : format are " + WebAppUtils.listSupportedLogContentType(), responseText);
    assertEquals(400, response.getStatus());
    // ask for a file that doesn't exist; the request is redirected to
    // the log server
    URI requestURI = r.path("uhhh").getURI();
    String redirectURL = getRedirectURL(requestURI.toString());
    assertTrue(redirectURL != null);
    assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
    // Get the container log files' names
    WebResource r1 = resource();
    response = r1.path("ws").path("v1").path("node").path("containers").path(containerIdStr).path("logs").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(200, response.getStatus());
    List<ContainerLogsInfo> responseList = response.getEntity(new GenericType<List<ContainerLogsInfo>>() {
    });
    assertTrue(responseList.size() == 1);
    assertEquals(responseList.get(0).getLogType(), ContainerLogAggregationType.LOCAL.toString());
    List<PerContainerLogFileInfo> logMeta = responseList.get(0).getContainerLogsInfo();
    assertTrue(logMeta.size() == 1);
    assertEquals(logMeta.get(0).getFileName(), filename);
    // now create an aggregated log in the remote file system
    File tempLogDir = new File("target", TestNMWebServices.class.getSimpleName() + "temp-log-dir");
    try {
        String aggregatedLogFile = filename + "-aggregated";
        String aggregatedLogMessage = "This is aggregated log.";
        TestContainerLogsUtils.createContainerLogFileInRemoteFS(nmContext.getConf(), FileSystem.get(nmContext.getConf()), tempLogDir.getAbsolutePath(), containerId, nmContext.getNodeId(), aggregatedLogFile, "user", aggregatedLogMessage, true);
        r1 = resource();
        response = r1.path("ws").path("v1").path("node").path("containers").path(containerIdStr).path("logs").accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(200, response.getStatus());
        responseList = response.getEntity(new GenericType<List<ContainerLogsInfo>>() {
        });
        assertEquals(responseList.size(), 2);
        for (ContainerLogsInfo logInfo : responseList) {
            if (logInfo.getLogType().equals(ContainerLogAggregationType.AGGREGATED.toString())) {
                List<PerContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
                assertTrue(meta.size() == 1);
                assertEquals(meta.get(0).getFileName(), aggregatedLogFile);
            } else {
                assertEquals(logInfo.getLogType(), ContainerLogAggregationType.LOCAL.toString());
                List<PerContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
                assertTrue(meta.size() == 1);
                assertEquals(meta.get(0).getFileName(), filename);
            }
        }
        // Test whether we can get the aggregated log as well
        TestContainerLogsUtils.createContainerLogFileInRemoteFS(nmContext.getConf(), FileSystem.get(nmContext.getConf()), tempLogDir.getAbsolutePath(), containerId, nmContext.getNodeId(), filename, "user", aggregatedLogMessage, true);
        response = r.path(filename).accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
        responseText = response.getEntity(String.class);
        assertTrue(responseText.contains("LogAggregationType: " + ContainerLogAggregationType.AGGREGATED));
        assertTrue(responseText.contains(aggregatedLogMessage));
        assertTrue(responseText.contains("LogAggregationType: " + ContainerLogAggregationType.LOCAL));
        assertTrue(responseText.contains(logMessage));
    } finally {
        FileUtil.fullyDelete(tempLogDir);
    }
    // After container is completed, it is removed from nmContext
    nmContext.getContainers().remove(containerId);
    Assert.assertNull(nmContext.getContainers().get(containerId));
    response = r.path(filename).accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
    responseText = response.getEntity(String.class);
    assertTrue(responseText.contains(logMessage));
}
Also used : Path(org.apache.hadoop.fs.Path) ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) ContainerLogsInfo(org.apache.hadoop.yarn.server.webapp.dao.ContainerLogsInfo) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) ApplicationImpl(org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationImpl) WebResource(com.sun.jersey.api.client.WebResource) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) URI(java.net.URI) PerContainerLogFileInfo(org.apache.hadoop.yarn.logaggregation.PerContainerLogFileInfo) AsyncDispatcher(org.apache.hadoop.yarn.event.AsyncDispatcher) List(java.util.List) NodeList(org.w3c.dom.NodeList) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) File(java.io.File) PrintWriter(java.io.PrintWriter)

Example 2 with GenericType

use of com.sun.jersey.api.client.GenericType in project hadoop by apache.

the class TestTimelineReaderWebServicesHBaseStorage method testGetFlows.
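
The timeline reader tests below call getResponse(client, uri) and verifyHttpResponse(...) helpers that are not reproduced on this page. A rough sketch of what the getResponse helper might look like (hypothetical; the actual Hadoop helper may differ), using the same Jersey 1.x client and JUnit asserts as the test:

static ClientResponse getResponse(Client client, URI uri) throws Exception {
    // Issue a GET for JSON and expect a 200 OK response.
    ClientResponse resp = client.resource(uri).accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertNotNull(resp);
    assertEquals(200, resp.getStatus());
    return resp;
}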

@Test
public void testGetFlows() throws Exception {
    Client client = createClient();
    try {
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows");
        ClientResponse resp = getResponse(client, uri);
        Set<FlowActivityEntity> entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (FlowActivityEntity entity : entities) {
            assertTrue((entity.getId().endsWith("@flow_name") && entity.getFlowRuns().size() == 2) || (entity.getId().endsWith("@flow_name2") && entity.getFlowRuns().size() == 1));
        }
        // Query without specifying cluster ID.
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/flows/");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
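        // limit the number of returned flows to 1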
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?limit=1");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        long firstFlowActivity = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(1425016501000L);
        DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get();
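        // daterange with both a start and an end day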
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(firstFlowActivity) + "-" + fmt.format(dayTs));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (FlowActivityEntity entity : entities) {
            assertTrue((entity.getId().endsWith("@flow_name") && entity.getFlowRuns().size() == 2) || (entity.getId().endsWith("@flow_name2") && entity.getFlowRuns().size() == 1));
        }
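        // a daterange consisting of a single day with no flow activity returns no flows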
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(dayTs + (4 * 86400000L)));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
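        // daterange with only an end day (-<end>)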
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=-" + fmt.format(dayTs));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
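        // daterange with only a start day (<start>-)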
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(firstFlowActivity) + "-");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
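        // malformed daterange values should be rejected with 400 Bad Request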
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150711:20150714");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150714-20150711");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=2015071129-20150712");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150711-2015071243");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    } finally {
        client.destroy();
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) HashSet(java.util.HashSet) DateFormat(java.text.DateFormat) FlowActivityEntity(org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity) Client(com.sun.jersey.api.client.Client) URI(java.net.URI) Test(org.junit.Test)

Example 3 with GenericType

use of com.sun.jersey.api.client.GenericType in project hadoop by apache.

the class TestTimelineReaderWebServicesHBaseStorage method testGetEntitiesInfoFilters.
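
The %20 sequences in the URIs below are percent-encoded spaces inside the infofilters expressions. As an aside (not part of the Hadoop test), the multi-argument java.net.URI constructor can perform this encoding instead of hand-writing the escapes; the port below is hypothetical:

import java.net.URI;

public class FilterUriSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical port; the test uses its own serverPort field
        int serverPort = 8188;
        // spaces in the query component are percent-encoded to %20 by this constructor
        URI uri = new URI("http", null, "localhost", serverPort, "/ws/v2/timeline/clusters/cluster1/apps/application_1111111111_1111/entities/type1", "infofilters=info1 eq cluster1 OR info1 eq cluster2", null);
        System.out.println(uri);
    }
}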

@Test
public void testGetEntitiesInfoFilters() throws Exception {
    Client client = createClient();
    try {
        // infofilters=info1 eq cluster1 OR info1 eq cluster2
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info1%20eq%20cluster1%20OR%20info1%20eq" + "%20cluster2");
        ClientResponse resp = getResponse(client, uri);
        Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=info1 eq cluster1 AND info4 eq 35000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        // infofilters=info4 eq 35000 OR info4 eq 36000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info4%20eq%2035000%20OR%20info4%20eq" + "%2036000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=(info1 eq cluster1 AND info4 eq 35000) OR
        // (info1 eq cluster2 AND info2 eq 2.0)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0" + ")");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        int infoCnt = 0;
        for (TimelineEntity entity : entities) {
            infoCnt += entity.getInfo().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        // The UID is included in the info field even if fields=INFO is not specified.
        assertEquals(1, infoCnt);
        // infofilters=(info1 eq cluster1 AND info4 eq 35000) OR
        // (info1 eq cluster2 AND info2 eq 2.0)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%20" + "2.0)&fields=INFO");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        infoCnt = 0;
        for (TimelineEntity entity : entities) {
            infoCnt += entity.getInfo().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        // Includes UID in info field.
        assertEquals(4, infoCnt);
        // Test for behavior when compare op is ne (not equals) vs ene
        // (exists and not equals). info3 does not exist for entity2. For ne,
        // both entity1 and entity2 will be returned. For ene, only entity1
        // will be returned, since we are checking for existence too.
        // infofilters=info3 ne 39000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info3%20ne%2039000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=info3 ene 39000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info3%20ene%2039000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1"));
        }
    } finally {
        client.destroy();
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) HashSet(java.util.HashSet) Client(com.sun.jersey.api.client.Client) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) URI(java.net.URI) Test(org.junit.Test)

Example 4 with GenericType

use of com.sun.jersey.api.client.GenericType in project hadoop by apache.

the class TestTimelineReaderWebServicesHBaseStorage method testGetEntitiesMetricFilters.

@Test
public void testGetEntitiesMetricFilters() throws Exception {
    Client client = createClient();
    try {
        // metricfilters=HDFS_BYTES_READ lt 60 OR HDFS_BYTES_READ eq 157
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=HDFS_BYTES_READ%20lt%2060%20OR%20" + "HDFS_BYTES_READ%20eq%20157");
        ClientResponse resp = getResponse(client, uri);
        Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // metricfilters=HDFS_BYTES_READ lt 60 AND MAP_SLOT_MILLIS gt 40
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=HDFS_BYTES_READ%20lt%2060%20AND%20" + "MAP_SLOT_MILLIS%20gt%2040");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        // metricfilters=(HDFS_BYTES_READ lt 60 AND MAP_SLOT_MILLIS gt 40) OR
        // (MAP1_SLOT_MILLIS ge 140 AND MAP11_SLOT_MILLIS le 122)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=(HDFS_BYTES_READ%20lt%2060%20AND%20" + "MAP_SLOT_MILLIS%20gt%2040)%20OR%20(MAP1_SLOT_MILLIS%20ge" + "%20140%20AND%20MAP11_SLOT_MILLIS%20le%20122)");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        int metricCnt = 0;
        for (TimelineEntity entity : entities) {
            metricCnt += entity.getMetrics().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        assertEquals(0, metricCnt);
        // metricfilters=(HDFS_BYTES_READ lt 60 AND MAP_SLOT_MILLIS gt 40) OR
        // (MAP1_SLOT_MILLIS ge 140 AND MAP11_SLOT_MILLIS le 122)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=(HDFS_BYTES_READ%20lt%2060%20AND%20" + "MAP_SLOT_MILLIS%20gt%2040)%20OR%20(MAP1_SLOT_MILLIS%20ge" + "%20140%20AND%20MAP11_SLOT_MILLIS%20le%20122)&fields=METRICS");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        metricCnt = 0;
        for (TimelineEntity entity : entities) {
            metricCnt += entity.getMetrics().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        assertEquals(3, metricCnt);
        // metricfilters=(HDFS_BYTES_READ lt 60 AND MAP_SLOT_MILLIS gt 40) OR
        // (MAP1_SLOT_MILLIS ge 140 AND MAP11_SLOT_MILLIS le 122)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=(HDFS_BYTES_READ%20lt%2060%20AND%20" + "MAP_SLOT_MILLIS%20gt%2040)%20OR%20(MAP1_SLOT_MILLIS%20ge" + "%20140%20AND%20MAP11_SLOT_MILLIS%20le%20122)&metricstoretrieve=" + "!(HDFS)");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        metricCnt = 0;
        for (TimelineEntity entity : entities) {
            metricCnt += entity.getMetrics().size();
            assertTrue(entity.getId().equals("entity2"));
            for (TimelineMetric metric : entity.getMetrics()) {
                assertTrue(metric.getId().startsWith("MAP1"));
                assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
            }
        }
        assertEquals(2, metricCnt);
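        // metricslimit=10 allows up to 10 time-series values per metric to be returned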
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=(HDFS_BYTES_READ%20lt%2060%20AND%20" + "MAP_SLOT_MILLIS%20gt%2040)%20OR%20(MAP1_SLOT_MILLIS%20ge" + "%20140%20AND%20MAP11_SLOT_MILLIS%20le%20122)&metricstoretrieve=" + "!(HDFS)&metricslimit=10");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        metricCnt = 0;
        for (TimelineEntity entity : entities) {
            metricCnt += entity.getMetrics().size();
            assertTrue(entity.getId().equals("entity2"));
            for (TimelineMetric metric : entity.getMetrics()) {
                assertTrue(metric.getId().startsWith("MAP1"));
                if (metric.getId().equals("MAP1_SLOT_MILLIS")) {
                    assertEquals(2, metric.getValues().size());
                    assertEquals(TimelineMetric.Type.TIME_SERIES, metric.getType());
                } else if (metric.getId().equals("MAP11_SLOT_MILLIS")) {
                    assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
                } else {
                    fail("Unexpected metric id");
                }
            }
        }
        assertEquals(2, metricCnt);
        // Test for behavior when compare op is ne(not equals) vs ene
        // (exists and not equals). MAP11_SLOT_MILLIS does not exist for
        // entity1. For ne, both entity1 and entity2 will be returned. For ene,
        // only entity2 will be returned as we are checking for existence too.
        // metricfilters=MAP11_SLOT_MILLIS ne 100
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=MAP11_SLOT_MILLIS%20ne%20100");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // metricfilters=MAP11_SLOT_MILLIS ene 100
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?metricfilters=MAP11_SLOT_MILLIS%20ene%20100");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity2"));
        }
    } finally {
        client.destroy();
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) HashSet(java.util.HashSet) Client(com.sun.jersey.api.client.Client) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) URI(java.net.URI) Test(org.junit.Test)

Example 5 with GenericType

use of com.sun.jersey.api.client.GenericType in project hadoop by apache.

the class TestTimelineReaderWebServicesHBaseStorage method testGetEntitiesRelationFilters.

@Test
public void testGetEntitiesRelationFilters() throws Exception {
    Client client = createClient();
    try {
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?isrelatedto=type3:entity31,type2:entity21:entity22");
        ClientResponse resp = getResponse(client, uri);
        Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "clusters/cluster1/apps/application_1111111111_1111/entities/type1" + "?isrelatedto=!(type3:entity31,type2:entity21:entity22)");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        // isrelatedto=!(type3:entity31,type2:entity21:entity22)OR type5:entity51,
        // type6:entity61:entity66
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "clusters/cluster1/apps/application_1111111111_1111/entities/type1" + "?isrelatedto=!(type3:entity31,type2:entity21:entity22)%20OR%20" + "type5:entity51,type6:entity61:entity66");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity2"));
        }
        // isrelatedto=(!(type3:entity31,type2:entity21:entity22)OR type5:
        // entity51,type6:entity61:entity66) OR (type1:entity14,type2:entity21:
        // entity22 AND (type3:entity32:entity35,type4:entity42))
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "clusters/cluster1/apps/application_1111111111_1111/entities/type1" + "?isrelatedto=(!(type3:entity31,type2:entity21:entity22)%20OR%20" + "type5:entity51,type6:entity61:entity66)%20OR%20(type1:entity14," + "type2:entity21:entity22%20AND%20(type3:entity32:entity35," + "type4:entity42))");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // relatesto=!(type3:entity31,type2:entity21:entity22)OR type5:entity51,
        // type6:entity61:entity66
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "clusters/cluster1/apps/application_1111111111_1111/entities/type1" + "?relatesto=!%20(type3:entity31,type2:entity21:entity22%20)%20OR%20" + "type5:entity51,type6:entity61:entity66");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity2"));
        }
        // relatesto=(!(type3:entity31,type2:entity21:entity22)OR type5:entity51,
        // type6:entity61:entity66) OR (type1:entity14,type2:entity21:entity22 AND
        // (type3:entity32:entity35 , type4:entity42))
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "clusters/cluster1/apps/application_1111111111_1111/entities/type1" + "?relatesto=(!(%20type3:entity31,type2:entity21:entity22)%20OR%20" + "type5:entity51,type6:entity61:entity66%20)%20OR%20(type1:entity14," + "type2:entity21:entity22%20AND%20(type3:entity32:entity35%20,%20" + "type4:entity42))");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
    } finally {
        client.destroy();
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) HashSet(java.util.HashSet) Client(com.sun.jersey.api.client.Client) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) URI(java.net.URI) Test(org.junit.Test)

Aggregations

ClientResponse (com.sun.jersey.api.client.ClientResponse) 19
GenericType (com.sun.jersey.api.client.GenericType) 19
URI (java.net.URI) 19
Test (org.junit.Test) 18
Client (com.sun.jersey.api.client.Client) 17
Set (java.util.Set) 17
TimelineEntity (org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) 14
HashSet (java.util.HashSet) 12
TimelineMetric (org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) 5
WebResource (com.sun.jersey.api.client.WebResource) 2
List (java.util.List) 2
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId) 2
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId) 2
FlowRunEntity (org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity) 2
PerContainerLogFileInfo (org.apache.hadoop.yarn.logaggregation.PerContainerLogFileInfo) 2
ContainerLogsInfo (org.apache.hadoop.yarn.server.webapp.dao.ContainerLogsInfo) 2
File (java.io.File) 1
PrintWriter (java.io.PrintWriter) 1
DateFormat (java.text.DateFormat) 1
Configuration (org.apache.hadoop.conf.Configuration) 1