Example 61 with Set

Use of java.util.Set in project hadoop by apache.

From class TestHBaseTimelineStorageApps, method testWriteApplicationToHBase:

@Test
public void testWriteApplicationToHBase() throws Exception {
    TimelineEntities te = new TimelineEntities();
    ApplicationEntity entity = new ApplicationEntity();
    String appId = "application_1000178881110_2002";
    entity.setId(appId);
    Long cTime = 1425016501000L;
    entity.setCreatedTime(cTime);
    // add the info map to the Timeline Entity
    Map<String, Object> infoMap = new HashMap<String, Object>();
    infoMap.put("infoMapKey1", "infoMapValue1");
    infoMap.put("infoMapKey2", 10);
    entity.addInfo(infoMap);
    // add the isRelatedToEntity info
    String key = "task";
    String value = "is_related_to_entity_id_here";
    Set<String> isRelatedToSet = new HashSet<String>();
    isRelatedToSet.add(value);
    Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
    isRelatedTo.put(key, isRelatedToSet);
    entity.setIsRelatedToEntities(isRelatedTo);
    // add the relatesTo info
    key = "container";
    value = "relates_to_entity_id_here";
    Set<String> relatesToSet = new HashSet<String>();
    relatesToSet.add(value);
    value = "relates_to_entity_id_here_Second";
    relatesToSet.add(value);
    Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
    relatesTo.put(key, relatesToSet);
    entity.setRelatesToEntities(relatesTo);
    // add some config entries
    Map<String, String> conf = new HashMap<String, String>();
    conf.put("config_param1", "value1");
    conf.put("config_param2", "value2");
    entity.addConfigs(conf);
    // add metrics
    Set<TimelineMetric> metrics = new HashSet<>();
    TimelineMetric m1 = new TimelineMetric();
    m1.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> metricValues = new HashMap<Long, Number>();
    long ts = System.currentTimeMillis();
    metricValues.put(ts - 120000, 100000000);
    metricValues.put(ts - 100000, 200000000);
    metricValues.put(ts - 80000, 300000000);
    metricValues.put(ts - 60000, 400000000);
    metricValues.put(ts - 40000, 50000000000L);
    metricValues.put(ts - 20000, 60000000000L);
    m1.setType(Type.TIME_SERIES);
    m1.setValues(metricValues);
    metrics.add(m1);
    entity.addMetrics(metrics);
    // add aggregated metrics
    TimelineEntity aggEntity = new TimelineEntity();
    String type = TimelineEntityType.YARN_APPLICATION.toString();
    aggEntity.setId(appId);
    aggEntity.setType(type);
    long cTime2 = 1425016502000L;
    aggEntity.setCreatedTime(cTime2);
    TimelineMetric aggMetric = new TimelineMetric();
    aggMetric.setId("MEM_USAGE");
    Map<Long, Number> aggMetricValues = new HashMap<Long, Number>();
    long aggTs = ts;
    aggMetricValues.put(aggTs - 120000, 102400000L);
    aggMetric.setType(Type.SINGLE_VALUE);
    aggMetric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);
    aggMetric.setValues(aggMetricValues);
    Set<TimelineMetric> aggMetrics = new HashSet<>();
    aggMetrics.add(aggMetric);
    entity.addMetrics(aggMetrics);
    te.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        String cluster = "cluster_test_write_app";
        String user = "user1";
        String flow = "s!ome_f\tlow  _n am!e";
        String flowVersion = "AB7822C10F1111";
        long runid = 1002345678919L;
        hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
        // Write entity again, this time without created time.
        entity = new ApplicationEntity();
        appId = "application_1000178881110_2002";
        entity.setId(appId);
        // add the info map to the Timeline Entity
        Map<String, Object> infoMap1 = new HashMap<>();
        infoMap1.put("infoMapKey3", "infoMapValue1");
        entity.addInfo(infoMap1);
        te = new TimelineEntities();
        te.addEntity(entity);
        hbi.write(cluster, user, flow, flowVersion, runid, appId, te);
        hbi.stop();
        infoMap.putAll(infoMap1);
        // retrieve the row
        ApplicationRowKey applicationRowKey = new ApplicationRowKey(cluster, user, flow, runid, appId);
        byte[] rowKey = applicationRowKey.getRowKey();
        Get get = new Get(rowKey);
        get.setMaxVersions(Integer.MAX_VALUE);
        Connection conn = ConnectionFactory.createConnection(c1);
        Result result = new ApplicationTable().getResult(c1, conn, get);
        assertNotNull(result);
        assertEquals(17, result.size());
        // check the row key
        byte[] row1 = result.getRow();
        assertTrue(isApplicationRowKeyCorrect(row1, cluster, user, flow, runid, appId));
        // check info column family
        String id1 = ApplicationColumn.ID.readResult(result).toString();
        assertEquals(appId, id1);
        Long cTime1 = (Long) ApplicationColumn.CREATED_TIME.readResult(result);
        assertEquals(cTime, cTime1);
        Map<String, Object> infoColumns = ApplicationColumnPrefix.INFO.readResults(result, new StringKeyConverter());
        assertEquals(infoMap, infoColumns);
        // Remember isRelatedTo is of type Map<String, Set<String>>
        for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
            Object isRelatedToValue = ApplicationColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
            String compoundValue = isRelatedToValue.toString();
            // id7?id9?id6
            Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
            assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
            for (String v : isRelatedToEntry.getValue()) {
                assertTrue(isRelatedToValues.contains(v));
            }
        }
        // RelatesTo
        for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
            String compoundValue = ApplicationColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
            // id3?id4?id5
            Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
            assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
            for (String v : relatesToEntry.getValue()) {
                assertTrue(relatesToValues.contains(v));
            }
        }
        KeyConverter<String> stringKeyConverter = new StringKeyConverter();
        // Configuration
        Map<String, Object> configColumns = ApplicationColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
        assertEquals(conf, configColumns);
        NavigableMap<String, NavigableMap<Long, Number>> metricsResult = ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
        NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
        matchMetrics(metricValues, metricMap);
        // Read the timeline entity back using the reader this time. With the
        // metrics limit set to Integer.MAX_VALUE, a TIME_SERIES will be
        // returned (if more than one value exists for a metric).
        TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), Integer.MAX_VALUE));
        assertNotNull(e1);
        // verify attributes
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        Map<String, Object> infoMap2 = e1.getInfo();
        assertEquals(infoMap, infoMap2);
        Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
        assertEquals(isRelatedTo, isRelatedTo2);
        Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
        assertEquals(relatesTo, relatesTo2);
        Map<String, String> conf2 = e1.getConfigs();
        assertEquals(conf, conf2);
        Set<TimelineMetric> metrics2 = e1.getMetrics();
        assertEquals(2, metrics2.size());
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
            if (metric2.getId().equals("MAP_SLOT_MILLIS")) {
                assertEquals(6, metricValues2.size());
                matchMetrics(metricValues, metricValues2);
            }
            if (metric2.getId().equals("MEM_USAGE")) {
                assertEquals(1, metricValues2.size());
                matchMetrics(aggMetricValues, metricValues2);
            }
        }
        // With a metrics limit of 3, no more than 3 values will be returned
        // for each metric.
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), 3));
        assertNotNull(e1);
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(conf, e1.getConfigs());
        metrics2 = e1.getMetrics();
        assertEquals(2, metrics2.size());
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            assertTrue(metricValues2.size() <= 3);
            assertTrue(metric2.getId().equals("MAP_SLOT_MILLIS") || metric2.getId().equals("MEM_USAGE"));
        }
        // Check that a single value (the latest one), instead of a time
        // series, is returned if metricslimit is not set (null), irrespective
        // of the number of metric values.
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appId, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(TimelineReader.Field.ALL), null));
        assertNotNull(e1);
        assertEquals(appId, e1.getId());
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        assertEquals(infoMap, e1.getInfo());
        assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
        assertEquals(relatesTo, e1.getRelatesToEntities());
        assertEquals(conf, e1.getConfigs());
        assertEquals(2, e1.getMetrics().size());
        for (TimelineMetric metric : e1.getMetrics()) {
            assertEquals(1, metric.getValues().size());
            assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
            assertTrue(metric.getId().equals("MAP_SLOT_MILLIS") || metric.getId().equals("MEM_USAGE"));
            if (metric.getId().equals("MAP_SLOT_MILLIS")) {
                assertTrue(metric.getValues().containsKey(ts - 20000));
                assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
            }
            if (metric.getId().equals("MEM_USAGE")) {
                assertTrue(metric.getValues().containsKey(aggTs - 120000));
                assertEquals(aggMetricValues.get(aggTs - 120000), metric.getValues().get(aggTs - 120000));
            }
        }
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used: StringKeyConverter(org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter), TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), EnumSet(java.util.EnumSet), Set(java.util.Set), NavigableSet(java.util.NavigableSet), HashSet(java.util.HashSet), Configuration(org.apache.hadoop.conf.Configuration), NavigableMap(java.util.NavigableMap), HashMap(java.util.HashMap), Map(java.util.Map), TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), Result(org.apache.hadoop.hbase.client.Result), ApplicationTable(org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable), TimelineEntities(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), Connection(org.apache.hadoop.hbase.client.Connection), TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), ApplicationEntity(org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity), ApplicationRowKey(org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey), Get(org.apache.hadoop.hbase.client.Get), Test(org.junit.Test)
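
The matchMetrics helper called throughout these tests is defined elsewhere in the test class and is not shown on this page. A minimal sketch of what such a helper might look like, assuming JUnit's Assert static imports and a simple entry-by-entry comparison as longs:

private static void matchMetrics(Map<Long, Number> expected, Map<Long, Number> actual) {
    assertNotNull("metric values not found", actual);
    assertEquals(expected.size(), actual.size());
    for (Map.Entry<Long, Number> entry : expected.entrySet()) {
        Number value = actual.get(entry.getKey());
        assertNotNull("no value for timestamp " + entry.getKey(), value);
        // Compare as longs: values may be deserialized as different Number
        // subtypes (Integer vs Long) than the ones that were written.
        assertEquals(entry.getValue().longValue(), value.longValue());
    }
}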

Example 62 with Set

Use of java.util.Set in project hadoop by apache.

From class TestHBaseTimelineStorageEntities, method testWriteEntityToHBase:

@Test
public void testWriteEntityToHBase() throws Exception {
    TimelineEntities te = new TimelineEntities();
    TimelineEntity entity = new TimelineEntity();
    String id = "hello";
    String type = "world";
    entity.setId(id);
    entity.setType(type);
    Long cTime = 1425016501000L;
    entity.setCreatedTime(cTime);
    // add the info map to the Timeline Entity
    Map<String, Object> infoMap = new HashMap<String, Object>();
    infoMap.put("infoMapKey1", "infoMapValue1");
    infoMap.put("infoMapKey2", 10);
    entity.addInfo(infoMap);
    // add the isRelatedToEntity info
    String key = "task";
    String value = "is_related_to_entity_id_here";
    Set<String> isRelatedToSet = new HashSet<String>();
    isRelatedToSet.add(value);
    Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
    isRelatedTo.put(key, isRelatedToSet);
    entity.setIsRelatedToEntities(isRelatedTo);
    // add the relatesTo info
    key = "container";
    value = "relates_to_entity_id_here";
    Set<String> relatesToSet = new HashSet<String>();
    relatesToSet.add(value);
    value = "relates_to_entity_id_here_Second";
    relatesToSet.add(value);
    Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
    relatesTo.put(key, relatesToSet);
    entity.setRelatesToEntities(relatesTo);
    // add some config entries
    Map<String, String> conf = new HashMap<String, String>();
    conf.put("config_param1", "value1");
    conf.put("config_param2", "value2");
    entity.addConfigs(conf);
    // add metrics
    Set<TimelineMetric> metrics = new HashSet<>();
    TimelineMetric m1 = new TimelineMetric();
    m1.setId("MAP_SLOT_MILLIS");
    Map<Long, Number> metricValues = new HashMap<Long, Number>();
    long ts = System.currentTimeMillis();
    metricValues.put(ts - 120000, 100000000);
    metricValues.put(ts - 100000, 200000000);
    metricValues.put(ts - 80000, 300000000);
    metricValues.put(ts - 60000, 400000000);
    metricValues.put(ts - 40000, 50000000000L);
    metricValues.put(ts - 20000, 60000000000L);
    m1.setType(Type.TIME_SERIES);
    m1.setValues(metricValues);
    metrics.add(m1);
    entity.addMetrics(metrics);
    te.addEntity(entity);
    HBaseTimelineWriterImpl hbi = null;
    try {
        Configuration c1 = util.getConfiguration();
        hbi = new HBaseTimelineWriterImpl();
        hbi.init(c1);
        hbi.start();
        String cluster = "cluster_test_write_entity";
        String user = "user1";
        String flow = "some_flow_name";
        String flowVersion = "AB7822C10F1111";
        long runid = 1002345678919L;
        String appName = ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1).toString();
        hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
        hbi.stop();
        // scan the table and verify that the entity exists
        Scan s = new Scan();
        byte[] startRow = new EntityRowKeyPrefix(cluster, user, flow, runid, appName).getRowKeyPrefix();
        s.setStartRow(startRow);
        s.setMaxVersions(Integer.MAX_VALUE);
        Connection conn = ConnectionFactory.createConnection(c1);
        ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
        int rowCount = 0;
        int colCount = 0;
        KeyConverter<String> stringKeyConverter = new StringKeyConverter();
        for (Result result : scanner) {
            if (result != null && !result.isEmpty()) {
                rowCount++;
                colCount += result.size();
                byte[] row1 = result.getRow();
                assertTrue(isRowKeyCorrect(row1, cluster, user, flow, runid, appName, entity));
                // check info column family
                String id1 = EntityColumn.ID.readResult(result).toString();
                assertEquals(id, id1);
                String type1 = EntityColumn.TYPE.readResult(result).toString();
                assertEquals(type, type1);
                Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result);
                assertEquals(cTime, cTime1);
                Map<String, Object> infoColumns = EntityColumnPrefix.INFO.readResults(result, new StringKeyConverter());
                assertEquals(infoMap, infoColumns);
                // Remember isRelatedTo is of type Map<String, Set<String>>
                for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo.entrySet()) {
                    Object isRelatedToValue = EntityColumnPrefix.IS_RELATED_TO.readResult(result, isRelatedToEntry.getKey());
                    String compoundValue = isRelatedToValue.toString();
                    // id7?id9?id6
                    Set<String> isRelatedToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
                    assertEquals(isRelatedTo.get(isRelatedToEntry.getKey()).size(), isRelatedToValues.size());
                    for (String v : isRelatedToEntry.getValue()) {
                        assertTrue(isRelatedToValues.contains(v));
                    }
                }
                // RelatesTo
                for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo.entrySet()) {
                    String compoundValue = EntityColumnPrefix.RELATES_TO.readResult(result, relatesToEntry.getKey()).toString();
                    // id3?id4?id5
                    Set<String> relatesToValues = new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
                    assertEquals(relatesTo.get(relatesToEntry.getKey()).size(), relatesToValues.size());
                    for (String v : relatesToEntry.getValue()) {
                        assertTrue(relatesToValues.contains(v));
                    }
                }
                // Configuration
                Map<String, Object> configColumns = EntityColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
                assertEquals(conf, configColumns);
                NavigableMap<String, NavigableMap<Long, Number>> metricsResult = EntityColumnPrefix.METRIC.readResultsWithTimestamps(result, stringKeyConverter);
                NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
                matchMetrics(metricValues, metricMap);
            }
        }
        assertEquals(1, rowCount);
        assertEquals(16, colCount);
        // read the timeline entity using the reader this time
        TimelineEntity e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
        Set<TimelineEntity> es1 = reader.getEntities(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), null), new TimelineEntityFilters(), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), Integer.MAX_VALUE));
        assertNotNull(e1);
        assertEquals(1, es1.size());
        // verify attributes
        assertEquals(id, e1.getId());
        assertEquals(type, e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        Map<String, Object> infoMap2 = e1.getInfo();
        assertEquals(infoMap, infoMap2);
        Map<String, Set<String>> isRelatedTo2 = e1.getIsRelatedToEntities();
        assertEquals(isRelatedTo, isRelatedTo2);
        Map<String, Set<String>> relatesTo2 = e1.getRelatesToEntities();
        assertEquals(relatesTo, relatesTo2);
        Map<String, String> conf2 = e1.getConfigs();
        assertEquals(conf, conf2);
        Set<TimelineMetric> metrics2 = e1.getMetrics();
        assertEquals(metrics, metrics2);
        for (TimelineMetric metric2 : metrics2) {
            Map<Long, Number> metricValues2 = metric2.getValues();
            matchMetrics(metricValues, metricValues2);
        }
        e1 = reader.getEntity(new TimelineReaderContext(cluster, user, flow, runid, appName, entity.getType(), entity.getId()), new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
        assertNotNull(e1);
        assertEquals(id, e1.getId());
        assertEquals(type, e1.getType());
        assertEquals(cTime, e1.getCreatedTime());
        assertEquals(infoMap, e1.getInfo());
        assertEquals(isRelatedTo, e1.getIsRelatedToEntities());
        assertEquals(relatesTo, e1.getRelatesToEntities());
        assertEquals(conf, e1.getConfigs());
        for (TimelineMetric metric : e1.getMetrics()) {
            assertEquals(TimelineMetric.Type.SINGLE_VALUE, metric.getType());
            assertEquals(1, metric.getValues().size());
            assertTrue(metric.getValues().containsKey(ts - 20000));
            assertEquals(metricValues.get(ts - 20000), metric.getValues().get(ts - 20000));
        }
    } finally {
        if (hbi != null) {
            hbi.stop();
            hbi.close();
        }
    }
}
Also used: StringKeyConverter(org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter), TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric), EnumSet(java.util.EnumSet), Set(java.util.Set), NavigableSet(java.util.NavigableSet), HashSet(java.util.HashSet), Configuration(org.apache.hadoop.conf.Configuration), NavigableMap(java.util.NavigableMap), HashMap(java.util.HashMap), Map(java.util.Map), TimelineReaderContext(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext), EntityRowKeyPrefix(org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix), Result(org.apache.hadoop.hbase.client.Result), TimelineEntities(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities), EntityTable(org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable), ResultScanner(org.apache.hadoop.hbase.client.ResultScanner), TimelineEntityFilters(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters), Connection(org.apache.hadoop.hbase.client.Connection), TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), TimelineDataToRetrieve(org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve), Scan(org.apache.hadoop.hbase.client.Scan), Test(org.junit.Test)
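
The "// id7?id9?id6" comments above hint at how the isRelatedTo and relatesTo sets are persisted: set members are joined into a single cell value and split back apart on read via Separator.VALUES.splitEncoded. A rough illustration of that round trip; the '?' separator is only a stand-in inferred from those comments, the real encoded separator being an internal detail of the storage layer:

@Test
public void testCompoundValueRoundTrip() {
    // '?' stands in for the real encoded separator used by Separator.VALUES.
    String compoundValue = "id7?id9?id6";
    Set<String> values = new HashSet<>(Arrays.asList(compoundValue.split("\\?")));
    // A HashSet does not preserve order, which is why the tests above compare
    // set size and membership instead of the raw compound string.
    assertEquals(3, values.size());
    assertTrue(values.contains("id9"));
}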

Example 63 with Set

Use of java.util.Set in project hadoop by apache.

From class TestTimelineReaderWebServicesHBaseStorage, method testGetFlows:

@Test
public void testGetFlows() throws Exception {
    Client client = createClient();
    try {
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows");
        ClientResponse resp = getResponse(client, uri);
        Set<FlowActivityEntity> entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (FlowActivityEntity entity : entities) {
            assertTrue((entity.getId().endsWith("@flow_name") && entity.getFlowRuns().size() == 2) || (entity.getId().endsWith("@flow_name2") && entity.getFlowRuns().size() == 1));
        }
        // Query without specifying cluster ID.
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/flows/");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?limit=1");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        long firstFlowActivity = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(1425016501000L);
        DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get();
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(firstFlowActivity) + "-" + fmt.format(dayTs));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (FlowActivityEntity entity : entities) {
            assertTrue((entity.getId().endsWith("@flow_name") && entity.getFlowRuns().size() == 2) || (entity.getId().endsWith("@flow_name2") && entity.getFlowRuns().size() == 1));
        }
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(dayTs + (4 * 86400000L)));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=-" + fmt.format(dayTs));
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=" + fmt.format(firstFlowActivity) + "-");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<FlowActivityEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150711:20150714");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150714-20150711");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=2015071129-20150712");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/flows?daterange=20150711-2015071243");
        verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    } finally {
        client.destroy();
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse), GenericType(com.sun.jersey.api.client.GenericType), Set(java.util.Set), HashSet(java.util.HashSet), DateFormat(java.text.DateFormat), FlowActivityEntity(org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity), Client(com.sun.jersey.api.client.Client), URI(java.net.URI), Test(org.junit.Test)
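
The daterange parameter exercised above accepts three shapes: start-end, start-, and -end. A hedged sketch of building them; the helper name is hypothetical, and the yyyyMMdd pattern is an assumption inferred from the 20150711-style literals in the rejected queries rather than taken from TimelineReaderWebServices itself:

private static void showDateRangeForms() {
    // yyyyMMdd is inferred from the 20150711-style literals above.
    DateFormat fmt = new SimpleDateFormat("yyyyMMdd");
    String start = fmt.format(new Date(1425016501000L));
    String end = fmt.format(new Date(1425016501000L + 4 * 86400000L));
    // Three accepted shapes; a ':' separator, a reversed range, or a
    // malformed date is rejected with 400 Bad Request, as the last four
    // queries in the test verify.
    String closed = "daterange=" + start + "-" + end; // active within [start, end]
    String after = "daterange=" + start + "-";        // active on or after start
    String before = "daterange=-" + end;              // active on or before end
    System.out.println(closed + "\n" + after + "\n" + before);
}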

Example 64 with Set

Use of java.util.Set in project hadoop by apache.

From class TestTimelineReaderWebServicesHBaseStorage, method testGetEntitiesInfoFilters:

@Test
public void testGetEntitiesInfoFilters() throws Exception {
    Client client = createClient();
    try {
        // infofilters=info1 eq cluster1 OR info1 eq cluster2
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info1%20eq%20cluster1%20OR%20info1%20eq" + "%20cluster2");
        ClientResponse resp = getResponse(client, uri);
        Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=info1 eq cluster1 AND info4 eq 35000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(0, entities.size());
        // infofilters=info4 eq 35000 OR info4 eq 36000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info4%20eq%2035000%20OR%20info4%20eq" + "%2036000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=(info1 eq cluster1 AND info4 eq 35000) OR
        // (info1 eq cluster2 AND info2 eq 2.0)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%202.0" + ")");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        int infoCnt = 0;
        for (TimelineEntity entity : entities) {
            infoCnt += entity.getInfo().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        // The UID is included in the info field even when INFO is not among
        // the requested fields.
        assertEquals(1, infoCnt);
        // infofilters=(info1 eq cluster1 AND info4 eq 35000) OR
        // (info1 eq cluster2 AND info2 eq 2.0)
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=(info1%20eq%20cluster1%20AND%20info4%20" + "eq%2035000)%20OR%20(info1%20eq%20cluster2%20AND%20info2%20eq%20" + "2.0)&fields=INFO");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        infoCnt = 0;
        for (TimelineEntity entity : entities) {
            infoCnt += entity.getInfo().size();
            assertTrue(entity.getId().equals("entity2"));
        }
        // The UID added to the info field is included in this count.
        assertEquals(4, infoCnt);
        // Test for behavior when the compare op is ne (not equals) vs ene
        // (exists and not equals). info3 does not exist for entity2. For ne,
        // both entity1 and entity2 will be returned. For ene, only entity1
        // will be returned, as we are also checking for existence.
        // infofilters=info3 ne 39000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info3%20ne%2039000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(2, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1") || entity.getId().equals("entity2"));
        }
        // infofilters=info3 ene 39000
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/application_1111111111_1111/" + "entities/type1?infofilters=info3%20ene%2039000");
        resp = getResponse(client, uri);
        entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities);
        assertEquals(1, entities.size());
        for (TimelineEntity entity : entities) {
            assertTrue(entity.getId().equals("entity1"));
        }
    } finally {
        client.destroy();
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse), GenericType(com.sun.jersey.api.client.GenericType), Set(java.util.Set), HashSet(java.util.HashSet), Client(com.sun.jersey.api.client.Client), TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), URI(java.net.URI), Test(org.junit.Test)
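
The %20 escapes in the URIs above are just URL-encoded spaces; a client would normally write the infofilters expression in plain text and encode it before appending it to the query string. A sketch under the assumptions of Java 10+ (for the Charset overload of URLEncoder.encode) and the same serverPort field as the surrounding tests; the helper name is hypothetical:

private URI buildInfoFilterUri() {
    String filter = "(info1 eq cluster1 AND info4 eq 35000) OR "
        + "(info1 eq cluster2 AND info2 eq 2.0)";
    // URLEncoder encodes spaces as '+', which query-string parsing treats
    // the same as the hand-written %20 escapes in the test above.
    String encoded = URLEncoder.encode(filter, StandardCharsets.UTF_8);
    return URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/"
        + "clusters/cluster1/apps/application_1111111111_1111/"
        + "entities/type1?infofilters=" + encoded);
}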

Example 65 with Set

Use of java.util.Set in project hadoop by apache.

From class TestTimelineReaderWebServicesHBaseStorage, method testUIDQueryWithAndWithoutFlowContextInfo:

@Test
public void testUIDQueryWithAndWithoutFlowContextInfo() throws Exception {
    Client client = createClient();
    try {
        String appUIDWithFlowInfo = "cluster1!user1!flow_name!1002345678919!application_1111111111_1111";
        URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/app-uid/" + appUIDWithFlowInfo);
        ClientResponse resp = getResponse(client, uri);
        TimelineEntity appEntity1 = resp.getEntity(TimelineEntity.class);
        assertNotNull(appEntity1);
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), appEntity1.getType());
        assertEquals("application_1111111111_1111", appEntity1.getId());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "app-uid/" + appUIDWithFlowInfo + "/entities/type1");
        resp = getResponse(client, uri);
        Set<TimelineEntity> entities1 = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities1);
        assertEquals(2, entities1.size());
        for (TimelineEntity entity : entities1) {
            assertNotNull(entity.getInfo());
            assertEquals(1, entity.getInfo().size());
            String uid = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
            assertNotNull(uid);
            assertTrue(uid.equals(appUIDWithFlowInfo + "!type1!entity1") || uid.equals(appUIDWithFlowInfo + "!type1!entity2"));
        }
        String appUIDWithoutFlowInfo = "cluster1!application_1111111111_1111";
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "app-uid/" + appUIDWithoutFlowInfo);
        resp = getResponse(client, uri);
        TimelineEntity appEntity2 = resp.getEntity(TimelineEntity.class);
        assertNotNull(appEntity2);
        assertEquals(TimelineEntityType.YARN_APPLICATION.toString(), appEntity2.getType());
        assertEquals("application_1111111111_1111", appEntity2.getId());
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "app-uid/" + appUIDWithoutFlowInfo + "/entities/type1");
        resp = getResponse(client, uri);
        Set<TimelineEntity> entities2 = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertNotNull(entities2);
        assertEquals(2, entities2.size());
        for (TimelineEntity entity : entities2) {
            assertNotNull(entity.getInfo());
            assertEquals(1, entity.getInfo().size());
            String uid = (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
            assertNotNull(uid);
            assertTrue(uid.equals(appUIDWithoutFlowInfo + "!type1!entity1") || uid.equals(appUIDWithoutFlowInfo + "!type1!entity2"));
        }
        String entityUIDWithFlowInfo = appUIDWithFlowInfo + "!type1!entity1";
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "entity-uid/" + entityUIDWithFlowInfo);
        resp = getResponse(client, uri);
        TimelineEntity singleEntity1 = resp.getEntity(TimelineEntity.class);
        assertNotNull(singleEntity1);
        assertEquals("type1", singleEntity1.getType());
        assertEquals("entity1", singleEntity1.getId());
        String entityUIDWithoutFlowInfo = appUIDWithoutFlowInfo + "!type1!entity1";
        uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" + "entity-uid/" + entityUIDWithoutFlowInfo);
        resp = getResponse(client, uri);
        TimelineEntity singleEntity2 = resp.getEntity(TimelineEntity.class);
        assertNotNull(singleEntity2);
        assertEquals("type1", singleEntity2.getType());
        assertEquals("entity1", singleEntity2.getId());
    } finally {
        client.destroy();
    }
}
Also used: ClientResponse(com.sun.jersey.api.client.ClientResponse), Set(java.util.Set), HashSet(java.util.HashSet), Client(com.sun.jersey.api.client.Client), TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity), URI(java.net.URI), Test(org.junit.Test)
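
The UIDs in this test are plain '!'-delimited compositions of context fields. A rough sketch of the layout; real UIDs are produced server-side, and components containing '!' would need escaping, which this sketch ignores:

@Test
public void testUidLayout() {
    // App UID: cluster!user!flow name!flow run id!app id; entity UIDs append
    // "!entitytype!entityid".
    String appUid = String.join("!", "cluster1", "user1", "flow_name",
        "1002345678919", "application_1111111111_1111");
    assertEquals("cluster1!user1!flow_name!1002345678919!application_1111111111_1111",
        appUid);
    String entityUid = appUid + "!type1!entity1";
    assertTrue(entityUid.endsWith("!type1!entity1"));
}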

Aggregations

Set (java.util.Set): 6789, HashSet (java.util.HashSet): 4372, HashMap (java.util.HashMap): 2090, Map (java.util.Map): 1865, Iterator (java.util.Iterator): 1774, ArrayList (java.util.ArrayList): 1113, List (java.util.List): 980, Test (org.junit.Test): 920, TreeSet (java.util.TreeSet): 536, IOException (java.io.IOException): 501, SSOException (com.iplanet.sso.SSOException): 467, LinkedHashSet (java.util.LinkedHashSet): 418, SMSException (com.sun.identity.sm.SMSException): 347, IdRepoException (com.sun.identity.idm.IdRepoException): 268, Collection (java.util.Collection): 259, ImmutableSet (com.google.common.collect.ImmutableSet): 256, File (java.io.File): 245, SSOToken (com.iplanet.sso.SSOToken): 226, Collectors (java.util.stream.Collectors): 219, Test (org.testng.annotations.Test): 209