Search in sources :

Example 81 with Set

use of java.util.Set in project hbase by apache.

From the class ReplicationZKNodeCleaner, method getUnDeletedQueues.

/**
 * Scans the replication queues of every replicator and collects those whose peer id is
 * no longer among the registered peers (i.e. the peer was removed but its queue znode
 * was left behind).
 *
 * @return map of replicator name to the queue ids belonging to removed peers
 * @throws IOException if the replication queues cannot be read from ZooKeeper
 */
public Map<String, List<String>> getUnDeletedQueues() throws IOException {
    Map<String, List<String>> orphanedQueuesByReplicator = new HashMap<>();
    // Snapshot of the currently registered peer ids; any queue whose peer id is not in
    // this set is considered orphaned.
    Set<String> livePeerIds = new HashSet<>(this.replicationPeers.getAllPeerIds());
    try {
        for (String replicator : this.queuesClient.getListOfReplicators()) {
            for (String queueId : this.queuesClient.getAllQueues(replicator)) {
                ReplicationQueueInfo queueInfo = new ReplicationQueueInfo(queueId);
                String peerId = queueInfo.getPeerId();
                if (livePeerIds.contains(peerId)) {
                    continue;
                }
                orphanedQueuesByReplicator
                    .computeIfAbsent(replicator, (key) -> new ArrayList<>())
                    .add(queueId);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Undeleted replication queue for removed peer found: "
                        + String.format("[removedPeerId=%s, replicator=%s, queueId=%s]",
                            peerId, replicator, queueId));
                }
            }
        }
    } catch (KeeperException ke) {
        throw new IOException("Failed to get the replication queues of all replicators", ke);
    }
    return orphanedQueuesByReplicator;
}
Also used : KeeperException(org.apache.zookeeper.KeeperException) ZKUtil(org.apache.hadoop.hbase.zookeeper.ZKUtil) Abortable(org.apache.hadoop.hbase.Abortable) ReplicationQueuesClientArguments(org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments) Set(java.util.Set) ReplicationFactory(org.apache.hadoop.hbase.replication.ReplicationFactory) IOException(java.io.IOException) HashMap(java.util.HashMap) ReplicationPeers(org.apache.hadoop.hbase.replication.ReplicationPeers) ReplicationStateZKBase(org.apache.hadoop.hbase.replication.ReplicationStateZKBase) ZooKeeperWatcher(org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) List(java.util.List) ReplicationQueueInfo(org.apache.hadoop.hbase.replication.ReplicationQueueInfo) ReplicationQueuesClient(org.apache.hadoop.hbase.replication.ReplicationQueuesClient) Map(java.util.Map) Configuration(org.apache.hadoop.conf.Configuration) Entry(java.util.Map.Entry) Log(org.apache.commons.logging.Log) LogFactory(org.apache.commons.logging.LogFactory) InterfaceAudience(org.apache.hadoop.hbase.classification.InterfaceAudience) HashMap(java.util.HashMap) ReplicationQueueInfo(org.apache.hadoop.hbase.replication.ReplicationQueueInfo) ArrayList(java.util.ArrayList) ArrayList(java.util.ArrayList) List(java.util.List) IOException(java.io.IOException) KeeperException(org.apache.zookeeper.KeeperException) HashSet(java.util.HashSet)

Example 82 with Set

use of java.util.Set in project hbase by apache.

From the class RegionStates, method getTableRSRegionMap.

/**
 * Builds a table -&gt; server -&gt; regions view of the current server holdings,
 * skipping meta regions.
 *
 * @param bytable if {@code true}, group regions by their real table; otherwise lump
 *                everything under the single ensemble pseudo-table
 * @return mutable nested map of table name to server name to the regions that server holds
 */
private Map<TableName, Map<ServerName, List<HRegionInfo>>> getTableRSRegionMap(Boolean bytable) {
    Map<TableName, Map<ServerName, List<HRegionInfo>>> result = new HashMap<>();
    for (Map.Entry<ServerName, Set<HRegionInfo>> e : serverHoldings.entrySet()) {
        ServerName server = e.getKey();
        for (HRegionInfo hri : e.getValue()) {
            // Meta is never included in the per-table view.
            if (hri.isMetaRegion()) {
                continue;
            }
            TableName tablename = bytable ? hri.getTable() : TableName.valueOf(HConstants.ENSEMBLE_TABLE_NAME);
            // computeIfAbsent replaces the verbose get/null-check/put dance; the inner map
            // is presized for the worst case of every server holding a region of the table.
            result.computeIfAbsent(tablename, k -> new HashMap<>(serverHoldings.size()))
                .computeIfAbsent(server, k -> new ArrayList<>())
                .add(hri);
        }
    }
    return result;
}
Also used : SortedSet(java.util.SortedSet) TreeSet(java.util.TreeSet) HashSet(java.util.HashSet) Set(java.util.Set) HashMap(java.util.HashMap) HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TableName(org.apache.hadoop.hbase.TableName) ServerName(org.apache.hadoop.hbase.ServerName) ArrayList(java.util.ArrayList) List(java.util.List) HashMap(java.util.HashMap) Map(java.util.Map) TreeMap(java.util.TreeMap)

Example 83 with Set

use of java.util.Set in project hadoop by apache.

From the class FileSystemTimelineReaderImpl, method getEntities.

/**
 * Reads every entity file under {@code dir}, keeps entities of {@code entityType}
 * that pass all of the supplied filters, and returns at most
 * {@code filters.getLimit()} entities, preferring the most recently created ones.
 *
 * @param dir directory containing serialized entity files
 * @param entityType only entities of this type are considered
 * @param filters created-time range, relational, info, config, metric and event filters
 * @param dataToRetrieve controls which fields are materialized on returned entities
 * @return matching entities, newest created time first, capped at the filter limit
 * @throws IOException if the directory cannot be listed or an entity file cannot be read
 */
private Set<TimelineEntity> getEntities(File dir, String entityType, TimelineEntityFilters filters, TimelineDataToRetrieve dataToRetrieve) throws IOException {
    File[] entityFiles = dir.listFiles();
    if (entityFiles == null) {
        // listFiles() returns null when dir does not exist, is not a directory, or an
        // I/O error occurs; fail explicitly instead of with a NullPointerException.
        throw new IOException("Unable to list entity files in " + dir);
    }
    // Bucket selected entities by created time, newest first.
    Map<Long, Set<TimelineEntity>> sortedEntities = new TreeMap<>((l1, l2) -> l2.compareTo(l1));
    for (File entityFile : entityFiles) {
        if (!entityFile.getName().contains(TIMELINE_SERVICE_STORAGE_EXTENSION)) {
            continue;
        }
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(entityFile), Charset.forName("UTF-8")))) {
            TimelineEntity entity = readEntityFromFile(reader);
            if (!entity.getType().equals(entityType)) {
                continue;
            }
            if (!matchesFilters(entity, filters)) {
                continue;
            }
            TimelineEntity entityToBeReturned = createEntityToBeReturned(entity, dataToRetrieve.getFieldsToRetrieve());
            sortedEntities.computeIfAbsent(entityToBeReturned.getCreatedTime(), k -> new HashSet<>()).add(entityToBeReturned);
        }
    }
    // Drain the buckets in created-time order until the limit is reached.
    Set<TimelineEntity> entities = new HashSet<>();
    long entitiesAdded = 0;
    for (Set<TimelineEntity> entitySet : sortedEntities.values()) {
        for (TimelineEntity entity : entitySet) {
            entities.add(entity);
            if (++entitiesAdded >= filters.getLimit()) {
                return entities;
            }
        }
    }
    return entities;
}

/**
 * Returns {@code true} if {@code entity} passes the created-time range check and every
 * non-empty relational/info/config/metric/event filter in {@code filters}. Filters that
 * are null or have an empty filter list are treated as pass-through, matching the
 * original inline checks.
 */
private boolean matchesFilters(TimelineEntity entity, TimelineEntityFilters filters) {
    if (!isTimeInRange(entity.getCreatedTime(), filters.getCreatedTimeBegin(), filters.getCreatedTimeEnd())) {
        return false;
    }
    if (filters.getRelatesTo() != null && !filters.getRelatesTo().getFilterList().isEmpty() && !TimelineStorageUtils.matchRelatesTo(entity, filters.getRelatesTo())) {
        return false;
    }
    if (filters.getIsRelatedTo() != null && !filters.getIsRelatedTo().getFilterList().isEmpty() && !TimelineStorageUtils.matchIsRelatedTo(entity, filters.getIsRelatedTo())) {
        return false;
    }
    if (filters.getInfoFilters() != null && !filters.getInfoFilters().getFilterList().isEmpty() && !TimelineStorageUtils.matchInfoFilters(entity, filters.getInfoFilters())) {
        return false;
    }
    if (filters.getConfigFilters() != null && !filters.getConfigFilters().getFilterList().isEmpty() && !TimelineStorageUtils.matchConfigFilters(entity, filters.getConfigFilters())) {
        return false;
    }
    if (filters.getMetricFilters() != null && !filters.getMetricFilters().getFilterList().isEmpty() && !TimelineStorageUtils.matchMetricFilters(entity, filters.getMetricFilters())) {
        return false;
    }
    if (filters.getEventFilters() != null && !filters.getEventFilters().getFilterList().isEmpty() && !TimelineStorageUtils.matchEventFilters(entity, filters.getEventFilters())) {
        return false;
    }
    return true;
}
Also used : HashSet(java.util.HashSet) EnumSet(java.util.EnumSet) Set(java.util.Set) InputStreamReader(java.io.InputStreamReader) TreeMap(java.util.TreeMap) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) FileInputStream(java.io.FileInputStream) BufferedReader(java.io.BufferedReader) File(java.io.File) HashSet(java.util.HashSet)

Example 84 with Set

use of java.util.Set in project hadoop by apache.

From the class FileSystemTimelineReaderImpl, method mergeEntities.

/**
 * Folds the contents of {@code entity2} into {@code entity1}: configs, info entries,
 * relationship sets and events are added, and metric values are merged by metric id.
 */
private static void mergeEntities(TimelineEntity entity1, TimelineEntity entity2) {
    // Ideally created time wont change except in the case of issue from client.
    Long createdTime = entity2.getCreatedTime();
    if (createdTime != null && createdTime > 0) {
        entity1.setCreatedTime(createdTime);
    }
    entity2.getConfigs().forEach(entity1::addConfig);
    entity2.getInfo().forEach(entity1::addInfo);
    entity2.getIsRelatedToEntities().forEach((type, entityIds) -> {
        for (String entityId : entityIds) {
            entity1.addIsRelatedToEntity(type, entityId);
        }
    });
    entity2.getRelatesToEntities().forEach((type, entityIds) -> {
        for (String entityId : entityIds) {
            entity1.addRelatesToEntity(type, entityId);
        }
    });
    entity2.getEvents().forEach(entity1::addEvent);
    // Metrics merge by id: append values onto an existing metric, otherwise adopt it.
    for (TimelineMetric incoming : entity2.getMetrics()) {
        TimelineMetric existing = null;
        for (TimelineMetric candidate : entity1.getMetrics()) {
            if (candidate.getId().equals(incoming.getId())) {
                existing = candidate;
                break;
            }
        }
        if (existing != null) {
            existing.addValues(incoming.getValues());
        } else {
            entity1.addMetric(incoming);
        }
    }
}
Also used : TimelineEvent(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent) TimelineMetric(org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric) HashSet(java.util.HashSet) EnumSet(java.util.EnumSet) Set(java.util.Set)

Example 85 with Set

use of java.util.Set in project hadoop by apache.

From the class TestTimelineReaderWebServices, method testGetEntitiesWithLimit.

/**
 * Verifies that the {@code limit} query parameter caps the number of entities returned
 * and that the reader selects entities by most recent created time.
 */
@Test
public void testGetEntitiesWithLimit() throws Exception {
    Client client = createClient();
    try {
        // limit=2: only the two most recently created entities should come back.
        URI limitTwoUri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/clusters/cluster1/apps/app1/entities/app?limit=2");
        ClientResponse response = getResponse(client, limitTwoUri);
        Set<TimelineEntity> entities = response.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, response.getType().toString());
        assertNotNull(entities);
        assertEquals(2, entities.size());
        // Entities returned are based on most recent created time.
        assertTrue("Entities with id_1 and id_4 should have been present " + "in response based on entity created time.", entities.contains(newEntity("app", "id_1")) && entities.contains(newEntity("app", "id_4")));
        // limit=3: one of the entities sharing a created time is dropped by the cap.
        URI limitThreeUri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/clusters/cluster1/apps/app1/entities/app?limit=3");
        response = getResponse(client, limitThreeUri);
        entities = response.getEntity(new GenericType<Set<TimelineEntity>>() {
        });
        assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, response.getType().toString());
        assertNotNull(entities);
        // Even though 2 entities out of 4 have same created time, one entity
        // is left out due to limit
        assertEquals(3, entities.size());
    } finally {
        client.destroy();
    }
}
Also used : ClientResponse(com.sun.jersey.api.client.ClientResponse) GenericType(com.sun.jersey.api.client.GenericType) Set(java.util.Set) Client(com.sun.jersey.api.client.Client) TimelineEntity(org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity) URI(java.net.URI) Test(org.junit.Test)

Aggregations

Set (java.util.Set)6789 HashSet (java.util.HashSet)4372 HashMap (java.util.HashMap)2090 Map (java.util.Map)1865 Iterator (java.util.Iterator)1774 ArrayList (java.util.ArrayList)1113 List (java.util.List)980 Test (org.junit.Test)920 TreeSet (java.util.TreeSet)536 IOException (java.io.IOException)501 SSOException (com.iplanet.sso.SSOException)467 LinkedHashSet (java.util.LinkedHashSet)418 SMSException (com.sun.identity.sm.SMSException)347 IdRepoException (com.sun.identity.idm.IdRepoException)268 Collection (java.util.Collection)259 ImmutableSet (com.google.common.collect.ImmutableSet)256 File (java.io.File)245 SSOToken (com.iplanet.sso.SSOToken)226 Collectors (java.util.stream.Collectors)219 Test (org.testng.annotations.Test)209