
Example 31 with Set

Use of java.util.Set in project hadoop by apache.

In class RMWebServices, method replaceLabelsOnNode:

@POST
@Path("/nodes/{nodeId}/replace-labels")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public Response replaceLabelsOnNode(@QueryParam("labels") Set<String> newNodeLabelsName, @Context HttpServletRequest hsr, @PathParam("nodeId") String nodeId) throws Exception {
    NodeId nid = ConverterUtils.toNodeIdWithDefaultPort(nodeId);
    Map<NodeId, Set<String>> newLabelsForNode = new HashMap<NodeId, Set<String>>();
    newLabelsForNode.put(nid, new HashSet<String>(newNodeLabelsName));
    return replaceLabelsOnNode(newLabelsForNode, hsr, "/nodes/nodeid/replace-labels");
}
Also used: EnumSet (java.util.EnumSet), Set (java.util.Set), HashSet (java.util.HashSet), HashMap (java.util.HashMap), NodeId (org.apache.hadoop.yarn.api.records.NodeId), Path (javax.ws.rs.Path), POST (javax.ws.rs.POST), Produces (javax.ws.rs.Produces)
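
JAX-RS assembles the Set<String> parameter from repeated labels query parameters, so duplicate labels collapse and ordering is not preserved; the method then makes a defensive HashSet copy before delegating. A minimal client sketch of invoking this endpoint, assuming the resource manager's usual /ws/v1/cluster web service root; the host, port, node id, and label names below are illustrative only (java.net.http requires Java 11+):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ReplaceLabelsClient {
    public static void main(String[] args) throws Exception {
        // Repeated "labels" query parameters are what the server collects
        // into the Set<String> argument; two labels for one node here.
        URI uri = URI.create("http://rm-host:8088/ws/v1/cluster/nodes/"
                + "node1.example.com:45454/replace-labels?labels=gpu&labels=ssd");
        HttpRequest request = HttpRequest.newBuilder(uri)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}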

Example 32 with Set

Use of java.util.Set in project hadoop by apache.

In class RMWebServices, method getNodeToLabels:

@GET
@Path("/get-node-to-labels")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public NodeToLabelsInfo getNodeToLabels(@Context HttpServletRequest hsr) throws IOException {
    init();
    NodeToLabelsInfo ntl = new NodeToLabelsInfo();
    HashMap<String, NodeLabelsInfo> ntlMap = ntl.getNodeToLabels();
    Map<NodeId, Set<NodeLabel>> nodeIdToLabels = rm.getRMContext().getNodeLabelManager().getNodeLabelsInfo();
    for (Map.Entry<NodeId, Set<NodeLabel>> nitle : nodeIdToLabels.entrySet()) {
        List<NodeLabel> labels = new ArrayList<NodeLabel>(nitle.getValue());
        ntlMap.put(nitle.getKey().toString(), new NodeLabelsInfo(labels));
    }
    return ntl;
}
Also used: EnumSet (java.util.EnumSet), Set (java.util.Set), HashSet (java.util.HashSet), NodeLabel (org.apache.hadoop.yarn.api.records.NodeLabel), NodeLabelsInfo (org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeLabelsInfo), NodeId (org.apache.hadoop.yarn.api.records.NodeId), ArrayList (java.util.ArrayList), NodeToLabelsInfo (org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeToLabelsInfo), Map (java.util.Map), HashMap (java.util.HashMap), ConcurrentMap (java.util.concurrent.ConcurrentMap), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET)
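
The loop copies each Set<NodeLabel> into an ArrayList because the NodeLabelsInfo DAO is list-backed, while the node label manager hands out sets. A hypothetical Java 8 stream rewrite of just the copy loop, reusing the example's own variables (a sketch, not the Hadoop source):

// Hypothetical one-statement form of the loop above; the List-based
// NodeLabelsInfo constructor is taken from the surrounding example.
nodeIdToLabels.forEach((nodeId, labels) ->
        ntlMap.put(nodeId.toString(), new NodeLabelsInfo(new ArrayList<>(labels))));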

Example 33 with Set

Use of java.util.Set in project hadoop by apache.

In class GetLabelsToNodesResponsePBImpl, method initLabelsToNodes:

private void initLabelsToNodes() {
    if (this.labelsToNodes != null) {
        return;
    }
    GetLabelsToNodesResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<LabelsToNodeIdsProto> list = p.getLabelsToNodesList();
    this.labelsToNodes = new HashMap<String, Set<NodeId>>();
    for (LabelsToNodeIdsProto c : list) {
        Set<NodeId> setNodes = new HashSet<NodeId>();
        for (NodeIdProto n : c.getNodeIdList()) {
            NodeId node = new NodeIdPBImpl(n);
            setNodes.add(node);
        }
        if (!setNodes.isEmpty()) {
            this.labelsToNodes.put(c.getNodeLabels(), setNodes);
        }
    }
}
Also used: GetLabelsToNodesResponseProtoOrBuilder (org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProtoOrBuilder), Set (java.util.Set), HashSet (java.util.HashSet), NodeId (org.apache.hadoop.yarn.api.records.NodeId), NodeIdPBImpl (org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl), NodeIdProto (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto), LabelsToNodeIdsProto (org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)
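
initLabelsToNodes follows the usual PBImpl lazy-deserialization idiom: return early if the map is already cached, read from either the immutable proto or the mutable builder, and decode once into JDK collections, skipping empty node sets. A stripped-down sketch of that idiom with plain JDK stand-in types, since the generated protobuf classes are not reproduced here:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Minimal sketch of the lazy-init idiom used by the PBImpl classes:
// decode the wire representation into a mutable map on first access,
// then serve the cached map on every later call.
public class LazyView {
    private final List<String[]> wireRecords; // stands in for the proto list
    private Map<String, String> decoded;      // built on first access

    public LazyView(List<String[]> wireRecords) {
        this.wireRecords = wireRecords;
    }

    public Map<String, String> get() {
        if (decoded != null) {      // same early-return guard as initLabelsToNodes()
            return decoded;
        }
        decoded = new HashMap<>();
        for (String[] rec : wireRecords) {
            decoded.put(rec[0], rec[1]); // key/value pairs stand in for label -> nodes
        }
        return decoded;
    }
}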

Example 34 with Set

Use of java.util.Set in project hadoop by apache.

In class GetLabelsToNodesResponsePBImpl, method addLabelsToNodesToProto:

private void addLabelsToNodesToProto() {
    maybeInitBuilder();
    builder.clearLabelsToNodes();
    if (labelsToNodes == null) {
        return;
    }
    Iterable<LabelsToNodeIdsProto> iterable = new Iterable<LabelsToNodeIdsProto>() {

        @Override
        public Iterator<LabelsToNodeIdsProto> iterator() {
            return new Iterator<LabelsToNodeIdsProto>() {

                Iterator<Entry<String, Set<NodeId>>> iter = labelsToNodes.entrySet().iterator();

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }

                @Override
                public LabelsToNodeIdsProto next() {
                    Entry<String, Set<NodeId>> now = iter.next();
                    Set<NodeIdProto> nodeProtoSet = new HashSet<NodeIdProto>();
                    for (NodeId n : now.getValue()) {
                        nodeProtoSet.add(convertToProtoFormat(n));
                    }
                    return LabelsToNodeIdsProto.newBuilder().setNodeLabels(now.getKey()).addAllNodeId(nodeProtoSet).build();
                }

                @Override
                public boolean hasNext() {
                    return iter.hasNext();
                }
            };
        }
    };
    builder.addAllLabelsToNodes(iterable);
}
Also used: Set (java.util.Set), HashSet (java.util.HashSet), Iterator (java.util.Iterator), NodeId (org.apache.hadoop.yarn.api.records.NodeId), NodeIdProto (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto), LabelsToNodeIdsProto (org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)
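
The anonymous Iterable hands addAllLabelsToNodes a view that builds one LabelsToNodeIdsProto per next() call, so no intermediate list of protos is materialized, and remove() throws because the view is read-only. On Java 8+ the same lazy behavior could hypothetically be written as a fragment inside this class with streams, reusing its private convertToProtoFormat helper:

// Hypothetical stream-based equivalent of the anonymous classes above.
// A fresh stream is created per iterator() call, matching the original.
Iterable<LabelsToNodeIdsProto> iterable = () -> labelsToNodes.entrySet().stream()
        .map(e -> LabelsToNodeIdsProto.newBuilder()
                .setNodeLabels(e.getKey())
                .addAllNodeId(e.getValue().stream()
                        .map(n -> convertToProtoFormat(n))
                        .collect(java.util.stream.Collectors.toSet()))
                .build())
        .iterator();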

Example 35 with Set

Use of java.util.Set in project hadoop by apache.

In class RollingLevelDBTimelineStore, method putEntities:

/**
   * Put a single entity. If there is an error, add a TimelinePutError to the
   * given response.
   *
   * @param entityUpdates
   *          a map containing all the scheduled writes for this put to the
   *          entity db
   * @param indexUpdates
   *          a map containing all the scheduled writes for this put to the
   *          index db
   * @param entity
   *          the timeline entity to store
   * @param response
   *          the response that put errors are added to
   * @return the number of writes scheduled to the batches by this put
   */
private long putEntities(TreeMap<Long, RollingWriteBatch> entityUpdates, TreeMap<Long, RollingWriteBatch> indexUpdates, TimelineEntity entity, TimelinePutResponse response) {
    long putCount = 0;
    List<EntityIdentifier> relatedEntitiesWithoutStartTimes = new ArrayList<EntityIdentifier>();
    byte[] revStartTime = null;
    Map<String, Set<Object>> primaryFilters = null;
    try {
        List<TimelineEvent> events = entity.getEvents();
        // look up the start time for the entity
        Long startTime = getAndSetStartTime(entity.getEntityId(), entity.getEntityType(), entity.getStartTime(), events);
        if (startTime == null) {
            // if no start time is found, add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_START_TIME);
            response.addError(error);
            return putCount;
        }
        // Must have a domain
        if (StringUtils.isEmpty(entity.getDomainId())) {
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_DOMAIN);
            response.addError(error);
            return putCount;
        }
        revStartTime = writeReverseOrderedLong(startTime);
        long roundedStartTime = entitydb.computeCurrentCheckMillis(startTime);
        RollingWriteBatch rollingWriteBatch = entityUpdates.get(roundedStartTime);
        if (rollingWriteBatch == null) {
            DB db = entitydb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch writeBatch = db.createWriteBatch();
                rollingWriteBatch = new RollingWriteBatch(db, writeBatch);
                entityUpdates.put(roundedStartTime, rollingWriteBatch);
            }
        }
        if (rollingWriteBatch == null) {
            // no rolling DB covers this start time: the entity has expired, so add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch writeBatch = rollingWriteBatch.getWriteBatch();
        // Save off the getBytes conversion to avoid unnecessary cost
        byte[] entityIdBytes = entity.getEntityId().getBytes(UTF_8);
        byte[] entityTypeBytes = entity.getEntityType().getBytes(UTF_8);
        byte[] domainIdBytes = entity.getDomainId().getBytes(UTF_8);
        // write entity marker
        byte[] markerKey = KeyBuilder.newInstance(3).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).getBytesForLookup();
        writeBatch.put(markerKey, EMPTY_BYTES);
        ++putCount;
        // write domain id entry
        byte[] domainkey = KeyBuilder.newInstance(4).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(DOMAIN_ID_COLUMN).getBytes();
        writeBatch.put(domainkey, domainIdBytes);
        ++putCount;
        // write event entries
        if (events != null) {
            for (TimelineEvent event : events) {
                byte[] revts = writeReverseOrderedLong(event.getTimestamp());
                byte[] key = KeyBuilder.newInstance().add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(EVENTS_COLUMN).add(revts).add(event.getEventType().getBytes(UTF_8)).getBytes();
                byte[] value = fstConf.asByteArray(event.getEventInfo());
                writeBatch.put(key, value);
                ++putCount;
            }
        }
        // write primary filter entries
        primaryFilters = entity.getPrimaryFilters();
        if (primaryFilters != null) {
            for (Entry<String, Set<Object>> primaryFilter : primaryFilters.entrySet()) {
                for (Object primaryFilterValue : primaryFilter.getValue()) {
                    byte[] key = KeyBuilder.newInstance(6).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(PRIMARY_FILTERS_COLUMN).add(primaryFilter.getKey()).add(fstConf.asByteArray(primaryFilterValue)).getBytes();
                    writeBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }
        // write other info entries
        Map<String, Object> otherInfo = entity.getOtherInfo();
        if (otherInfo != null) {
            for (Entry<String, Object> info : otherInfo.entrySet()) {
                byte[] key = KeyBuilder.newInstance(5).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(OTHER_INFO_COLUMN).add(info.getKey()).getBytes();
                byte[] value = fstConf.asByteArray(info.getValue());
                writeBatch.put(key, value);
                ++putCount;
            }
        }
        // write related entity entries
        Map<String, Set<String>> relatedEntities = entity.getRelatedEntities();
        if (relatedEntities != null) {
            for (Entry<String, Set<String>> relatedEntityList : relatedEntities.entrySet()) {
                String relatedEntityType = relatedEntityList.getKey();
                for (String relatedEntityId : relatedEntityList.getValue()) {
                    // look up start time of related entity
                    Long relatedStartTimeLong = getStartTimeLong(relatedEntityId, relatedEntityType);
                    // delay writing the related entity if no start time is found
                    if (relatedStartTimeLong == null) {
                        relatedEntitiesWithoutStartTimes.add(new EntityIdentifier(relatedEntityId, relatedEntityType));
                        continue;
                    }
                    byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedStartTimeLong);
                    long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
                    RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
                    if (relatedRollingWriteBatch == null) {
                        DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                        if (db != null) {
                            WriteBatch relatedWriteBatch = db.createWriteBatch();
                            relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                            entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                        }
                    }
                    if (relatedRollingWriteBatch == null) {
                        // no rolling DB covers the related entity's start time: it has expired, so add an error and skip this relation
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                        response.addError(error);
                        continue;
                    }
                    // This is the existing entity
                    byte[] relatedDomainIdBytes = relatedRollingWriteBatch.getDB().get(createDomainIdKey(relatedEntityId, relatedEntityType, relatedEntityStartTime));
                    // The timeline data created by the server before 2.6 won't have
                    // the domain field. We assume this timeline data is in the
                    // default timeline domain.
                    String domainId = null;
                    if (relatedDomainIdBytes == null) {
                        domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
                    } else {
                        domainId = new String(relatedDomainIdBytes, UTF_8);
                    }
                    if (!domainId.equals(entity.getDomainId())) {
                        // in this case the entity will be put, but the relation will be
                        // ignored
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
                        response.addError(error);
                        continue;
                    }
                    // write "forward" entry (related entity -> entity)
                    byte[] key = createRelatedEntityKey(relatedEntityId, relatedEntityType, relatedEntityStartTime, entity.getEntityId(), entity.getEntityType());
                    WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
                    relatedWriteBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }
        // write index entities
        RollingWriteBatch indexRollingWriteBatch = indexUpdates.get(roundedStartTime);
        if (indexRollingWriteBatch == null) {
            DB db = indexdb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch indexWriteBatch = db.createWriteBatch();
                indexRollingWriteBatch = new RollingWriteBatch(db, indexWriteBatch);
                indexUpdates.put(roundedStartTime, indexRollingWriteBatch);
            }
        }
        if (indexRollingWriteBatch == null) {
            // no rolling index DB covers this start time: the entity has expired, so add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch indexWriteBatch = indexRollingWriteBatch.getWriteBatch();
        putCount += writePrimaryFilterEntries(indexWriteBatch, primaryFilters, markerKey, EMPTY_BYTES);
    } catch (IOException e) {
        LOG.error("Error putting entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entity.getEntityId());
        error.setEntityType(entity.getEntityType());
        error.setErrorCode(TimelinePutError.IO_EXCEPTION);
        response.addError(error);
    }
    for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
        try {
            Long relatedEntityStartAndInsertTime = getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(), readReverseOrderedLong(revStartTime, 0), null);
            if (relatedEntityStartAndInsertTime == null) {
                throw new IOException("Error setting start time for related entity");
            }
            long relatedStartTimeLong = relatedEntityStartAndInsertTime;
            long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
            RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
            if (relatedRollingWriteBatch == null) {
                DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                if (db != null) {
                    WriteBatch relatedWriteBatch = db.createWriteBatch();
                    relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                    entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                }
            }
            if (relatedRollingWriteBatch == null) {
                // no rolling DB covers the related entity's start time: it has expired, so add an error and skip it
                TimelinePutError error = new TimelinePutError();
                error.setEntityId(entity.getEntityId());
                error.setEntityType(entity.getEntityType());
                error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                response.addError(error);
                continue;
            }
            WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
            byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedEntityStartAndInsertTime);
            // This is the new entity, the domain should be the same
            byte[] key = createDomainIdKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime);
            relatedWriteBatch.put(key, entity.getDomainId().getBytes(UTF_8));
            ++putCount;
            relatedWriteBatch.put(createRelatedEntityKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime, entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
            ++putCount;
            relatedWriteBatch.put(createEntityMarkerKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime), EMPTY_BYTES);
            ++putCount;
        } catch (IOException e) {
            LOG.error("Error putting related entity " + relatedEntity.getId() + " of type " + relatedEntity.getType() + " for entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.IO_EXCEPTION);
            response.addError(error);
        }
    }
    return putCount;
}
Also used: TimelineEvent (org.apache.hadoop.yarn.api.records.timeline.TimelineEvent), SortedSet (java.util.SortedSet), EnumSet (java.util.EnumSet), Set (java.util.Set), RollingWriteBatch (org.apache.hadoop.yarn.server.timeline.RollingLevelDB.RollingWriteBatch), ArrayList (java.util.ArrayList), IOException (java.io.IOException), TimelinePutError (org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError), GenericObjectMapper.writeReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.writeReverseOrderedLong), GenericObjectMapper.readReverseOrderedLong (org.apache.hadoop.yarn.server.timeline.GenericObjectMapper.readReverseOrderedLong), WriteBatch (org.iq80.leveldb.WriteBatch), DB (org.iq80.leveldb.DB)
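
Much of the method's length comes from repeating one bookkeeping pattern: round the start time down to its rolling period, look that period up in the TreeMap of pending batches, and lazily open a write batch if the backing rolling DB still exists; a null batch after that attempt means the period has already been rolled away, which is why those branches report EXPIRED_ENTITY. A minimal self-contained sketch of just that pattern with stand-in types (the real code uses RollingLevelDB.RollingWriteBatch and computeCurrentCheckMillis, and the one-hour period below is an assumption):

import java.util.TreeMap;

// Sketch of the get-or-create pattern used for entityUpdates and
// indexUpdates above; Batch stands in for RollingWriteBatch.
public class BatchCache {
    static final long PERIOD_MILLIS = 60L * 60 * 1000; // assumed roll period

    static class Batch { /* would wrap a DB handle and its WriteBatch */ }

    private final TreeMap<Long, Batch> updates = new TreeMap<>();

    /** Returns the batch for the period containing startTime, or null if expired. */
    Batch batchFor(long startTime) {
        long rounded = startTime - (startTime % PERIOD_MILLIS); // stand-in rounding
        Batch batch = updates.get(rounded);
        if (batch == null && dbStillExists(rounded)) {
            batch = new Batch();
            updates.put(rounded, batch);
        }
        return batch; // null means the rolling DB was already aged out
    }

    private boolean dbStillExists(long rounded) {
        return true; // placeholder; the real code asks RollingLevelDB for the DB
    }
}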

Aggregations

Set (java.util.Set): 6789
HashSet (java.util.HashSet): 4372
HashMap (java.util.HashMap): 2090
Map (java.util.Map): 1865
Iterator (java.util.Iterator): 1774
ArrayList (java.util.ArrayList): 1113
List (java.util.List): 980
Test (org.junit.Test): 920
TreeSet (java.util.TreeSet): 536
IOException (java.io.IOException): 501
SSOException (com.iplanet.sso.SSOException): 467
LinkedHashSet (java.util.LinkedHashSet): 418
SMSException (com.sun.identity.sm.SMSException): 347
IdRepoException (com.sun.identity.idm.IdRepoException): 268
Collection (java.util.Collection): 259
ImmutableSet (com.google.common.collect.ImmutableSet): 256
File (java.io.File): 245
SSOToken (com.iplanet.sso.SSOToken): 226
Collectors (java.util.stream.Collectors): 219
Test (org.testng.annotations.Test): 209