Use of org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto in project hadoop by apache: class GetLabelsToNodesResponsePBImpl, method initLabelsToNodes.
/**
 * Lazily builds the label -&gt; node-id map from the underlying proto.
 * Does nothing if the map has already been deserialized.
 */
private void initLabelsToNodes() {
  if (this.labelsToNodes != null) {
    // Already materialized from the proto; nothing to do.
    return;
  }
  GetLabelsToNodesResponseProtoOrBuilder source = viaProto ? proto : builder;
  this.labelsToNodes = new HashMap<String, Set<NodeId>>();
  for (LabelsToNodeIdsProto entry : source.getLabelsToNodesList()) {
    Set<NodeId> nodes = new HashSet<NodeId>();
    for (NodeIdProto idProto : entry.getNodeIdList()) {
      nodes.add(new NodeIdPBImpl(idProto));
    }
    // Labels with no attached nodes are deliberately omitted from the map.
    if (!nodes.isEmpty()) {
      this.labelsToNodes.put(entry.getNodeLabels(), nodes);
    }
  }
}
Use of org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto in project hadoop by apache: class GetLabelsToNodesResponsePBImpl, method addLabelsToNodesToProto.
/**
 * Flushes the in-memory label -&gt; nodes map into the proto builder,
 * replacing any entries serialized previously. Conversion is done
 * lazily, one map entry at a time, as the builder consumes the iterable.
 */
private void addLabelsToNodesToProto() {
  maybeInitBuilder();
  builder.clearLabelsToNodes();
  if (labelsToNodes == null) {
    return;
  }
  Iterable<LabelsToNodeIdsProto> protoEntries = new Iterable<LabelsToNodeIdsProto>() {
    @Override
    public Iterator<LabelsToNodeIdsProto> iterator() {
      return new Iterator<LabelsToNodeIdsProto>() {
        private final Iterator<Entry<String, Set<NodeId>>> mapIter =
            labelsToNodes.entrySet().iterator();

        @Override
        public boolean hasNext() {
          return mapIter.hasNext();
        }

        @Override
        public LabelsToNodeIdsProto next() {
          Entry<String, Set<NodeId>> entry = mapIter.next();
          // Convert each NodeId record to its proto form before emitting.
          Set<NodeIdProto> nodeIds = new HashSet<NodeIdProto>();
          for (NodeId nodeId : entry.getValue()) {
            nodeIds.add(convertToProtoFormat(nodeId));
          }
          return LabelsToNodeIdsProto.newBuilder()
              .setNodeLabels(entry.getKey())
              .addAllNodeId(nodeIds)
              .build();
        }

        @Override
        public void remove() {
          // Read-only view over the map; removal is not supported.
          throw new UnsupportedOperationException();
        }
      };
    }
  };
  builder.addAllLabelsToNodes(protoEntries);
}
Use of org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto in project hadoop by apache: class CheckForDecommissioningNodesResponsePBImpl, method addDecommissioningNodesToProto.
/**
 * Serializes the decommissioning-node set into the proto builder,
 * discarding whatever was stored there before.
 */
private void addDecommissioningNodesToProto() {
  maybeInitBuilder();
  builder.clearDecommissioningNodes();
  if (this.decommissioningNodes == null) {
    // Nothing to serialize.
    return;
  }
  Set<NodeIdProto> protos = new HashSet<NodeIdProto>();
  for (NodeId id : this.decommissioningNodes) {
    protos.add(convertToProtoFormat(id));
  }
  builder.addAllDecommissioningNodes(protos);
}
Use of org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto in project hadoop by apache: class CheckForDecommissioningNodesResponsePBImpl, method initNodesDecommissioning.
/**
 * Lazily rebuilds the decommissioning-node set from the underlying proto.
 * No-op when the set has already been populated.
 */
private void initNodesDecommissioning() {
  if (this.decommissioningNodes != null) {
    return;
  }
  CheckForDecommissioningNodesResponseProtoOrBuilder source = viaProto ? proto : builder;
  this.decommissioningNodes = new HashSet<NodeId>();
  for (NodeIdProto idProto : source.getDecommissioningNodesList()) {
    this.decommissioningNodes.add(convertFromProtoFormat(idProto));
  }
}
Use of org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto in project incubator-myriad by apache: class OfferLifeCycleManagerTest, method setUp.
/**
 * Wires an OfferLifecycleManager to a store holding a single scheduler
 * node (localhost:8000) and a mock scheduler driver before each test.
 */
@Before
public void setUp() throws Exception {
  NodeIdProto nodeIdProto =
      NodeIdProto.newBuilder().setHost("localhost").setPort(8000).build();
  RMNode rmNode = new RMNodeImpl(new NodeIdPBImpl(nodeIdProto), new MockRMContext(),
      "localhost", 8000, 8070, new NodeBase(), new ResourcePBImpl(), "1.0");
  SchedulerNode schedulerNode = new FiCaSchedulerNode(rmNode, false);
  NodeStore nodeStore = new NodeStore();
  nodeStore.add(schedulerNode);
  manager = new OfferLifecycleManager(nodeStore, new MyriadDriver(new MockSchedulerDriver()));
}
Aggregations