Search in sources :

Example 1 with InnerNode

Usage of org.apache.hadoop.net.InnerNode in the Apache Hadoop project.

The following is the add method of the class DFSTopologyNodeImpl.

/**
 * Adds node {@code n} to the subtree rooted at this topology node and keeps
 * the per-child storage-type counters ({@code childrenStorageInfo}) and the
 * aggregate storage-type counters (via {@code incStorageTypeCount}) in sync
 * with the tree structure.
 *
 * @param n the node to add; must be a descendant of this node and, because
 *          leaves in the HDFS topology are datanodes, an instance of
 *          {@link DatanodeDescriptor}
 * @return {@code true} if {@code n} was added as a new leaf of this subtree;
 *         {@code false} if a child with the same name already existed and was
 *         replaced instead (or if a descendant subtree reported the same)
 * @throws IllegalArgumentException if {@code n} is not a descendant of this
 *         node, or is not a {@code DatanodeDescriptor}
 */
@Override
public boolean add(Node n) {
    if (!isAncestor(n)) {
        throw new IllegalArgumentException(n.getName() + ", which is located at " + n.getNetworkLocation() + ", is not a descendant of " + getPath(this));
    }
    // In HDFS topology, the leaf node should always be DatanodeDescriptor
    if (!(n instanceof DatanodeDescriptor)) {
        throw new IllegalArgumentException("Unexpected node type " + n.getClass().getName());
    }
    DatanodeDescriptor dnDescriptor = (DatanodeDescriptor) n;
    if (isParent(n)) {
        // this node is the parent of n; add n directly
        n.setParent(this);
        n.setLevel(this.level + 1);
        // Replaces any existing entry of the same name in the map; the
        // previous mapping (if any) tells us whether this is a re-add.
        Node prev = childrenMap.put(n.getName(), n);
        if (prev != null) {
            // Re-add of an existing child: swap it in the children list and
            // return false. Note the early return means numOfLeaves and the
            // storage-type counters below are NOT touched for a replacement.
            for (int i = 0; i < children.size(); i++) {
                if (children.get(i).getName().equals(n.getName())) {
                    children.set(i, n);
                    return false;
                }
            }
        }
        // Genuinely new leaf under this node.
        children.add(n);
        numOfLeaves++;
        // Record the datanode's storage types, one entry per child name.
        if (!childrenStorageInfo.containsKey(dnDescriptor.getName())) {
            childrenStorageInfo.put(dnDescriptor.getName(), new EnumMap<>(StorageType.class));
        }
        for (StorageType st : dnDescriptor.getStorageTypes()) {
            // A direct datanode child contributes exactly 1 per storage type,
            // so put(st, 1) rather than an increment.
            childrenStorageInfo.get(dnDescriptor.getName()).put(st, 1);
            incStorageTypeCount(st);
        }
        return true;
    } else {
        // find the next ancestor node
        String parentName = getNextAncestorName(n);
        InnerNode parentNode = (InnerNode) childrenMap.get(parentName);
        if (parentNode == null) {
            // create a new InnerNode
            parentNode = createParentNode(parentName);
            children.add(parentNode);
            childrenMap.put(parentNode.getName(), parentNode);
        }
        // add n to the subtree of the next ancestor node
        if (parentNode.add(n)) {
            numOfLeaves++;
            if (!childrenStorageInfo.containsKey(parentNode.getName())) {
                // First leaf under this inner-node child: seed each of the
                // datanode's storage types with a count of 1.
                childrenStorageInfo.put(parentNode.getName(), new EnumMap<>(StorageType.class));
                for (StorageType st : dnDescriptor.getStorageTypes()) {
                    childrenStorageInfo.get(parentNode.getName()).put(st, 1);
                }
            } else {
                // Inner-node child already tracked: merge the new datanode's
                // storage types into its existing per-type counts.
                EnumMap<StorageType, Integer> currentCount = childrenStorageInfo.get(parentNode.getName());
                for (StorageType st : dnDescriptor.getStorageTypes()) {
                    if (currentCount.containsKey(st)) {
                        currentCount.put(st, currentCount.get(st) + 1);
                    } else {
                        currentCount.put(st, 1);
                    }
                }
            }
            // Aggregate counters are bumped only when the subtree actually
            // gained a leaf (parentNode.add returned true).
            for (StorageType st : dnDescriptor.getStorageTypes()) {
                incStorageTypeCount(st);
            }
            return true;
        } else {
            return false;
        }
    }
}
Also used : DatanodeDescriptor(org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor) StorageType(org.apache.hadoop.fs.StorageType) InnerNode(org.apache.hadoop.net.InnerNode) Node(org.apache.hadoop.net.Node)

Aggregations

StorageType (org.apache.hadoop.fs.StorageType)1 DatanodeDescriptor (org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor)1 InnerNode (org.apache.hadoop.net.InnerNode)1 Node (org.apache.hadoop.net.Node)1