Example 11 with DiskBalancerDataNode

use of org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode in project hadoop by apache.

the class Command method getNode.

/**
   * Returns a DiskBalancer Node from the Cluster or null if not found.
   *
   * @param nodeName - can be the hostname, IP address or UUID of the node.
   * @return - the DataNode if found, null otherwise.
   */
DiskBalancerDataNode getNode(String nodeName) {
    DiskBalancerDataNode node = null;
    if (nodeName == null || nodeName.isEmpty()) {
        return node;
    }
    if (cluster.getNodes().size() == 0) {
        return node;
    }
    node = cluster.getNodeByName(nodeName);
    if (node != null) {
        return node;
    }
    node = cluster.getNodeByIPAddress(nodeName);
    if (node != null) {
        return node;
    }
    node = cluster.getNodeByUUID(nodeName);
    return node;
}
Also used : DiskBalancerDataNode(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode)
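A brief usage sketch, hedged: getNode is package-private, so callers live inside Command subclasses, and the cluster snapshot must already have been read. Any of the three identifier forms resolves the node; the literal values below are illustrative, not real cluster identifiers.

// Inside a Command subclass, after the cluster has been read:
DiskBalancerDataNode byHost = getNode("datanode-01.example.com");
DiskBalancerDataNode byIp   = getNode("192.168.1.15");
DiskBalancerDataNode byUuid = getNode("a2f6babd-9f60-4690-9b37-f1bf1b8f3d2f");
// All three calls return the same node when the identifiers refer to one
// node; each returns null when nothing matches.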

Example 12 with DiskBalancerDataNode

use of org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode in project hadoop by apache.

the class DBNameNodeConnector method getNodes.

/**
   * Returns a list of DiskBalancerDataNodes for the cluster.
   *
   * @return List of DiskBalancerDataNodes
   */
@Override
public List<DiskBalancerDataNode> getNodes() throws Exception {
    Preconditions.checkNotNull(this.connector);
    List<DiskBalancerDataNode> nodeList = new LinkedList<>();
    DatanodeStorageReport[] reports = this.connector.getLiveDatanodeStorageReport();
    for (DatanodeStorageReport report : reports) {
        DiskBalancerDataNode datanode = getBalancerNodeFromDataNode(report.getDatanodeInfo());
        getVolumeInfoFromStorageReports(datanode, report.getStorageReports());
        nodeList.add(datanode);
    }
    return nodeList;
}
Also used : DatanodeStorageReport(org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport) LinkedList(java.util.LinkedList) DiskBalancerDataNode(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode)
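A hedged usage sketch of the connector API above. ConnectorFactory.getCluster is the usual entry point in the diskbalancer connectors package, but the NameNode URI and the printed fields here are illustrative, not a prescribed setup.

import java.net.URI;
import java.util.List;

import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ClusterConnector;
import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ConnectorFactory;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;

public class ListNodesDemo {
    public static void main(String[] args) throws Exception {
        // Build a connector for the cluster behind the given NameNode URI.
        ClusterConnector connector = ConnectorFactory.getCluster(
            new URI("hdfs://namenode.example.com:8020"), new HdfsConfiguration());
        // getNodes() walks the live DataNode storage reports, as shown above.
        List<DiskBalancerDataNode> nodes = connector.getNodes();
        for (DiskBalancerDataNode dn : nodes) {
            System.out.println(dn.getDataNodeName() + ": "
                + dn.getVolumeCount() + " volumes");
        }
    }
}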

Example 13 with DiskBalancerDataNode

use of org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode in project hadoop by apache.

the class ReportCommand method handleNodeReport.

private void handleNodeReport(final CommandLine cmd, StrBuilder result, final String nodeFormat, final String volumeFormat) throws Exception {
    String outputLine = "";
    /*
     * Get the value that identifies the DataNode(s) from the command line;
     * it can be a UUID, IP address, or hostname.
     */
    final String nodeVal = cmd.getOptionValue(DiskBalancerCLI.NODE);
    if (StringUtils.isBlank(nodeVal)) {
        outputLine = "The value for '-node' is neither specified or empty.";
        recordOutput(result, outputLine);
    } else {
        /*
       * Reporting volume information for specific DataNode(s)
       */
        outputLine = String.format("Reporting volume information for DataNode(s) '%s'.", nodeVal);
        recordOutput(result, outputLine);
        List<DiskBalancerDataNode> dbdns = Lists.newArrayList();
        try {
            dbdns = getNodes(nodeVal);
        } catch (DiskBalancerException e) {
            // If nodeVal contains any invalid nodes, getNodes throws a
            // DiskBalancerException; record its message and stop.
            recordOutput(result, e.getMessage());
            return;
        }
        if (!dbdns.isEmpty()) {
            for (DiskBalancerDataNode node : dbdns) {
                recordNodeReport(result, node, nodeFormat, volumeFormat);
                result.append(System.lineSeparator());
            }
        }
    }
}
Also used : DiskBalancerException(org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException) DiskBalancerDataNode(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode)
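This private handler sits behind the public report command. A hedged sketch of driving it end to end, assuming DiskBalancerCLI keeps its usual Tool wiring (it is run via ToolRunner from the hdfs diskbalancer shell command) and a Configuration-taking constructor; the hostname is illustrative, and a comma-separated list of identifiers is also accepted.

import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
import org.apache.hadoop.util.ToolRunner;

public class ReportNodeDemo {
    public static void main(String[] args) throws Exception {
        // Equivalent to: hdfs diskbalancer -report -node datanode-01.example.com
        int status = ToolRunner.run(
            new DiskBalancerCLI(new HdfsConfiguration()),
            new String[] {"-report", "-node", "datanode-01.example.com"});
        System.exit(status);
    }
}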

Example 14 with DiskBalancerDataNode

use of org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode in project hadoop by apache.

the class ReportCommand method handleTopReport.

private void handleTopReport(final CommandLine cmd, final StrBuilder result, final String nodeFormat) {
    Collections.sort(getCluster().getNodes(), Collections.reverseOrder());
    /* extract value that identifies top X DataNode(s) */
    setTopNodes(parseTopNodes(cmd, result));
    /*
     * Reporting volume information of top X DataNode(s) in summary
     */
    final String outputLine = String.format("Reporting top %d DataNode(s) benefiting from running DiskBalancer.", getTopNodes());
    recordOutput(result, outputLine);
    ListIterator<DiskBalancerDataNode> li = getCluster().getNodes().listIterator();
    for (int i = 0; i < getTopNodes() && li.hasNext(); i++) {
        DiskBalancerDataNode dbdn = li.next();
        result.appendln(String.format(nodeFormat, i + 1, getTopNodes(), dbdn.getDataNodeName(), dbdn.getDataNodeIP(), dbdn.getDataNodePort(), dbdn.getDataNodeUUID(), dbdn.getVolumeCount(), dbdn.getNodeDataDensity()));
    }
}
Also used : DiskBalancerDataNode(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode)
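The pattern worth noting here is the descending sort followed by a bounded list walk. A self-contained sketch of the same pattern, with plain integers standing in for the nodes' natural ordering (the report sorts so the nodes that would benefit most come first):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;

public class TopNDemo {
    public static void main(String[] args) {
        List<Integer> densities = Arrays.asList(3, 9, 1, 7, 5);
        // Natural order reversed: largest values first, as in handleTopReport.
        Collections.sort(densities, Collections.reverseOrder());
        int topNodes = 3;
        // Visit at most topNodes entries, even if the list is shorter.
        ListIterator<Integer> li = densities.listIterator();
        for (int i = 0; i < topNodes && li.hasNext(); i++) {
            System.out.printf("%d/%d: density %d%n", i + 1, topNodes, li.next());
        }
    }
}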

Example 15 with DiskBalancerDataNode

use of org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode in project hadoop by apache.

the class DiskBalancerTestUtil method createRandomDataNode.

/**
   * Creates a DataNode with randomly generated volumes.
   *
   * @param diskTypes - storage types needed in the node
   * @param diskCount - number of disks of each type to create
   * @return DataNode
   * @throws Exception
   */
public DiskBalancerDataNode createRandomDataNode(StorageType[] diskTypes, int diskCount) throws Exception {
    Preconditions.checkState(diskTypes.length > 0);
    Preconditions.checkState(diskCount > 0);
    DiskBalancerDataNode node = new DiskBalancerDataNode(UUID.randomUUID().toString());
    for (StorageType t : diskTypes) {
        DiskBalancerVolumeSet vSet = createRandomVolumeSet(t, diskCount);
        for (DiskBalancerVolume v : vSet.getVolumes()) {
            node.addVolume(v);
        }
    }
    return node;
}
Also used : DiskBalancerVolumeSet(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerVolumeSet) StorageType(org.apache.hadoop.fs.StorageType) DiskBalancerVolume(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerVolume) DiskBalancerDataNode(org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode)
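A hedged usage sketch for the test utility, assuming DiskBalancerTestUtil's default constructor and the Hadoop test classpath; the storage types and disk count are illustrative. With two types and two disks of each, the node should report four volumes.

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerTestUtil;
import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;

public class RandomNodeDemo {
    public static void main(String[] args) throws Exception {
        DiskBalancerTestUtil util = new DiskBalancerTestUtil();
        DiskBalancerDataNode node = util.createRandomDataNode(
            new StorageType[] {StorageType.DISK, StorageType.SSD}, 2);
        System.out.println("volumes: " + node.getVolumeCount());  // expect 4
    }
}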

Aggregations

DiskBalancerDataNode (org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode) 27
Test (org.junit.Test) 19
DiskBalancerVolume (org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerVolume) 16
DiskBalancerCluster (org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster) 13
NodePlan (org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan) 12
GreedyPlanner (org.apache.hadoop.hdfs.server.diskbalancer.planner.GreedyPlanner) 11
NullConnector (org.apache.hadoop.hdfs.server.diskbalancer.connectors.NullConnector) 10
DiskBalancerVolumeSet (org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerVolumeSet) 7
Step (org.apache.hadoop.hdfs.server.diskbalancer.planner.Step) 5
ClusterConnector (org.apache.hadoop.hdfs.server.diskbalancer.connectors.ClusterConnector) 3
LinkedList (java.util.LinkedList) 2
DiskBalancerException (org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException) 2
URI (java.net.URI) 1
StrBuilder (org.apache.commons.lang.text.StrBuilder) 1
Configuration (org.apache.hadoop.conf.Configuration) 1
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream) 1
Path (org.apache.hadoop.fs.Path) 1
StorageType (org.apache.hadoop.fs.StorageType) 1
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration) 1
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster) 1