Search in sources :

Example 11 with DiskErrorException

Use of org.apache.hadoop.util.DiskChecker.DiskErrorException in the Apache Hadoop project.

From the class TestDataNodeVolumeFailureToleration, method testVolumeConfig.

/**
 * Fails the requested number of volumes, restarts the datanode with the
 * given tolerated-volume setting, and verifies that the block pool
 * service's liveness matches the expectation.
 */
private void testVolumeConfig(int volumesTolerated, int volumesFailed, boolean expectedBPServiceState, boolean manageDfsDirs) throws IOException, InterruptedException {
    assumeNotWindows();
    final int dnIndex = 0;
    // Fail the "current" directories: invalid storage-directory permissions
    // are repaired automatically when the datanode starts up.
    File[] dirs = {
        new File(cluster.getInstanceStorageDir(dnIndex, 0), "current"),
        new File(cluster.getInstanceStorageDir(dnIndex, 1), "current")
    };
    try {
        for (int i = 0; i < volumesFailed; i++) {
            prepareDirToFail(dirs[i]);
        }
        restartDatanodes(volumesTolerated, manageDfsDirs);
    } catch (DiskErrorException e) {
        GenericTestUtils.assertExceptionContains("Invalid value configured for dfs.datanode.failed.volumes.tolerated", e);
    } finally {
        // No datanodes at all means the configured tolerated-volumes value
        // was rejected, so the block pool service cannot be alive.
        final boolean alive;
        if (cluster.getDataNodes().isEmpty()) {
            alive = false;
        } else {
            alive = cluster.getDataNodes().get(0).isBPServiceAlive(cluster.getNamesystem().getBlockPoolId());
        }
        assertEquals(expectedBPServiceState, alive);
        // Restore permissions so subsequent tests can reuse the directories.
        for (File dir : dirs) {
            FileUtil.chmod(dir.toString(), "755");
        }
    }
}
Also used : DiskErrorException(org.apache.hadoop.util.DiskChecker.DiskErrorException) File(java.io.File)

Example 12 with DiskErrorException

Use of org.apache.hadoop.util.DiskChecker.DiskErrorException in the Apache Hadoop project.

From the class YarnChild, method configureLocalDirs.

/**
 * Configures the mapred-local dirs for the child task; the task uses this
 * config to locate its output directory.
 * @throws IOException if the work directory cannot be created
 */
private static void configureLocalDirs(Task task, JobConf job) throws IOException {
    String[] localSysDirs = StringUtils.getTrimmedStrings(System.getenv(Environment.LOCAL_DIRS.name()));
    job.setStrings(MRConfig.LOCAL_DIR, localSysDirs);
    LOG.info(MRConfig.LOCAL_DIR + " for child: " + job.get(MRConfig.LOCAL_DIR));
    LocalDirAllocator allocator = new LocalDirAllocator(MRConfig.LOCAL_DIR);
    Path workDir;
    try {
        // Look for an existing JOB_LOCAL_DIR on this host first.
        workDir = allocator.getLocalPathToRead("work", job);
    } catch (DiskErrorException e) {
        // DiskErrorException means the dir was not found on any local dir;
        // fall through and create it below.
        workDir = null;
    }
    if (workDir == null) {
        // No JOB_LOCAL_DIR exists on this host yet -- create one.
        workDir = allocator.getLocalPathForWrite("work", job);
        FileSystem localFs = FileSystem.getLocal(job).getRaw();
        boolean created;
        try {
            created = localFs.mkdirs(workDir);
        } catch (FileAlreadyExistsException e) {
            // Tasks run in their own JVMs, so several may race to create this
            // directory at once. Losing the race is fine: the directory
            // already exists, so just resolve its readable path.
            created = true;
            workDir = allocator.getLocalPathToRead("work", job);
        }
        if (!created) {
            throw new IOException("Mkdirs failed to create " + workDir.toString());
        }
    }
    job.set(MRJobConfig.JOB_LOCAL_DIR, workDir.toString());
}
Also used : Path(org.apache.hadoop.fs.Path) DiskErrorException(org.apache.hadoop.util.DiskChecker.DiskErrorException) FileSystem(org.apache.hadoop.fs.FileSystem) LocalDirAllocator(org.apache.hadoop.fs.LocalDirAllocator) IOException(java.io.IOException)

Aggregations

DiskErrorException (org.apache.hadoop.util.DiskChecker.DiskErrorException)12 IOException (java.io.IOException)4 File (java.io.File)3 Path (org.apache.hadoop.fs.Path)3 Test (org.junit.Test)3 ArrayList (java.util.ArrayList)2 Configuration (org.apache.hadoop.conf.Configuration)2 LocalFileSystem (org.apache.hadoop.fs.LocalFileSystem)2 StorageLocation (org.apache.hadoop.hdfs.server.datanode.StorageLocation)2 ListenableFuture (com.google.common.util.concurrent.ListenableFuture)1 ByteArrayInputStream (java.io.ByteArrayInputStream)1 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1 DataOutputStream (java.io.DataOutputStream)1 Path (java.nio.file.Path)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 LinkedHashMap (java.util.LinkedHashMap)1 Map (java.util.Map)1 ExecutionException (java.util.concurrent.ExecutionException)1 TimeoutException (java.util.concurrent.TimeoutException)1