Example 11 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

From the class TestFSDownload, method testDownloadBadPublic: with the umask set to 077, the created file is not readable by other users, so FSDownload must refuse to localize a resource marked PUBLIC and fail with an IOException.

@Test(timeout = 10000)
public void testDownloadBadPublic() throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();
    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);
    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    int size = 512;
    LocalResourceVisibility vis = LocalResourceVisibility.PUBLIC;
    Path path = new Path(basedir, "test-file");
    LocalResource rsrc = createFile(files, path, size, rand, vis);
    rsrcVis.put(rsrc, vis);
    Path destPath = dirs.getLocalPathForWrite(basedir.toString(), size, conf);
    destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
    exec.shutdown();
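    // awaitTermination returns false until the submitted download finishes; spin until then.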
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS)) ;
    Assert.assertTrue(pending.get(rsrc).isDone());
    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            p.getValue().get();
            Assert.fail("We localized a file that is not public.");
        }
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof IOException);
    }
}
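
The failure hinges on the umask: with 077, the file gets no read permission for "other" users, so FSDownload's public-visibility check rejects the PUBLIC resource (the real check also walks ancestor directories for execute permission). A minimal sketch of testing that condition yourself with FileContext; the helper name isWorldReadable is illustrative, not part of the test:

    // FsPermission and FsAction come from org.apache.hadoop.fs.permission.
    // Hypothetical helper: true if "other" users can read the file,
    // the condition a PUBLIC resource must satisfy.
    static boolean isWorldReadable(FileContext fc, Path p) throws IOException {
        FsPermission perm = fc.getFileStatus(p).getPermission();
        return perm.getOtherAction().implies(FsAction.READ);
    }
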
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) IOException(java.io.IOException) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) LocalResourceVisibility(org.apache.hadoop.yarn.api.records.LocalResourceVisibility) Random(java.util.Random) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) LocalDirAllocator(org.apache.hadoop.fs.LocalDirAllocator) ExecutionException(java.util.concurrent.ExecutionException) Map(java.util.Map) ConcurrentMap(java.util.concurrent.ConcurrentMap) FileContext(org.apache.hadoop.fs.FileContext) Test(org.junit.Test)

Example 12 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

From the class TestFSDownload, method testDirDownload: localizes five jar resources with alternating PRIVATE and APPLICATION visibility, then verifies that each localized path is a directory (archives are unpacked) whose permissions match its visibility.

@Test(timeout = 10000)
public void testDirDownload() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());
    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();
    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);
    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = HadoopExecutors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    for (int i = 0; i < 5; ++i) {
        LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
        if (i % 2 == 1) {
            vis = LocalResourceVisibility.APPLICATION;
        }
        Path p = new Path(basedir, "dir" + i + ".jar");
        LocalResource rsrc = createJar(files, p, vis);
        rsrcVis.put(rsrc, vis);
        Path destPath = dirs.getLocalPathForWrite(basedir.toString(), conf);
        destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
        FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
        pending.put(rsrc, exec.submit(fsd));
    }
    exec.shutdown();
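    // Spin until all five download tasks have finished.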
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS)) ;
    for (Future<Path> path : pending.values()) {
        Assert.assertTrue(path.isDone());
    }
    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            Path localized = p.getValue().get();
            FileStatus status = files.getFileStatus(localized);
            System.out.println("Testing path " + localized);
            Assert.assertTrue(status.isDirectory());
            Assert.assertTrue(rsrcVis.containsKey(p.getKey()));
            verifyPermsRecursively(localized.getFileSystem(conf), files, localized, rsrcVis.get(p.getKey()));
        }
    } catch (ExecutionException e) {
        throw new IOException("Failed exec", e);
    }
}
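
Because createJar produces an archive, FSDownload unpacks it during localization, which is why each localized path is asserted to be a directory before verifyPermsRecursively walks it. A minimal sketch of such a recursive walk over FileContext (a simplified stand-in for the test's helper, not its actual code):

    static void walkPerms(FileContext fc, Path dir) throws IOException {
        // fc.util().listStatus returns a plain FileStatus[]
        // (FileContext.listStatus itself returns a RemoteIterator).
        for (FileStatus s : fc.util().listStatus(dir)) {
            System.out.println(s.getPath() + " -> " + s.getPermission());
            if (s.isDirectory()) {
                walkPerms(fc, s.getPath());
            }
        }
    }
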
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) IOException(java.io.IOException) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource) LocalResourceVisibility(org.apache.hadoop.yarn.api.records.LocalResourceVisibility) Random(java.util.Random) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) LocalDirAllocator(org.apache.hadoop.fs.LocalDirAllocator) ExecutionException(java.util.concurrent.ExecutionException) Map(java.util.Map) ConcurrentMap(java.util.concurrent.ConcurrentMap) FileContext(org.apache.hadoop.fs.FileContext) Test(org.junit.Test)

Example 13 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

From the class TestEventFlow, method testSuccessfulContainerLaunch: uses FileContext to clear the NodeManager's local, log, and remote-log directories, then starts a container through a DummyContainerManager, waits for it to reach RUNNING or SCHEDULED, stops it, and waits for COMPLETE.

@Test
public void testSuccessfulContainerLaunch() throws InterruptedException, IOException, YarnException {
    FileContext localFS = FileContext.getLocalFSFileContext();
    localFS.delete(new Path(localDir.getAbsolutePath()), true);
    localFS.delete(new Path(localLogDir.getAbsolutePath()), true);
    localFS.delete(new Path(remoteLogDir.getAbsolutePath()), true);
    localDir.mkdir();
    localLogDir.mkdir();
    remoteLogDir.mkdir();
    YarnConfiguration conf = new YarnConfiguration();
    Context context = new NMContext(new NMContainerTokenSecretManager(conf), new NMTokenSecretManagerInNM(), null, null, new NMNullStateStoreService(), false, conf) {

        @Override
        public int getHttpPort() {
            return 1234;
        }
    };
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, localDir.getAbsolutePath());
    conf.set(YarnConfiguration.NM_LOG_DIRS, localLogDir.getAbsolutePath());
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogDir.getAbsolutePath());
    conf.set(YarnConfiguration.NM_LOCALIZER_ADDRESS, "0.0.0.0:" + ServerSocketUtil.getPort(8040, 10));
    ContainerExecutor exec = new DefaultContainerExecutor();
    exec.setConf(conf);
    DeletionService del = new DeletionService(exec);
    Dispatcher dispatcher = new AsyncDispatcher();
    LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService();
    NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(NodeManager.getNodeHealthScriptRunner(conf), dirsHandler);
    healthChecker.init(conf);
    NodeManagerMetrics metrics = NodeManagerMetrics.create();
    NodeStatusUpdater nodeStatusUpdater = new NodeStatusUpdaterImpl(context, dispatcher, healthChecker, metrics) {

        @Override
        protected ResourceTracker getRMClient() {
            return new LocalRMInterface();
        }

        @Override
        protected void stopRMProxy() {
            return;
        }

        @Override
        protected void startStatusUpdater() {
            // Don't start any updating thread.
            return;
        }

        @Override
        public long getRMIdentifier() {
            return SIMULATED_RM_IDENTIFIER;
        }
    };
    DummyContainerManager containerManager = new DummyContainerManager(context, exec, del, nodeStatusUpdater, metrics, dirsHandler);
    nodeStatusUpdater.init(conf);
    ((NMContext) context).setContainerManager(containerManager);
    nodeStatusUpdater.start();
    ((NMContext) context).setNodeStatusUpdater(nodeStatusUpdater);
    containerManager.init(conf);
    containerManager.start();
    ContainerLaunchContext launchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class);
    ApplicationId applicationId = ApplicationId.newInstance(0, 0);
    ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(applicationId, 0);
    ContainerId cID = ContainerId.newContainerId(applicationAttemptId, 0);
    String user = "testing";
    StartContainerRequest scRequest = StartContainerRequest.newInstance(launchContext, TestContainerManager.createContainerToken(cID, SIMULATED_RM_IDENTIFIER, context.getNodeId(), user, context.getContainerTokenSecretManager()));
    List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
    list.add(scRequest);
    StartContainersRequest allRequests = StartContainersRequest.newInstance(list);
    containerManager.startContainers(allRequests);
    BaseContainerManagerTest.waitForContainerState(containerManager, cID, Arrays.asList(ContainerState.RUNNING, ContainerState.SCHEDULED), 20);
    List<ContainerId> containerIds = new ArrayList<ContainerId>();
    containerIds.add(cID);
    StopContainersRequest stopRequest = StopContainersRequest.newInstance(containerIds);
    containerManager.stopContainers(stopRequest);
    BaseContainerManagerTest.waitForContainerState(containerManager, cID, ContainerState.COMPLETE);
    containerManager.stop();
}
Also used : ArrayList(java.util.ArrayList) NMTokenSecretManagerInNM(org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM) Dispatcher(org.apache.hadoop.yarn.event.Dispatcher) AsyncDispatcher(org.apache.hadoop.yarn.event.AsyncDispatcher) NMNullStateStoreService(org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) NMContainerTokenSecretManager(org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager) NodeManagerMetrics(org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics) StopContainersRequest(org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest) Path(org.apache.hadoop.fs.Path) FileContext(org.apache.hadoop.fs.FileContext) NMContext(org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext) ContainerLaunchContext(org.apache.hadoop.yarn.api.records.ContainerLaunchContext) StartContainersRequest(org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) StartContainerRequest(org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) BaseContainerManagerTest(org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest) Test(org.junit.Test)

Example 14 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

From the class TestJobHistoryUtils, method testGetHistoryDirsForCleaning: creates year/month/day history directories on every side of a cutoff date and verifies that getHistoryDirsForCleaning returns exactly the fourteen directories older than the cutoff, ignoring malformed names.

@Test
@SuppressWarnings("unchecked")
public void testGetHistoryDirsForCleaning() throws IOException {
    Path pRoot = new Path(TEST_DIR, "org.apache.hadoop.mapreduce.v2.jobhistory." + "TestJobHistoryUtils.testGetHistoryDirsForCleaning");
    FileContext fc = FileContext.getFileContext();
    Calendar cCal = Calendar.getInstance();
    int year = 2013;
    int month = 7;
    int day = 21;
    cCal.set(year, month - 1, day, 1, 0);
    long cutoff = cCal.getTimeInMillis();
    clearDir(fc, pRoot);
    Path pId00 = createPath(fc, pRoot, year, month, day, "000000");
    Path pId01 = createPath(fc, pRoot, year, month, day + 1, "000001");
    Path pId02 = createPath(fc, pRoot, year, month, day - 1, "000002");
    Path pId03 = createPath(fc, pRoot, year, month + 1, day, "000003");
    Path pId04 = createPath(fc, pRoot, year, month + 1, day + 1, "000004");
    Path pId05 = createPath(fc, pRoot, year, month + 1, day - 1, "000005");
    Path pId06 = createPath(fc, pRoot, year, month - 1, day, "000006");
    Path pId07 = createPath(fc, pRoot, year, month - 1, day + 1, "000007");
    Path pId08 = createPath(fc, pRoot, year, month - 1, day - 1, "000008");
    Path pId09 = createPath(fc, pRoot, year + 1, month, day, "000009");
    Path pId10 = createPath(fc, pRoot, year + 1, month, day + 1, "000010");
    Path pId11 = createPath(fc, pRoot, year + 1, month, day - 1, "000011");
    Path pId12 = createPath(fc, pRoot, year + 1, month + 1, day, "000012");
    Path pId13 = createPath(fc, pRoot, year + 1, month + 1, day + 1, "000013");
    Path pId14 = createPath(fc, pRoot, year + 1, month + 1, day - 1, "000014");
    Path pId15 = createPath(fc, pRoot, year + 1, month - 1, day, "000015");
    Path pId16 = createPath(fc, pRoot, year + 1, month - 1, day + 1, "000016");
    Path pId17 = createPath(fc, pRoot, year + 1, month - 1, day - 1, "000017");
    Path pId18 = createPath(fc, pRoot, year - 1, month, day, "000018");
    Path pId19 = createPath(fc, pRoot, year - 1, month, day + 1, "000019");
    Path pId20 = createPath(fc, pRoot, year - 1, month, day - 1, "000020");
    Path pId21 = createPath(fc, pRoot, year - 1, month + 1, day, "000021");
    Path pId22 = createPath(fc, pRoot, year - 1, month + 1, day + 1, "000022");
    Path pId23 = createPath(fc, pRoot, year - 1, month + 1, day - 1, "000023");
    Path pId24 = createPath(fc, pRoot, year - 1, month - 1, day, "000024");
    Path pId25 = createPath(fc, pRoot, year - 1, month - 1, day + 1, "000025");
    Path pId26 = createPath(fc, pRoot, year - 1, month - 1, day - 1, "000026");
    // malformed names should be ignored without causing problems
    Path pId27 = createPath(fc, pRoot, "foo", "" + month, "" + day, "000027");
    Path pId28 = createPath(fc, pRoot, "" + year, "foo", "" + day, "000028");
    Path pId29 = createPath(fc, pRoot, "" + year, "" + month, "foo", "000029");
    List<FileStatus> dirs = JobHistoryUtils.getHistoryDirsForCleaning(fc, pRoot, cutoff);
    Collections.sort(dirs);
    Assert.assertEquals(14, dirs.size());
    Assert.assertEquals(pId26.toUri().getPath(), dirs.get(0).getPath().toUri().getPath());
    Assert.assertEquals(pId24.toUri().getPath(), dirs.get(1).getPath().toUri().getPath());
    Assert.assertEquals(pId25.toUri().getPath(), dirs.get(2).getPath().toUri().getPath());
    Assert.assertEquals(pId20.toUri().getPath(), dirs.get(3).getPath().toUri().getPath());
    Assert.assertEquals(pId18.toUri().getPath(), dirs.get(4).getPath().toUri().getPath());
    Assert.assertEquals(pId19.toUri().getPath(), dirs.get(5).getPath().toUri().getPath());
    Assert.assertEquals(pId23.toUri().getPath(), dirs.get(6).getPath().toUri().getPath());
    Assert.assertEquals(pId21.toUri().getPath(), dirs.get(7).getPath().toUri().getPath());
    Assert.assertEquals(pId22.toUri().getPath(), dirs.get(8).getPath().toUri().getPath());
    Assert.assertEquals(pId08.toUri().getPath(), dirs.get(9).getPath().toUri().getPath());
    Assert.assertEquals(pId06.toUri().getPath(), dirs.get(10).getPath().toUri().getPath());
    Assert.assertEquals(pId07.toUri().getPath(), dirs.get(11).getPath().toUri().getPath());
    Assert.assertEquals(pId02.toUri().getPath(), dirs.get(12).getPath().toUri().getPath());
    Assert.assertEquals(pId00.toUri().getPath(), dirs.get(13).getPath().toUri().getPath());
}
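
The helper createPath is not shown here; it lays out directories in the year/month/day/serial scheme that getHistoryDirsForCleaning expects. A plausible sketch under that assumption (the zero-padded format string is an assumption, not taken from the test; FsPermission comes from org.apache.hadoop.fs.permission):

    static Path createPath(FileContext fc, Path root, int year, int month, int day,
            String id) throws IOException {
        Path p = new Path(root, String.format("%04d/%02d/%02d/%s", year, month, day, id));
        fc.mkdir(p, FsPermission.getDirDefault(), true);  // true: create intermediate dirs
        return p;
    }
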
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) Calendar(java.util.Calendar) FileContext(org.apache.hadoop.fs.FileContext) Test(org.junit.Test)

Example 15 with FileContext

Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.

From the class TestJobHistoryParsing, method checkHistoryParsing: runs an MRApp job to completion, then uses FileContext to read the job summary and history files back, parses them with JobHistoryParser, and cross-checks the result against the live job, including the HistoryViewer output.

private void checkHistoryParsing(final int numMaps, final int numReduces, final int numSuccessfulMaps) throws Exception {
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name"));
    long amStartTimeEst = System.currentTimeMillis();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistory(numMaps, numReduces, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
    app.waitForState(job, JobState.SUCCEEDED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    String jobhistoryDir = JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf);
    FileContext fc = null;
    try {
        fc = FileContext.getFileContext(conf);
    } catch (IOException ioe) {
        LOG.info("Can not get FileContext", ioe);
        throw (new Exception("Can not get File Context"));
    }
    if (numMaps == numSuccessfulMaps) {
        String summaryFileName = JobHistoryUtils.getIntermediateSummaryFileName(jobId);
        Path summaryFile = new Path(jobhistoryDir, summaryFileName);
        String jobSummaryString = getJobSummary(fc, summaryFile);
        Assert.assertNotNull(jobSummaryString);
        Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100"));
        Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100"));
        Map<String, String> jobSummaryElements = new HashMap<String, String>();
        StringTokenizer strToken = new StringTokenizer(jobSummaryString, ",");
        while (strToken.hasMoreTokens()) {
            String keypair = strToken.nextToken();
            jobSummaryElements.put(keypair.split("=")[0], keypair.split("=")[1]);
        }
        Assert.assertEquals("JobId does not match", jobId.toString(), jobSummaryElements.get("jobId"));
        Assert.assertEquals("JobName does not match", "test", jobSummaryElements.get("jobName"));
        Assert.assertTrue("submitTime should not be 0", Long.parseLong(jobSummaryElements.get("submitTime")) != 0);
        Assert.assertTrue("launchTime should not be 0", Long.parseLong(jobSummaryElements.get("launchTime")) != 0);
        Assert.assertTrue("firstMapTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstMapTaskLaunchTime")) != 0);
        Assert.assertTrue("firstReduceTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstReduceTaskLaunchTime")) != 0);
        Assert.assertTrue("finishTime should not be 0", Long.parseLong(jobSummaryElements.get("finishTime")) != 0);
        Assert.assertEquals("Mismatch in num map slots", numSuccessfulMaps, Integer.parseInt(jobSummaryElements.get("numMaps")));
        Assert.assertEquals("Mismatch in num reduce slots", numReduces, Integer.parseInt(jobSummaryElements.get("numReduces")));
        Assert.assertEquals("User does not match", System.getProperty("user.name"), jobSummaryElements.get("user"));
        Assert.assertEquals("Queue does not match", "default", jobSummaryElements.get("queue"));
        Assert.assertEquals("Status does not match", "SUCCEEDED", jobSummaryElements.get("status"));
    }
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobInfo jobInfo;
    long numFinishedMaps;
    synchronized (fileInfo) {
        Path historyFilePath = fileInfo.getHistoryFile();
        FSDataInputStream in = null;
        LOG.info("JobHistoryFile is: " + historyFilePath);
        try {
            in = fc.open(fc.makeQualified(historyFilePath));
        } catch (IOException ioe) {
            LOG.info("Can not open history file: " + historyFilePath, ioe);
            throw (new Exception("Can not open History File"));
        }
        JobHistoryParser parser = new JobHistoryParser(in);
        final EventReader realReader = new EventReader(in);
        EventReader reader = Mockito.mock(EventReader.class);
        if (numMaps == numSuccessfulMaps) {
            reader = realReader;
        } else {
            // Hack!
            final AtomicInteger numFinishedEvents = new AtomicInteger(0);
            Mockito.when(reader.getNextEvent()).thenAnswer(new Answer<HistoryEvent>() {

                public HistoryEvent answer(InvocationOnMock invocation) throws IOException {
                    HistoryEvent event = realReader.getNextEvent();
                    if (event instanceof TaskFinishedEvent) {
                        numFinishedEvents.incrementAndGet();
                    }
                    if (numFinishedEvents.get() <= numSuccessfulMaps) {
                        return event;
                    } else {
                        throw new IOException("test");
                    }
                }
            });
        }
        jobInfo = parser.parse(reader);
        numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);
        if (numFinishedMaps != numMaps) {
            Exception parseException = parser.getParseException();
            Assert.assertNotNull("Didn't get expected parse exception", parseException);
        }
    }
    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"), jobInfo.getUsername());
    Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname());
    Assert.assertEquals("Incorrect queuename ", "default", jobInfo.getJobQueueName());
    Assert.assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath());
    Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps);
    Assert.assertEquals("incorrect finishedReduces ", numReduces, jobInfo.getFinishedReduces());
    Assert.assertEquals("incorrect uberized ", job.isUber(), jobInfo.getUberized());
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    int totalTasks = allTasks.size();
    Assert.assertEquals("total number of tasks is incorrect  ", (numMaps + numReduces), totalTasks);
    // Verify aminfo
    Assert.assertEquals(1, jobInfo.getAMInfos().size());
    Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0).getNodeManagerHost());
    AMInfo amInfo = jobInfo.getAMInfos().get(0);
    Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
    Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
    Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId().getApplicationAttemptId());
    Assert.assertTrue(amInfo.getStartTime() <= System.currentTimeMillis() && amInfo.getStartTime() >= amStartTimeEst);
    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
    // Assert at taskAttempt level
    for (TaskInfo taskInfo : allTasks.values()) {
        int taskAttemptCount = taskInfo.getAllTaskAttempts().size();
        Assert.assertEquals("total number of task attempts ", 1, taskAttemptCount);
        TaskAttemptInfo taInfo = taskInfo.getAllTaskAttempts().values().iterator().next();
        Assert.assertNotNull(taInfo.getContainerId());
        // Verify the wrong ctor is not being used. Remove after mrv1 is removed.
        Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
    }
    // Deep compare Job and JobInfo
    for (Task task : job.getTasks().values()) {
        TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
        Assert.assertNotNull("TaskInfo not found", taskInfo);
        for (TaskAttempt taskAttempt : task.getAttempts().values()) {
            TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(TypeConverter.fromYarn((taskAttempt.getID())));
            Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo);
            Assert.assertEquals("Incorrect shuffle port for task attempt", taskAttempt.getShufflePort(), taskAttemptInfo.getShufflePort());
            if (numMaps == numSuccessfulMaps) {
                Assert.assertEquals(MRApp.NM_HOST, taskAttemptInfo.getHostname());
                Assert.assertEquals(MRApp.NM_PORT, taskAttemptInfo.getPort());
                // Verify rack-name
                Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME);
            }
        }
    }
    // test output for HistoryViewer
    PrintStream stdps = System.out;
    try {
        System.setOut(new PrintStream(outContent));
        HistoryViewer viewer;
        synchronized (fileInfo) {
            viewer = new HistoryViewer(fc.makeQualified(fileInfo.getHistoryFile()).toString(), conf, true);
        }
        viewer.print();
        for (TaskInfo taskInfo : allTasks.values()) {
            String test = (taskInfo.getTaskStatus() == null ? "" : taskInfo.getTaskStatus()) + " " + taskInfo.getTaskType() + " task list for " + taskInfo.getTaskId().getJobID();
            Assert.assertTrue(outContent.toString().indexOf(test) > 0);
            Assert.assertTrue(outContent.toString().indexOf(taskInfo.getTaskId().toString()) > 0);
        }
    } finally {
        System.setOut(stdps);
    }
}
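
The helper getJobSummary is not shown; it only needs to read the summary file back through FileContext. A plausible sketch, assuming the summary is a single line of comma-separated key=value pairs as the tokenizing loop above implies (BufferedReader and InputStreamReader are from java.io, StandardCharsets from java.nio.charset):

    static String getJobSummary(FileContext fc, Path path) throws IOException {
        // fc.open returns an FSDataInputStream, which is a plain InputStream.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(fc.open(path), StandardCharsets.UTF_8))) {
            return reader.readLine();
        }
    }
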
Also used : MRAppWithHistory(org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory) Task(org.apache.hadoop.mapreduce.v2.app.job.Task) Configuration(org.apache.hadoop.conf.Configuration) HashMap(java.util.HashMap) TaskInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo) HistoryViewer(org.apache.hadoop.mapreduce.jobhistory.HistoryViewer) JobInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Path(org.apache.hadoop.fs.Path) EventReader(org.apache.hadoop.mapreduce.jobhistory.EventReader) PrintStream(java.io.PrintStream) HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) TaskID(org.apache.hadoop.mapreduce.TaskID) IOException(java.io.IOException) HistoryEvent(org.apache.hadoop.mapreduce.jobhistory.HistoryEvent) AMInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo) StringTokenizer(java.util.StringTokenizer) TaskFinishedEvent(org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent) JobHistoryParser(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) InvocationOnMock(org.mockito.invocation.InvocationOnMock) TaskAttemptInfo(org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) FileContext(org.apache.hadoop.fs.FileContext)

Aggregations

FileContext (org.apache.hadoop.fs.FileContext): 84
Path (org.apache.hadoop.fs.Path): 71
Test (org.junit.Test): 34
Configuration (org.apache.hadoop.conf.Configuration): 33
IOException (java.io.IOException): 29
File (java.io.File): 16
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 14
FileStatus (org.apache.hadoop.fs.FileStatus): 13
HashMap (java.util.HashMap): 12
FsPermission (org.apache.hadoop.fs.permission.FsPermission): 10
ArrayList (java.util.ArrayList): 9
FileSystem (org.apache.hadoop.fs.FileSystem): 8
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 8
ExecutorService (java.util.concurrent.ExecutorService): 7
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 7
URISyntaxException (java.net.URISyntaxException): 6
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 6
ExecutionException (java.util.concurrent.ExecutionException): 6
Future (java.util.concurrent.Future): 6
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 6
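
Across all of these examples the same FileContext setup recurs: obtain a context, qualify a working path, create it, and clean it up. A minimal self-contained sketch of that shared pattern (class and directory names are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class FileContextPattern {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            FileContext fc = FileContext.getLocalFSFileContext(conf);
            // Qualify a relative path against the local file system's working directory.
            Path base = fc.makeQualified(new Path("target", "filecontext-example"));
            // A null permission means "use the default, as modified by the umask".
            fc.mkdir(base, null, true);
            System.out.println(base + " is a directory: "
                    + fc.getFileStatus(base).isDirectory());
            // Recursive delete, mirroring the cleanup in TestEventFlow.
            fc.delete(base, true);
        }
    }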