
Example 1 with TimelineStore

Use of org.apache.hadoop.yarn.server.timeline.TimelineStore in the Apache Hadoop project.

From the class TestApplicationHistoryManagerOnTimelineStore, method createStore:

public static TimelineStore createStore(int scale) throws Exception {
    // Back the store with an in-memory implementation and seed it with test data;
    // the scale argument controls how much data prepareTimelineStore generates.
    TimelineStore store = new MemoryTimelineStore();
    prepareTimelineStore(store, scale);
    return store;
}
Also used: MemoryTimelineStore (org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore)
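
The store returned here is what the remaining examples query. As a minimal sketch (not taken from the Hadoop test suite; the entity id, type, and event name are illustrative, and it assumes the YARN timeline server classes are on the classpath), this is the basic write/read cycle against a MemoryTimelineStore, using the same ten-argument getEntities call the tests on this page rely on:

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.timeline.TimelineStore;

public class TimelineStoreSketch {

    public static void main(String[] args) throws Exception {
        TimelineStore store = new MemoryTimelineStore();

        // Build one entity carrying a single event (names are illustrative only).
        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId("example_entity_1");
        entity.setEntityType("EXAMPLE_TYPE");
        entity.setStartTime(System.currentTimeMillis());

        TimelineEvent event = new TimelineEvent();
        event.setEventType("EXAMPLE_EVENT");
        event.setTimestamp(System.currentTimeMillis());
        entity.addEvent(event);

        TimelineEntities entities = new TimelineEntities();
        entities.addEntity(entity);
        store.put(entities);

        // Read back by entity type; the trailing nulls mirror the getEntities(...)
        // calls in the tests on this page (no window, filters, or limit applied).
        TimelineEntities fetched = store.getEntities("EXAMPLE_TYPE",
                null, null, null, null, null, null, null, null, null);
        System.out.println(fetched.getEntities().size());
    }
}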

Example 2 with TimelineStore

Use of org.apache.hadoop.yarn.server.timeline.TimelineStore in the Apache Hadoop project.

From the class TestAHSWebServices, method setupClass:

@BeforeClass
public static void setupClass() throws Exception {
    conf = new YarnConfiguration();
    TimelineStore store = TestApplicationHistoryManagerOnTimelineStore.createStore(MAX_APPS);
    // Wrap the in-memory store in a TimelineDataManager guarded by timeline ACLs.
    TimelineACLsManager aclsManager = new TimelineACLsManager(conf);
    aclsManager.setTimelineStore(store);
    TimelineDataManager dataManager = new TimelineDataManager(store, aclsManager);
    conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
    conf.set(YarnConfiguration.YARN_ADMIN_ACL, "foo");
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
    dataManager.init(conf);
    ApplicationACLsManager appAclsManager = new ApplicationACLsManager(conf);
    ApplicationHistoryManagerOnTimelineStore historyManager = new ApplicationHistoryManagerOnTimelineStore(dataManager, appAclsManager);
    historyManager.init(conf);
    historyClientService = new ApplicationHistoryClientService(historyManager) {

        @Override
        protected void serviceStart() throws Exception {
        // Do Nothing
        }
    };
    historyClientService.init(conf);
    historyClientService.start();
    // Stub out the RM lookup so that NM_ID resolves to a fixed NM web address.
    ahsWebservice = new AHSWebServices(historyClientService, conf) {

        @Override
        public String getNMWebAddressFromRM(Configuration configuration, String nodeId) throws ClientHandlerException, UniformInterfaceException, JSONException {
            if (nodeId.equals(NM_ID)) {
                return NM_WEBADDRESS;
            }
            return null;
        }
    };
    fs = FileSystem.get(conf);
    GuiceServletConfig.setInjector(Guice.createInjector(new WebServletModule()));
}
Also used: ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException), Configuration (org.apache.hadoop.conf.Configuration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), ApplicationHistoryClientService (org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryClientService), JSONException (org.codehaus.jettison.json.JSONException), TimelineACLsManager (org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager), ServletException (javax.servlet.ServletException), UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException), TimelineDataManager (org.apache.hadoop.yarn.server.timeline.TimelineDataManager), ApplicationACLsManager (org.apache.hadoop.yarn.server.security.ApplicationACLsManager), ApplicationHistoryManagerOnTimelineStore (org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerOnTimelineStore), TestApplicationHistoryManagerOnTimelineStore (org.apache.hadoop.yarn.server.applicationhistoryservice.TestApplicationHistoryManagerOnTimelineStore), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore), BeforeClass (org.junit.BeforeClass)

Example 3 with TimelineStore

Use of org.apache.hadoop.yarn.server.timeline.TimelineStore in the Apache Hadoop project.

From the class TestMRTimelineEventHandling, method testMRTimelineEventHandling:

@Test
public void testMRTimelineEventHandling() throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    MiniMRYarnCluster cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestMRTimelineEventHandling.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
        String localPathRoot = System.getProperty("test.build.data", "build/test/data");
        Path inDir = new Path(localPathRoot, "input");
        Path outDir = new Path(localPathRoot, "output");
        // Run a successful job; expect one MAPREDUCE_JOB entity whose newest event
        // is JOB_FINISHED and whose oldest event is AM_STARTED.
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(0).getEventType());
        // Run a failing job; a second MAPREDUCE_JOB entity should appear with
        // JOB_FAILED as its newest event.
        job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.FAILED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(2, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
        Assert.assertEquals("MAPREDUCE_JOB", tEntity.getEntityType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(tEntity.getEvents().size() - 1).getEventType());
        Assert.assertEquals(EventType.JOB_FAILED.toString(), tEntity.getEvents().get(0).getEventType());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), Configuration (org.apache.hadoop.conf.Configuration), MiniMRYarnCluster (org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore), Test (org.junit.Test)

Example 4 with TimelineStore

Use of org.apache.hadoop.yarn.server.timeline.TimelineStore in the Apache Hadoop project.

From the class JHEventHandlerForSigtermTest, method testTimelineEventHandling:

// Have JobHistoryEventHandler handle some events and make sure they get
// stored to the Timeline store
@Test(timeout = 50000)
public void testTimelineEventHandling() throws Exception {
    TestParams t = new TestParams(RunningAppContext.class, false);
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    long currentTime = System.currentTimeMillis();
    try (MiniYARNCluster yarnCluster = new MiniYARNCluster(TestJobHistoryEventHandler.class.getSimpleName(), 1, 1, 1, 1)) {
        yarnCluster.init(conf);
        yarnCluster.start();
        Configuration confJHEH = new YarnConfiguration(conf);
        confJHEH.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        confJHEH.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + yarnCluster.getApplicationHistoryServer().getPort());
        JHEvenHandlerForTest jheh = new JHEvenHandlerForTest(t.mockAppContext, 0);
        jheh.init(confJHEH);
        jheh.start();
        TimelineStore ts = yarnCluster.getApplicationHistoryServer().getTimelineStore();
        // Post an AM_STARTED event at currentTime - 10 and verify it is stored.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new AMStartedEvent(t.appAttemptId, 200, t.containerId, "nmhost", 3000, 4000, -1), currentTime - 10));
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(1, tEntity.getEvents().size());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(0).getTimestamp());
        // Post JOB_SUBMITTED at currentTime + 10; the store returns events newest-first.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new JobSubmittedEvent(TypeConverter.fromYarn(t.jobId), "name", "user", 200, "/foo/job.xml", new HashMap<JobACL, AccessControlList>(), "default"), currentTime + 10));
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(2, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(1).getTimestamp());
        // Post JOB_QUEUE_CHANGED at currentTime - 20; as the oldest event it sorts to the end.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new JobQueueChangeEvent(TypeConverter.fromYarn(t.jobId), "q2"), currentTime - 20));
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(3, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(2).getTimestamp());
        // Post JOB_FINISHED at currentTime.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new JobFinishedEvent(TypeConverter.fromYarn(t.jobId), 0, 0, 0, 0, 0, new Counters(), new Counters(), new Counters()), currentTime));
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(4, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(3).getEventType());
        Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime, tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(2).getTimestamp());
        Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(3).getTimestamp());
        // Post a job-unsuccessful-completion event (KILLED) at currentTime + 20; it surfaces as JOB_KILLED.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(t.jobId), 0, 0, 0, JobStateInternal.KILLED.toString()), currentTime + 20));
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.jobId.toString(), tEntity.getEntityId());
        Assert.assertEquals(5, tEntity.getEvents().size());
        Assert.assertEquals(EventType.JOB_KILLED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(EventType.JOB_SUBMITTED.toString(), tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(EventType.JOB_FINISHED.toString(), tEntity.getEvents().get(2).getEventType());
        Assert.assertEquals(EventType.AM_STARTED.toString(), tEntity.getEvents().get(3).getEventType());
        Assert.assertEquals(EventType.JOB_QUEUE_CHANGED.toString(), tEntity.getEvents().get(4).getEventType());
        Assert.assertEquals(currentTime + 20, tEntity.getEvents().get(0).getTimestamp());
        Assert.assertEquals(currentTime + 10, tEntity.getEvents().get(1).getTimestamp());
        Assert.assertEquals(currentTime, tEntity.getEvents().get(2).getTimestamp());
        Assert.assertEquals(currentTime - 10, tEntity.getEvents().get(3).getTimestamp());
        Assert.assertEquals(currentTime - 20, tEntity.getEvents().get(4).getTimestamp());
        // Post TASK_STARTED events for a map task and then a reduce task against the same task id.
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new TaskStartedEvent(t.taskID, 0, TaskType.MAP, "")));
        entities = ts.getEntities("MAPREDUCE_TASK", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
        Assert.assertEquals(1, tEntity.getEvents().size());
        Assert.assertEquals(EventType.TASK_STARTED.toString(), tEntity.getEvents().get(0).getEventType());
        Assert.assertEquals(TaskType.MAP.toString(), tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));
        handleEvent(jheh, new JobHistoryEvent(t.jobId, new TaskStartedEvent(t.taskID, 0, TaskType.REDUCE, "")));
        entities = ts.getEntities("MAPREDUCE_TASK", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        tEntity = entities.getEntities().get(0);
        Assert.assertEquals(t.taskID.toString(), tEntity.getEntityId());
        Assert.assertEquals(2, tEntity.getEvents().size());
        Assert.assertEquals(EventType.TASK_STARTED.toString(), tEntity.getEvents().get(1).getEventType());
        Assert.assertEquals(TaskType.REDUCE.toString(), tEntity.getEvents().get(0).getEventInfo().get("TASK_TYPE"));
        Assert.assertEquals(TaskType.MAP.toString(), tEntity.getEvents().get(1).getEventInfo().get("TASK_TYPE"));
    }
}
Also used: AccessControlList (org.apache.hadoop.security.authorize.AccessControlList), Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), Counters (org.apache.hadoop.mapreduce.Counters), MiniYARNCluster (org.apache.hadoop.yarn.server.MiniYARNCluster), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore), JobACL (org.apache.hadoop.mapreduce.JobACL), Test (org.junit.Test)
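
The index and timestamp assertions above depend on the store returning an entity's events newest-first, regardless of the order in which they were written. A hedged standalone sketch of that behavior, assuming MemoryTimelineStore applies the same descending-timestamp ordering seen in the test (entity and event names below are made up for illustration):

import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.timeline.TimelineStore;

public class TimelineEventOrderSketch {

    // Write a single event for the given entity id/type to the store.
    private static void putEvent(TimelineStore store, String type, String id,
            String eventType, long timestamp) throws Exception {
        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId(id);
        entity.setEntityType(type);
        entity.setStartTime(timestamp);
        TimelineEvent event = new TimelineEvent();
        event.setEventType(eventType);
        event.setTimestamp(timestamp);
        entity.addEvent(event);
        TimelineEntities entities = new TimelineEntities();
        entities.addEntity(entity);
        store.put(entities);
    }

    public static void main(String[] args) throws Exception {
        TimelineStore store = new MemoryTimelineStore();
        long now = System.currentTimeMillis();

        // Write a newer event first, then an older one for the same entity.
        putEvent(store, "EXAMPLE_TYPE", "example_entity_1", "NEWER_EVENT", now);
        putEvent(store, "EXAMPLE_TYPE", "example_entity_1", "OLDER_EVENT", now - 10);

        TimelineEntity fetched = store.getEntities("EXAMPLE_TYPE",
                null, null, null, null, null, null, null, null, null)
                .getEntities().get(0);

        // If the ordering assumption holds, NEWER_EVENT is at index 0 and
        // OLDER_EVENT at index 1, matching the assertions in the test above.
        System.out.println(fetched.getEvents().get(0).getEventType());
        System.out.println(fetched.getEvents().get(1).getEventType());
    }
}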

Example 5 with TimelineStore

Use of org.apache.hadoop.yarn.server.timeline.TimelineStore in the Apache Hadoop project.

From the class TestMRTimelineEventHandling, method testMapreduceJobTimelineServiceEnabled:

@Test
public void testMapreduceJobTimelineServiceEnabled() throws Exception {
    Configuration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
    MiniMRYarnCluster cluster = null;
    FileSystem fs = null;
    Path inDir = new Path(GenericTestUtils.getTempPath("input"));
    Path outDir = new Path(GenericTestUtils.getTempPath("output"));
    try {
        fs = FileSystem.get(conf);
        cluster = new MiniMRYarnCluster(TestMRTimelineEventHandling.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
        // With per-job timeline emission disabled, no MAPREDUCE_JOB entity should
        // be written for this job.
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(0, entities.getEntities().size());
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
        deletePaths(fs, inDir, outDir);
    }
    // Second pass: this time the configuration used to start the cluster has emission
    // enabled; the per-job MAPREDUCE_JOB_EMIT_TIMELINE_DATA setting is then toggled
    // off and on to verify it is what controls whether entities are written.
    conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
    cluster = null;
    try {
        cluster = new MiniMRYarnCluster(TestJobHistoryEventHandler.class.getSimpleName(), 1);
        cluster.init(conf);
        cluster.start();
        conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, MiniYARNCluster.getHostname() + ":" + cluster.getApplicationHistoryServer().getPort());
        TimelineStore ts = cluster.getApplicationHistoryServer().getTimelineStore();
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
        RunningJob job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        TimelineEntities entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(0, entities.getEntities().size());
        conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, true);
        job = UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
        Assert.assertEquals(JobStatus.SUCCEEDED, job.getJobStatus().getState().getValue());
        entities = ts.getEntities("MAPREDUCE_JOB", null, null, null, null, null, null, null, null, null);
        Assert.assertEquals(1, entities.getEntities().size());
        TimelineEntity tEntity = entities.getEntities().get(0);
        Assert.assertEquals(job.getID().toString(), tEntity.getEntityId());
    } finally {
        if (cluster != null) {
            cluster.stop();
        }
        deletePaths(fs, inDir, outDir);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), Configuration (org.apache.hadoop.conf.Configuration), MiniMRYarnCluster (org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster), TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities), FileSystem (org.apache.hadoop.fs.FileSystem), TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity), TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore), Test (org.junit.Test)

Aggregations

TimelineStore (org.apache.hadoop.yarn.server.timeline.TimelineStore): 6 usages
Configuration (org.apache.hadoop.conf.Configuration): 5 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 5 usages
TimelineEntities (org.apache.hadoop.yarn.api.records.timeline.TimelineEntities): 3 usages
TimelineEntity (org.apache.hadoop.yarn.api.records.timeline.TimelineEntity): 3 usages
Test (org.junit.Test): 3 usages
Path (org.apache.hadoop.fs.Path): 2 usages
MiniMRYarnCluster (org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster): 2 usages
ApplicationACLsManager (org.apache.hadoop.yarn.server.security.ApplicationACLsManager): 2 usages
TimelineDataManager (org.apache.hadoop.yarn.server.timeline.TimelineDataManager): 2 usages
TimelineACLsManager (org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager): 2 usages
BeforeClass (org.junit.BeforeClass): 2 usages
ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException): 1 usage
UniformInterfaceException (com.sun.jersey.api.client.UniformInterfaceException): 1 usage
ServletException (javax.servlet.ServletException): 1 usage
FileSystem (org.apache.hadoop.fs.FileSystem): 1 usage
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 1 usage
Counters (org.apache.hadoop.mapreduce.Counters): 1 usage
JobACL (org.apache.hadoop.mapreduce.JobACL): 1 usage
AccessControlList (org.apache.hadoop.security.authorize.AccessControlList): 1 usage