Use of org.apache.hadoop.mapreduce.v2.app.MockAppContext in project hadoop by apache.
From class TestAMWebApp, method testCountersView:
@Test
public void testCountersView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = getJobParams(appContext);
  WebAppTests.testPage(CountersPage.class, AppContext.class, appContext, params);
}
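The getJobParams helper is referenced but not shown here. A minimal sketch of what such a helper likely does, assuming it simply takes the first job from the mock context and exposes its id under the AMParams.JOB_ID key (the actual implementation in TestAMWebApp may differ):

// Sketch only: build the request params the counters page expects,
// using the first (and only) job created by MockAppContext.
public static Map<String, String> getJobParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().keySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  return params;
}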
Use of org.apache.hadoop.mapreduce.v2.app.MockAppContext in project hadoop by apache.
From class TestHSWebApp, method testAttemptsWithJobView:
@Test
public void testAttemptsWithJobView() {
  LOG.info("HsAttemptsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  JobId id = ctx.getAllJobs().keySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(JOB_ID, id.toString());
  params.put(TASK_TYPE, "m");
  params.put(ATTEMPT_STATE, "SUCCESSFUL");
  WebAppTests.testPage(HsAttemptsPage.class, AppContext.class, ctx, params);
}
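Here "m" is the task-type symbol for map tasks and ATTEMPT_STATE filters the listed attempts. A hypothetical variant of the same check for reduce-side attempts, assuming "r" is the corresponding reduce symbol (only the filter params change):

// Hypothetical variant: render the same page filtered to reduce attempts.
Map<String, String> reduceParams = new HashMap<String, String>();
reduceParams.put(JOB_ID, id.toString());
reduceParams.put(TASK_TYPE, "r"); // assumption: "r" = reduce, as "m" = map above
reduceParams.put(ATTEMPT_STATE, "SUCCESSFUL");
WebAppTests.testPage(HsAttemptsPage.class, AppContext.class, ctx, reduceParams);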
Use of org.apache.hadoop.mapreduce.v2.app.MockAppContext in project hadoop by apache.
From class TestHSWebApp, method testTasksViewNaturalSortType:
@Test
public void testTasksViewNaturalSortType() {
  LOG.info("HsTasksPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  Injector testPage = WebAppTests.testPage(HsTasksPage.class, AppContext.class,
      appContext, params);
  View viewInstance = testPage.getInstance(HsTasksPage.class);
  Map<String, String> moreParams =
      viewInstance.context().requestContext().moreParams();
  String appTableColumnsMeta = moreParams.get("ui.dataTables.selector.init");
  Assert.assertTrue(appTableColumnsMeta.indexOf("natural") != -1);
}
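TestAMWebApp.getTaskParams is likewise not shown. A hedged sketch of its likely shape, assuming it selects the first task of the first mock job and fills the job, task, and task-type params (check TestAMWebApp for the exact implementation):

// Sketch only: params for a single-task page, derived from the mock context.
public static Map<String, String> getTaskParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().keySet().iterator().next();
  Map.Entry<TaskId, Task> e =
      appContext.getJob(jobId).getTasks().entrySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  params.put(AMParams.TASK_ID, MRApps.toString(e.getKey()));
  params.put(AMParams.TASK_TYPE, MRApps.taskSymbol(e.getValue().getType()));
  return params;
}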
Use of org.apache.hadoop.mapreduce.v2.app.MockAppContext in project hadoop by apache.
From class TestHSWebApp, method testJobCounterViewForKilledJob:
@Test
public void testJobCounterViewForKilledJob() {
  LOG.info("JobCounterViewForKilledJob");
  AppContext appContext = new MockAppContext(0, 1, 1, 1, true);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsCountersPage.class, AppContext.class, appContext, params);
}
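The extra boolean argument is what distinguishes this mock from the earlier ones; judging by the test name, it asks MockAppContext to populate jobs containing failed or killed tasks so the counters page is rendered for an unsuccessful job. A sketch of the two constructor shapes used in these tests, with the parameter meanings stated as assumptions:

// Assumed signature: MockAppContext(appid, numJobs, numTasks, numAttempts[, hasFailedTasks]).
AppContext allSuccessful = new MockAppContext(0, 1, 1, 1);
AppContext withFailures = new MockAppContext(0, 1, 1, 1, true); // assumption: true => include failed/killed tasks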
Use of org.apache.hadoop.mapreduce.v2.app.MockAppContext in project hadoop by apache.
From class TestHSWebApp, method testLogsViewSingle:
@Test
public void testLogsViewSingle() throws IOException {
  LOG.info("HsLogsPage with params for single log and data limits");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  final Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
  params.put("start", "-2048");
  params.put("end", "-1024");
  params.put(CONTAINER_LOG_TYPE, "syslog");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1).toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class,
      AppContext.class, ctx, params, new AbstractModule() {
        @Override
        protected void configure() {
          bind(Configuration.class).toInstance(conf);
        }
      });
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Logs not available for container_10_0001_01_000001."
      + " Aggregation may not be complete, "
      + "Check back later or try the nodemanager at "
      + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
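WebAppTests.getPrintWriter returns a Mockito spy of the writer the page rendered into, so the assertion above is an exact-string verify of the "logs not available" notice. When the precise wording is not the point, a looser check is possible; a hypothetical variant using Mockito's ArgumentCaptor (standard Mockito API, not part of the original test):

// Hypothetical looser assertion: capture everything written to the page
// and check that the notice mentions the requested container.
ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
verify(spyPw, atLeastOnce()).write(captor.capture());
boolean mentionsContainer = false;
for (String written : captor.getAllValues()) {
  if (written.contains("Logs not available for container_10_0001_01_000001")) {
    mentionsContainer = true;
    break;
  }
}
Assert.assertTrue(mentionsContainer);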