Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
From class TestAMWebApp, method testMRWebAppSSLDisabled:
@Test
public void testMRWebAppSSLDisabled() throws Exception {
  MRApp app = new MRApp(2, 2, true, this.getClass().getName(), true) {

    @Override
    protected ClientService createClientService(AppContext context) {
      return new MRClientService(context);
    }
  };
  Configuration conf = new Configuration();
  // MR explicitly disables SSL in the AM web app, even though the
  // YARN HTTP policy is set to HTTPS_ONLY
  conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, Policy.HTTPS_ONLY.name());
  Job job = app.submit(conf);
  String hostPort = NetUtils.getHostPortString(((MRClientService) app.getClientService())
      .getWebApp().getListenerAddress());
  // http:// should be accessible
  URL httpUrl = new URL("http://" + hostPort);
  HttpURLConnection conn = (HttpURLConnection) httpUrl.openConnection();
  InputStream in = conn.getInputStream();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  IOUtils.copyBytes(in, out, 1024);
  Assert.assertTrue(out.toString().contains("MapReduce Application"));
  // https:// should not be accessible
  URL httpsUrl = new URL("https://" + hostPort);
  try {
    HttpURLConnection httpsConn = (HttpURLConnection) httpsUrl.openConnection();
    httpsConn.getInputStream();
    Assert.fail("https:// should not be accessible; the connection was expected to fail");
  } catch (Exception e) {
    Assert.assertTrue(e instanceof SSLException);
  }
  app.waitForState(job, JobState.SUCCEEDED);
  app.verifyCompleted();
}
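The fetch-and-assert pattern above can be factored into a small helper. A minimal sketch, reusing the same org.apache.hadoop.io.IOUtils call as the test; the fetchBody name is hypothetical and not part of TestAMWebApp:

// Hypothetical helper, not in the Hadoop source: read an HTTP response body
// into a String using the same IOUtils.copyBytes pattern as the test above.
private static String fetchBody(String url) throws IOException {
  HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
  try (InputStream in = conn.getInputStream();
       ByteArrayOutputStream out = new ByteArrayOutputStream()) {
    IOUtils.copyBytes(in, out, 1024);
    return out.toString();
  }
}

With it, the http check reduces to Assert.assertTrue(fetchBody("http://" + hostPort).contains("MapReduce Application")).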
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
From class TestAMWebApp, method testSingleTaskCounterView:
@Test
public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP, "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class, appContext, params);
}
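The getTaskParams helper called above is not reproduced on this page. A plausible sketch, mirroring the getJobParams helper shown next; the exact Hadoop implementation may differ:

// Approximate sketch of getTaskParams (compare getJobParams below);
// assumes the mock context exposes at least one job with one task.
public static Map<String, String> getTaskParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().entrySet().iterator().next().getKey();
  Map.Entry<TaskId, Task> e = appContext.getJob(jobId).getTasks().entrySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  params.put(AMParams.TASK_ID, MRApps.toString(e.getKey()));
  params.put(AMParams.TASK_TYPE, MRApps.taskSymbol(e.getValue().getType()));
  return params;
}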
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
From class TestAMWebApp, method getJobParams:
public static Map<String, String> getJobParams(AppContext appContext) {
  JobId jobId = appContext.getAllJobs().entrySet().iterator().next().getKey();
  Map<String, String> params = new HashMap<String, String>();
  params.put(AMParams.JOB_ID, MRApps.toString(jobId));
  return params;
}
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
From class TestAMWebApp, method testCountersView:
@Test
public void testCountersView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = getJobParams(appContext);
  WebAppTests.testPage(CountersPage.class, AppContext.class, appContext, params);
}
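The MockAppContext arguments are read here as (appid, numJobs, numTasks, numAttempts); under that assumption, a mock context with two jobs of four tasks each would be built as:

// Assumes MockAppContext(appid, numJobs, numTasks, numAttempts);
// verify against the MockAppContext constructor in your Hadoop version.
AppContext twoJobContext = new MockAppContext(0, 2, 4, 1);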
Use of org.apache.hadoop.mapreduce.v2.app.AppContext in project hadoop by apache.
From class TestTaskImpl, method testCountersWithSpeculation:
@Test
public void testCountersWithSpeculation() {
  mockTask = new MockTaskImpl(jobId, partition, dispatcher.getEventHandler(),
      remoteJobConfFile, conf, taskAttemptListener, jobToken, credentials,
      clock, startCount, metrics, appContext, TaskType.MAP) {

    @Override
    protected int getMaxAttempts() {
      return 1;
    }
  };
  TaskId taskId = getNewTaskID();
  scheduleTaskAttempt(taskId);
  launchTaskAttempt(getLastAttempt().getAttemptId());
  updateLastAttemptState(TaskAttemptState.RUNNING);
  MockTaskAttemptImpl baseAttempt = getLastAttempt();
  // add a speculative attempt
  mockTask.handle(new TaskTAttemptEvent(getLastAttempt().getAttemptId(),
      TaskEventType.T_ADD_SPEC_ATTEMPT));
  launchTaskAttempt(getLastAttempt().getAttemptId());
  updateLastAttemptState(TaskAttemptState.RUNNING);
  MockTaskAttemptImpl specAttempt = getLastAttempt();
  assertEquals(2, taskAttempts.size());
  Counters specAttemptCounters = new Counters();
  Counter cpuCounter = specAttemptCounters.findCounter(TaskCounter.CPU_MILLISECONDS);
  cpuCounter.setValue(1000);
  specAttempt.setCounters(specAttemptCounters);
  // have the speculative attempt succeed, with the base attempt also at 1.0 progress
  commitTaskAttempt(specAttempt.getAttemptId());
  specAttempt.setProgress(1.0f);
  specAttempt.setState(TaskAttemptState.SUCCEEDED);
  mockTask.handle(new TaskTAttemptEvent(specAttempt.getAttemptId(),
      TaskEventType.T_ATTEMPT_SUCCEEDED));
  assertEquals(TaskState.SUCCEEDED, mockTask.getState());
  baseAttempt.setProgress(1.0f);
  Counters taskCounters = mockTask.getCounters();
  assertEquals("wrong counters for task", specAttemptCounters, taskCounters);
}
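The final assertion passes because TaskImpl reports the counters of its best attempt, which after T_ATTEMPT_SUCCEEDED is the speculative one. A simplified sketch of that selection, not the verbatim Hadoop implementation:

// Simplified sketch of TaskImpl.getCounters(): prefer the successful
// attempt's counters; the real method also takes a read lock and falls
// back to empty counters when no attempt exists.
public Counters getCounters() {
  TaskAttempt best = selectBestAttempt();  // succeeded attempt, else most progress
  return (best != null) ? best.getCounters() : TaskAttemptImpl.EMPTY_COUNTERS;
}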