Use of java.io.PrintWriter in project hadoop by apache.
From the class TestContainerLaunch, method testInvalidSymlinkDiagnostics.
// test that the diagnostics are generated
@Test(timeout = 20000)
public void testInvalidSymlinkDiagnostics() throws IOException {
  File shellFile = null;
  File tempFile = null;
  String symLink = Shell.WINDOWS ? "test.cmd" : "test";
  File symLinkFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    tempFile = Shell.appendScriptExtension(tmpDir, "temp");
    String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" : "echo \"hello\"";
    PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
    FileUtil.setExecutable(shellFile, true);
    writer.println(timeoutCommand);
    writer.close();
    Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
    // This path does not exist, so resolving the symlink should fail with "No such file".
    Path invalidPath = new Path(shellFile.getAbsolutePath() + "randomPath");
    resources.put(invalidPath, Arrays.asList(symLink));
    FileOutputStream fos = new FileOutputStream(tempFile);
    Map<String, String> env = new HashMap<String, String>();
    List<String> commands = new ArrayList<String>();
    if (Shell.WINDOWS) {
      commands.add("cmd");
      commands.add("/c");
      commands.add("\"" + symLink + "\"");
    } else {
      commands.add("/bin/sh ./\\\"" + symLink + "\\\"");
    }
    DefaultContainerExecutor defaultContainerExecutor = new DefaultContainerExecutor();
    defaultContainerExecutor.setConf(new YarnConfiguration());
    defaultContainerExecutor.writeLaunchEnv(fos, env, resources, commands,
        new Path(localLogDir.getAbsolutePath()), "user");
    fos.flush();
    fos.close();
    FileUtil.setExecutable(tempFile, true);
    Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
        new String[] { tempFile.getAbsolutePath() }, tmpDir);
    String diagnostics = null;
    try {
      shexc.execute();
      Assert.fail("Should catch exception");
    } catch (ExitCodeException e) {
      diagnostics = e.getMessage();
    }
    Assert.assertNotNull(diagnostics);
    Assert.assertTrue(shexc.getExitCode() != 0);
    symLinkFile = new File(tmpDir, symLink);
  } finally {
    // cleanup
    if (shellFile != null && shellFile.exists()) {
      shellFile.delete();
    }
    if (tempFile != null && tempFile.exists()) {
      tempFile.delete();
    }
    if (symLinkFile != null && symLinkFile.exists()) {
      symLinkFile.delete();
    }
  }
}
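The test above closes its PrintWriter by hand, which would leak the underlying FileOutputStream if println threw. A minimal sketch of the same script-writing step using try-with-resources instead (the ScriptWriterSketch class and writeScript helper are hypothetical, not part of the Hadoop test):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;

public class ScriptWriterSketch {

  // Write a one-line script; try-with-resources flushes and closes the
  // PrintWriter (and the wrapped FileOutputStream) even if println throws.
  static void writeScript(File scriptFile, String command) throws IOException {
    try (PrintWriter writer = new PrintWriter(new FileOutputStream(scriptFile))) {
      writer.println(command);
    }
  }

  public static void main(String[] args) throws IOException {
    writeScript(new File("hello.sh"), "echo \"hello\"");
  }
}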
Use of java.io.PrintWriter in project hadoop by apache.
From the class TestBlocks, method testHsJobsBlock.
/**
 * Test HsJobsBlock's rendering.
 */
@Test
public void testHsJobsBlock() {
  AppContext ctx = mock(AppContext.class);
  Map<JobId, Job> jobs = new HashMap<JobId, Job>();
  Job job = getJob();
  jobs.put(job.getID(), job);
  when(ctx.getAllJobs()).thenReturn(jobs);
  HsJobsBlock block = new HsJobsBlockForTest(ctx);
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();
  assertTrue(data.toString().contains("JobName"));
  assertTrue(data.toString().contains("UserName"));
  assertTrue(data.toString().contains("QueueName"));
  assertTrue(data.toString().contains("SUCCEEDED"));
}
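The pattern here is to render the block into a PrintWriter backed by an in-memory buffer (the data field is declared elsewhere in TestBlocks), flush, then assert on the captured text. A self-contained sketch of that capture pattern, assuming a ByteArrayOutputStream in place of the test's data field:

import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;

public class RenderCaptureSketch {

  public static void main(String[] args) {
    // In-memory buffer standing in for the test's 'data' field.
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    PrintWriter pWriter = new PrintWriter(data);
    pWriter.print("JobName"); // stands in for block.render(html)
    // PrintWriter buffers internally: flush before reading the captured
    // output, or data.toString() may come back empty.
    pWriter.flush();
    System.out.println(data.toString().contains("JobName")); // true
  }
}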
Use of java.io.PrintWriter in project hadoop by apache.
From the class TestBlocks, method testAttemptsBlock.
/**
 * Test AttemptsBlock's rendering.
 */
@Test
public void testAttemptsBlock() {
  AppContext ctx = mock(AppContext.class);
  AppForTest app = new AppForTest(ctx);
  Task task = getTask(0);
  Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
  TaskAttempt attempt = mock(TaskAttempt.class);
  TaskAttemptId taId = new TaskAttemptIdPBImpl();
  taId.setId(0);
  taId.setTaskId(task.getID());
  when(attempt.getID()).thenReturn(taId);
  when(attempt.getNodeHttpAddress()).thenReturn("Node address");
  ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 5);
  ApplicationAttemptId appAttemptId = ApplicationAttemptIdPBImpl.newInstance(appId, 1);
  ContainerId containerId = ContainerIdPBImpl.newContainerId(appAttemptId, 1);
  when(attempt.getAssignedContainerID()).thenReturn(containerId);
  when(attempt.getAssignedContainerMgrAddress()).thenReturn("assignedContainerMgrAddress");
  when(attempt.getNodeRackName()).thenReturn("nodeRackName");
  final long taStartTime = 100002L;
  final long taFinishTime = 100012L;
  final long taShuffleFinishTime = 100010L;
  final long taSortFinishTime = 100011L;
  final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
  when(attempt.getLaunchTime()).thenReturn(taStartTime);
  when(attempt.getFinishTime()).thenReturn(taFinishTime);
  when(attempt.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(attempt.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(attempt.getState()).thenReturn(taState);
  TaskAttemptReport taReport = mock(TaskAttemptReport.class);
  when(taReport.getStartTime()).thenReturn(taStartTime);
  when(taReport.getFinishTime()).thenReturn(taFinishTime);
  when(taReport.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
  when(taReport.getSortFinishTime()).thenReturn(taSortFinishTime);
  when(taReport.getContainerId()).thenReturn(containerId);
  when(taReport.getProgress()).thenReturn(1.0f);
  when(taReport.getStateString()).thenReturn("Processed 128/128 records <p> \n");
  when(taReport.getTaskAttemptState()).thenReturn(taState);
  when(taReport.getDiagnosticInfo()).thenReturn("");
  when(attempt.getReport()).thenReturn(taReport);
  attempts.put(taId, attempt);
  when(task.getAttempts()).thenReturn(attempts);
  app.setTask(task);
  Job job = mock(Job.class);
  when(job.getUserName()).thenReturn("User");
  app.setJob(job);
  AttemptsBlockForTest block = new AttemptsBlockForTest(app);
  block.addParameter(AMParams.TASK_TYPE, "r");
  PrintWriter pWriter = new PrintWriter(data);
  Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
  block.render(html);
  pWriter.flush();
  // information about the attempts should appear in the rendered output
  assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
  assertTrue(data.toString().contains("SUCCEEDED"));
  assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
  assertTrue(data.toString().contains("Processed 128\\/128 records <p> \\n"));
  assertTrue(data.toString().contains("_0005_01_000001:attempt_0_0001_r_000000_0:User:"));
  assertTrue(data.toString().contains("100002"));
  assertTrue(data.toString().contains("100010"));
  assertTrue(data.toString().contains("100011"));
  assertTrue(data.toString().contains("100012"));
}
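The paired assertFalse/assertTrue on the state string check that the block escapes the raw value before embedding it in a JavaScript string: the literal "Processed 128/128 records <p> \n" must not appear, but its escaped form must. A small hypothetical escapeForJs helper illustrating just the escaping those assertions expect (Hadoop's actual escaper is not shown in this snippet):

public class EscapeSketch {

  // Escape '/' and newline the way the assertions above expect; this is an
  // illustration only, not the escaper the Hadoop block actually uses.
  static String escapeForJs(String s) {
    return s.replace("/", "\\/").replace("\n", "\\n");
  }

  public static void main(String[] args) {
    // Prints: Processed 128\/128 records <p> \n
    System.out.println(escapeForJs("Processed 128/128 records <p> \n"));
  }
}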
Use of java.io.PrintWriter in project hadoop by apache.
From the class TestHSWebApp, method testLogsViewBadStartEnd.
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1).toString());
  params.put(NM_NODENAME, NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx, params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
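Here WebAppTests.getPrintWriter hands back a Mockito-instrumented PrintWriter, so verify can assert the exact error strings written to the page. A self-contained sketch of verifying writes against a spied PrintWriter (the StringWriter backing is an assumption made for this example):

import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

import java.io.PrintWriter;
import java.io.StringWriter;

public class SpyWriterSketch {

  public static void main(String[] args) {
    // Spy wraps a real PrintWriter: writes go through, Mockito records them.
    PrintWriter spyPw = spy(new PrintWriter(new StringWriter()));
    spyPw.write("Invalid log start value: foo"); // stands in for page rendering
    verify(spyPw).write("Invalid log start value: foo"); // passes
  }
}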
Use of java.io.PrintWriter in project hadoop by apache.
From the class TestHSWebApp, method testLogsView2.
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1).toString());
  params.put(NM_NODENAME, NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector = WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx, params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Aggregation is not enabled. Try the nodemanager at "
      + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}