Example usage of org.apache.hadoop.yarn.api.records.URL in the Apache Hadoop project:
class TestLogAggregationService, method testLogAggregationForRealContainerLaunch.
@Test
public void testLogAggregationForRealContainerLaunch() throws IOException, InterruptedException, YarnException {
this.containerManager.start();
// Shell script the container will execute; it writes the three standard
// log files (stdout/stderr/syslog) that log aggregation should collect.
File scriptFile = new File(tmpDir, "scriptFile.sh");
// try-with-resources guarantees the writer is closed (and the script fully
// flushed to disk) even if one of the write calls throws.
try (PrintWriter fileWriter = new PrintWriter(scriptFile)) {
fileWriter.write("\necho Hello World! Stdout! > " + new File(localLogDir, "stdout"));
fileWriter.write("\necho Hello World! Stderr! > " + new File(localLogDir, "stderr"));
fileWriter.write("\necho Hello World! Syslog! > " + new File(localLogDir, "syslog"));
}
ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class);
// ////// Construct the Container-id
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(appId, 1);
ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0);
// Localize the script itself so the container can run it.
URL resource_alpha = URL.fromPath(localFS.makeQualified(new Path(scriptFile.getAbsolutePath())));
LocalResource rsrc_alpha = recordFactory.newRecordInstance(LocalResource.class);
rsrc_alpha.setResource(resource_alpha);
rsrc_alpha.setSize(-1);
rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
rsrc_alpha.setType(LocalResourceType.FILE);
rsrc_alpha.setTimestamp(scriptFile.lastModified());
String destinationFile = "dest_file";
Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
localResources.put(destinationFile, rsrc_alpha);
containerLaunchContext.setLocalResources(localResources);
List<String> commands = new ArrayList<String>();
commands.add("/bin/bash");
commands.add(scriptFile.getAbsolutePath());
containerLaunchContext.setCommands(commands);
StartContainerRequest scRequest = StartContainerRequest.newInstance(containerLaunchContext, TestContainerManager.createContainerToken(cId, DUMMY_RM_IDENTIFIER, context.getNodeId(), user, context.getContainerTokenSecretManager()));
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests = StartContainersRequest.newInstance(list);
this.containerManager.startContainers(allRequests);
// Wait until the container actually finishes before signalling app
// completion; log aggregation kicks in on ON_SHUTDOWN app finish.
BaseContainerManagerTest.waitForContainerState(this.containerManager, cId, ContainerState.COMPLETE);
this.containerManager.handle(new CMgrCompletedAppsEvent(Arrays.asList(appId), CMgrCompletedAppsEvent.Reason.ON_SHUTDOWN));
this.containerManager.stop();
}
Example usage of org.apache.hadoop.yarn.api.records.URL in the Apache Samza project:
class LocalizerResourceMapper, method createLocalResource.
/**
 * Builds a YARN {@link LocalResource} for the file at {@code resourcePath},
 * filling in its URL, size, and modification timestamp from the filesystem.
 *
 * @param resourcePath       path of the file to localize
 * @param resourceType       YARN resource type (e.g. FILE, ARCHIVE)
 * @param resourceVisibility YARN resource visibility (e.g. APPLICATION)
 * @return the populated LocalResource
 * @throws LocalizerResourceException if the file status cannot be read
 */
private LocalResource createLocalResource(Path resourcePath, LocalResourceType resourceType, LocalResourceVisibility resourceVisibility) {
LocalResource localResource = Records.newRecord(LocalResource.class);
URL resourceUrl = ConverterUtils.getYarnUrlFromPath(resourcePath);
try {
FileStatus resourceFileStatus = resourcePath.getFileSystem(yarnConfiguration).getFileStatus(resourcePath);
// Defensive check: a conforming FileSystem throws FileNotFoundException
// rather than returning null, but guard against broken implementations.
if (null == resourceFileStatus) {
throw new LocalizerResourceException("Check getFileStatus implementation. getFileStatus gets unexpected null for resourcePath " + resourcePath);
}
localResource.setResource(resourceUrl);
log.info("setLocalizerResource for {}", resourceUrl);
localResource.setSize(resourceFileStatus.getLen());
localResource.setTimestamp(resourceFileStatus.getModificationTime());
localResource.setType(resourceType);
localResource.setVisibility(resourceVisibility);
return localResource;
} catch (IOException ioe) {
log.error("IO Exception when accessing the resource file status from the filesystem: " + resourcePath, ioe);
// Chain the original IOException so its stack trace is preserved for
// callers. NOTE(review): assumes LocalizerResourceException has a
// (String, Throwable) constructor — confirm; if not, add one.
throw new LocalizerResourceException("IO Exception when accessing the resource file status from the filesystem: " + resourcePath, ioe);
}
}
Aggregations