Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.
The class JobHistoryUtils, method ensurePathInDefaultFileSystem.
/**
* Ensure that path belongs to cluster's default file system unless
* 1. it is already fully qualified.
* 2. current job configuration uses default file system
* 3. running from a test case without core-site.xml
*
* @param sourcePath source path
* @param conf the job configuration
* @return fully qualified path (if necessary) in default file system
*/
private static String ensurePathInDefaultFileSystem(String sourcePath, Configuration conf) {
  Path path = new Path(sourcePath);
  FileContext fc = getDefaultFileContext();
  if (fc == null
      || fc.getDefaultFileSystem().getUri().toString()
          .equals(conf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, ""))
      || path.toUri().getAuthority() != null
      || path.toUri().getScheme() != null) {
    return sourcePath;
  }
  return fc.makeQualified(path).toString();
}
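For orientation, here is a minimal standalone sketch (not taken from the Hadoop source) of the behavior this method relies on: a path without scheme or authority, passed to FileContext.makeQualified, comes back qualified against the default file system. The class name and example path are illustrative assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;

public class QualifyPathExample {
  public static void main(String[] args) throws UnsupportedFileSystemException {
    Configuration conf = new Configuration();
    // FileContext bound to the default file system of this configuration.
    FileContext fc = FileContext.getFileContext(conf);
    // A scheme-less, authority-less path gets qualified against that file system.
    Path relative = new Path("/tmp/history/job_0001.jhist");
    System.out.println(fc.makeQualified(relative));
  }
}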
Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.
The class LoadGenerator, method loadScriptFile.
/**
* Read a script file of the form: lines of text with duration in seconds,
* read probability and write probability, separated by white space.
*
* @param filename Script file
* @param readLocally if true, read the script from the local file system;
*                    otherwise read it from the default file system
* @return 0 if successful, -1 if not
* @throws IOException if errors with file IO
*/
protected static int loadScriptFile(String filename, boolean readLocally) throws IOException {
  FileContext fc;
  if (readLocally) {
    // read locally - program is run without MR
    fc = FileContext.getLocalFSFileContext();
  } else {
    // use default file system
    fc = FileContext.getFileContext();
  }
  DataInputStream in = null;
  try {
    in = fc.open(new Path(filename));
  } catch (IOException e) {
    System.err.println("Unable to open scriptFile: " + filename);
    System.exit(-1);
  }
  InputStreamReader inr = new InputStreamReader(in);
  BufferedReader br = new BufferedReader(inr);
  ArrayList<Long> duration = new ArrayList<Long>();
  ArrayList<Double> readProb = new ArrayList<Double>();
  ArrayList<Double> writeProb = new ArrayList<Double>();
  int lineNum = 0;
  String line;
  try {
    while ((line = br.readLine()) != null) {
      lineNum++;
      // skip comments and blanks
      if (line.startsWith("#") || line.isEmpty())
        continue;
      parseScriptLine(line, duration, readProb, writeProb);
    }
  } catch (IllegalArgumentException e) {
    System.err.println("Line: " + lineNum + ", " + e.getMessage());
    return -1;
  } finally {
    IOUtils.cleanup(LOG, br);
  }
  // Copy vectors to arrays of values, to avoid autoboxing overhead later
  durations = new long[duration.size()];
  readProbs = new double[readProb.size()];
  writeProbs = new double[writeProb.size()];
  for (int i = 0; i < durations.length; i++) {
    durations[i] = duration.get(i);
    readProbs[i] = readProb.get(i);
    writeProbs[i] = writeProb.get(i);
  }
  if (durations[0] == 0)
    System.err.println("Initial duration set to 0. Will loop until stopped manually.");
  return 0;
}
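A hedged sketch of the same read pattern in isolation: FileContext.open returns an FSDataInputStream that can be wrapped in a BufferedReader, skipping comment and blank lines as above. The class name and script path are assumptions, not part of LoadGenerator.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class ReadScriptExample {
  public static void main(String[] args) throws IOException {
    // Local file context, as used when the generator runs without MapReduce.
    FileContext fc = FileContext.getLocalFSFileContext();
    // Hypothetical script location.
    FSDataInputStream in = fc.open(new Path("/tmp/load.script"));
    try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
      String line;
      while ((line = br.readLine()) != null) {
        // skip comments and blank lines
        if (line.startsWith("#") || line.isEmpty()) {
          continue;
        }
        System.out.println(line);
      }
    }
  }
}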
Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.
The class ViewFsTestSetup, method tearDownForViewFsLocalFs.
/**
* Delete the test directory in the target local fs.
*/
public static void tearDownForViewFsLocalFs(FileContextTestHelper helper) throws Exception {
  FileContext fclocal = FileContext.getLocalFSFileContext();
  Path targetOfTests = helper.getTestRootPath(fclocal);
  fclocal.delete(targetOfTests, true);
}
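A minimal sketch of the same teardown idiom (the test-root path and class name are assumed for illustration): a recursive FileContext.delete removes the directory and everything under it.

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class CleanupExample {
  public static void main(String[] args) throws Exception {
    FileContext fclocal = FileContext.getLocalFSFileContext();
    // Recursively delete the assumed test root and everything under it.
    boolean deleted = fclocal.delete(new Path("/tmp/viewfs-test-root"), true);
    System.out.println("deleted: " + deleted);
  }
}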
Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.
The class ViewFsTestSetup, method setupForViewFsLocalFs.
/*
* Return the ViewFs FileContext to be used for tests.
*/
public static FileContext setupForViewFsLocalFs(FileContextTestHelper helper) throws Exception {
  /**
   * create the test root on local_fs - the mount table will point here
   */
  FileContext fsTarget = FileContext.getLocalFSFileContext();
  Path targetOfTests = helper.getTestRootPath(fsTarget);
  // In case previous test was killed before cleanup
  fsTarget.delete(targetOfTests, true);
  fsTarget.mkdir(targetOfTests, FileContext.DEFAULT_PERM, true);
  Configuration conf = new Configuration();
  // Set up viewfs link for test dir as described above
  String testDir = helper.getTestRootPath(fsTarget).toUri().getPath();
  linkUpFirstComponents(conf, testDir, fsTarget, "test dir");
  // Set up viewfs link for home dir as described above
  setUpHomeDir(conf, fsTarget);
  // the test path may be relative to working dir - we need to make that work:
  // Set up viewfs link for wd as described above
  String wdDir = fsTarget.getWorkingDirectory().toUri().getPath();
  linkUpFirstComponents(conf, wdDir, fsTarget, "working dir");
  FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
  // in case testdir relative to wd.
  fc.setWorkingDirectory(new Path(wdDir));
  Log.getLog().info("Working dir is: " + fc.getWorkingDirectory());
  //System.out.println("TargetOfTests = "+ targetOfTests.toUri());
  return fc;
}
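A simplified sketch of wiring a single viewfs mount outside the test helper, assuming an illustrative mount point (/data) and local target directory; it uses ConfigUtil.addLink and FsConstants.VIEWFS_URI as the snippet above does, but omits the home-dir and working-dir links.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.viewfs.ConfigUtil;

public class ViewFsLinkExample {
  public static void main(String[] args) throws Exception {
    // Create the local target directory for the mount (illustrative path).
    FileContext local = FileContext.getLocalFSFileContext();
    Path target = new Path("/tmp/viewfs-target");
    // Clean up any leftover directory from a previous run, then recreate it.
    local.delete(target, true);
    local.mkdir(target, FileContext.DEFAULT_PERM, true);
    // Point the viewfs mount point /data at the local directory.
    Configuration conf = new Configuration();
    ConfigUtil.addLink(conf, "/data", local.makeQualified(target).toUri());
    FileContext fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
    fc.setWorkingDirectory(new Path("/data"));
    System.out.println("Working dir is: " + fc.getWorkingDirectory());
  }
}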
Use of org.apache.hadoop.fs.FileContext in project hadoop by apache.
The class TestJobHistoryParsing, method testDiagnosticsForKilledJob.
@Test(timeout = 60000)
public void testDiagnosticsForKilledJob() throws Exception {
  LOG.info("STARTING testDiagnosticsForKilledJob");
  try {
    final Configuration conf = new Configuration();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistoryWithJobKilled(2, 1, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    app.waitForState(job, JobState.KILLED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobHistoryParser parser;
    JobInfo jobInfo;
    synchronized (fileInfo) {
      Path historyFilePath = fileInfo.getHistoryFile();
      FSDataInputStream in = null;
      FileContext fc = null;
      try {
        fc = FileContext.getFileContext(conf);
        in = fc.open(fc.makeQualified(historyFilePath));
      } catch (IOException ioe) {
        LOG.info("Can not open history file: " + historyFilePath, ioe);
        throw (new Exception("Can not open History File"));
      }
      parser = new JobHistoryParser(in);
      jobInfo = parser.parse();
    }
    Exception parseException = parser.getParseException();
    assertNull("Caught an expected exception " + parseException, parseException);
    final List<String> originalDiagnostics = job.getDiagnostics();
    final String historyError = jobInfo.getErrorInfo();
    assertTrue("No original diagnostics for a failed job", originalDiagnostics != null && !originalDiagnostics.isEmpty());
    assertNotNull("No history error info for a failed job ", historyError);
    for (String diagString : originalDiagnostics) {
      assertTrue(historyError.contains(diagString));
    }
    assertTrue("No killed message in diagnostics", historyError.contains(JobImpl.JOB_KILLED_DIAG));
  } finally {
    LOG.info("FINISHED testDiagnosticsForKilledJob");
  }
}
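Outside the test harness, the open-and-parse step looks roughly like the sketch below; the history file path is a hypothetical stand-in for the value that HistoryFileInfo.getHistoryFile() returns in the test.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class ParseHistoryExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileContext fc = FileContext.getFileContext(conf);
    // Hypothetical .jhist path; the test obtains the real one from HistoryFileInfo.
    Path historyFile = new Path("/tmp/job_0001.jhist");
    FSDataInputStream in = fc.open(fc.makeQualified(historyFile));
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    System.out.println("Job error info: " + jobInfo.getErrorInfo());
  }
}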