Use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.
From the class HopsUtils, method prepJupyterNotebookConversion:
public static String prepJupyterNotebookConversion(Execution execution, DistributedFileSystemOps udfso)
    throws JobException {
  // Stage the converted notebook under the project's staging directory.
  String outPath = "hdfs://" + Utils.getProjectPath(execution.getJob().getProject().getName())
      + Settings.PROJECT_STAGING_DIR;
  String pyJobPath = outPath + "/jobs/" + execution.getJob().getName();
  String pyAppPath = pyJobPath + "/" + execution.getId() + ".py";
  Path pyJobDir = new Path(pyJobPath);
  try {
    if (udfso.exists(pyJobDir)) {
      FileStatus fileStatus = udfso.getFileStatus(pyJobDir);
      if (!fileStatus.isDirectory()) {
        throw new JobException(RESTCodes.JobErrorCode.JOB_CREATION_ERROR, Level.INFO,
            "Failed to convert notebook - Job Directory name is used by a file");
      }
    } else {
      // Create the job directory, inheriting the parent directory's permissions.
      udfso.mkdirs(pyJobDir, udfso.getParentPermission(pyJobDir));
    }
  } catch (IOException e) {
    throw new JobException(RESTCodes.JobErrorCode.JOB_CREATION_ERROR, Level.INFO,
        "Failed to convert notebook.", "HopsFS write failure.", e);
  }
  return pyAppPath;
}
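The exists/isDirectory/mkdirs sequence above is a general HDFS pattern: ensure a staging directory exists before writing into it, and fail fast if a file already occupies the name. A minimal self-contained sketch of the same pattern against the plain Hadoop FileSystem API follows; the class name, helper name, and the example path are hypothetical, not part of HopsUtils:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StagingDirs {
  // Hypothetical helper mirroring the checks in prepJupyterNotebookConversion:
  // ensure 'dir' exists and is a directory, creating it if absent.
  public static void ensureDirectory(FileSystem fs, Path dir) throws IOException {
    if (fs.exists(dir)) {
      FileStatus status = fs.getFileStatus(dir);
      if (!status.isDirectory()) {
        throw new IOException("Path exists but is a file: " + dir);
      }
    } else if (!fs.mkdirs(dir)) {
      throw new IOException("Failed to create directory: " + dir);
    }
  }

  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    ensureDirectory(fs, new Path("/Projects/demo/Resources/.staging/jobs/my_job"));
    fs.close();
  }
}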
Use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.
From the class HopsUtils, method cleanupExecutionDatasetResources:
public static void cleanupExecutionDatasetResources(Execution execution, String hdfsUsername,
    DistributedFsService dfs) throws JobException {
  String outPath = "hdfs://" + Utils.getProjectPath(execution.getJob().getProject().getName())
      + Settings.PROJECT_STAGING_DIR;
  String pyJobPath = outPath + "/jobs/" + execution.getJob().getName();
  String pyAppPath = pyJobPath + "/" + execution.getId() + ".py";
  try {
    // Delete only this execution's staged .py file; the job directory itself is kept.
    removeFiles(pyAppPath, hdfsUsername, dfs);
  } catch (DatasetException e) {
    String msg = "Failed to cleanup execution dataset resources";
    throw new JobException(RESTCodes.JobErrorCode.JOB_DELETION_ERROR, Level.INFO, msg, msg, e);
  }
}
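removeFiles is a HopsUtils helper not shown on this page. As a rough stand-in, a delete of this kind can be sketched with the plain Hadoop FileSystem API; the class name, helper name, and example path here are assumptions for illustration only:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StagingCleanup {
  // Hypothetical stand-in for removeFiles: delete a path if it exists,
  // recursively so the same call works for both files and directories.
  public static void deleteIfExists(FileSystem fs, String path) throws IOException {
    Path p = new Path(path);
    if (fs.exists(p) && !fs.delete(p, true)) {
      throw new IOException("Failed to delete: " + p);
    }
  }

  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    deleteIfExists(fs, "/Projects/demo/Resources/.staging/jobs/my_job/42.py");
    fs.close();
  }
}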
Use of io.hops.hopsworks.exceptions.JobException in project hopsworks by logicalclocks.
From the class HopsUtils, method cleanupJobDatasetResources:
public static void cleanupJobDatasetResources(Jobs job, String hdfsUsername, DistributedFsService dfs)
    throws JobException {
  String outPath = "hdfs://" + Utils.getProjectPath(job.getProject().getName()) + Settings.PROJECT_STAGING_DIR;
  String pyJobPath = outPath + "/jobs/" + job.getName();
  try {
    // Remove the whole job directory, including any per-execution .py files under it.
    removeFiles(pyJobPath, hdfsUsername, dfs);
  } catch (DatasetException e) {
    String msg = "Failed to cleanup job dataset resources";
    throw new JobException(RESTCodes.JobErrorCode.JOB_DELETION_ERROR, Level.INFO, msg, msg, e);
  }
}
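The two cleanup methods differ only in scope: the execution-level variant targets a single staged <id>.py file, while the job-level variant removes the whole jobs/<name> directory. A tiny standalone sketch of that shared path layout follows; the /Projects and Resources/.staging literals are assumptions standing in for Utils.getProjectPath and Settings.PROJECT_STAGING_DIR, and the project/job/execution values are made up:

public class StagingLayoutDemo {
  public static void main(String[] args) {
    String project = "demo";
    String jobName = "my_job";
    int executionId = 42;
    // Assumed values; the real code uses Utils.getProjectPath and Settings.PROJECT_STAGING_DIR.
    String outPath = "hdfs://" + "/Projects/" + project + "/" + "Resources/.staging";
    String pyJobPath = outPath + "/jobs/" + jobName;          // scope of cleanupJobDatasetResources
    String pyAppPath = pyJobPath + "/" + executionId + ".py"; // scope of cleanupExecutionDatasetResources
    System.out.println(pyJobPath);
    System.out.println(pyAppPath);
  }
}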