Use of com.baidu.hugegraph.entity.load.JobManager in the project incubator-hugegraph-toolchain by apache.
The class JobManagerService, method list.
public IPage<JobManager> list(int connId, int pageNo, int pageSize, String content) {
    QueryWrapper<JobManager> query = Wrappers.query();
    query.eq("conn_id", connId);
    if (!content.isEmpty()) {
        query.like("job_name", content);
    }
    query.orderByDesc("create_time");
    Page<JobManager> page = new Page<>(pageNo, pageSize);
    IPage<JobManager> list = this.mapper.selectPage(page, query);
    list.getRecords().forEach(task -> {
        // While a job is still marked LOADING, derive its real status
        // from the statuses of its load tasks
        if (task.getJobStatus() == JobStatus.LOADING) {
            List<LoadTask> tasks = this.taskService.taskListByJob(task.getId());
            JobStatus status = JobStatus.SUCCESS;
            for (LoadTask loadTask : tasks) {
                if (loadTask.getStatus().inRunning() ||
                    loadTask.getStatus() == LoadStatus.PAUSED ||
                    loadTask.getStatus() == LoadStatus.STOPPED) {
                    status = JobStatus.LOADING;
                    break;
                }
                if (loadTask.getStatus() == LoadStatus.FAILED) {
                    status = JobStatus.FAILED;
                    break;
                }
            }
            if (status == JobStatus.SUCCESS || status == JobStatus.FAILED) {
                task.setJobStatus(status);
                this.update(task);
            }
        }
        // Finished jobs use their last update time as the end time;
        // running jobs use the current time
        Date endDate = task.getJobStatus() == JobStatus.FAILED ||
                       task.getJobStatus() == JobStatus.SUCCESS ?
                       task.getUpdateTime() : HubbleUtil.nowDate();
        task.setJobDuration(endDate.getTime() - task.getCreateTime().getTime());
    });
    return list;
}
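The service method above does the MyBatis-Plus paging and status refresh; a controller then only needs to forward the paging parameters. Below is a minimal sketch of such an endpoint, assuming a JobManagerController with an injected service field. The mapping path, parameter names, and default values are illustrative assumptions, not copied from the project.

// Hypothetical controller endpoint exposing the paged listing above
@GetMapping
public IPage<JobManager> list(@PathVariable("connId") int connId,
                              @RequestParam(value = "page_no", defaultValue = "1") int pageNo,
                              @RequestParam(value = "page_size", defaultValue = "10") int pageSize,
                              @RequestParam(value = "content", defaultValue = "") String content) {
    // Delegate paging, filtering, and status refresh to the service
    return this.service.list(connId, pageNo, pageSize, content);
}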
Use of com.baidu.hugegraph.entity.load.JobManager in the project incubator-hugegraph-toolchain by apache.
The class JobManagerController, method reason.
@GetMapping("{id}/reason")
public Response reason(@PathVariable("connId") int connId,
                       @PathVariable("id") int id) {
    JobManager job = this.service.get(id);
    if (job == null) {
        throw new ExternalException("job.manager.not-exist.id", id);
    }
    List<LoadTask> tasks = this.taskService.batchTasks(job.getId());
    List<JobManagerReasonResult> reasonResults = new ArrayList<>();
    tasks.forEach(task -> {
        JobManagerReasonResult reasonResult = new JobManagerReasonResult();
        int fileId = task.getFileId();
        String reason = "";
        if (task.getStatus() == LoadStatus.FAILED) {
            // Read the failure reason recorded for the failed file mapping
            FileMapping mapping = this.fmService.get(fileId);
            reason = this.taskService.readLoadFailedReason(mapping);
        }
        reasonResult.setTaskId(task.getJobId());
        reasonResult.setFileId(task.getFileId());
        reasonResult.setFileName(task.getFileName());
        reasonResult.setReason(reason);
        reasonResults.add(reasonResult);
    });
    return Response.builder().status(Constant.STATUS_OK).data(reasonResults).build();
}
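For reference, the shape of the per-task payload can be read off the setters used above. The sketch below is an assumed version of JobManagerReasonResult; the field names mirror those setters, but the Lombok/Jackson annotations and JSON property names are illustrative assumptions.

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;

// Hypothetical sketch of the DTO populated in the reason() handler
@Data
public class JobManagerReasonResult {

    @JsonProperty("task_id")
    private int taskId;

    @JsonProperty("file_id")
    private int fileId;

    @JsonProperty("file_name")
    private String fileName;

    @JsonProperty("reason")
    private String reason;
}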
Use of com.baidu.hugegraph.entity.load.JobManager in the project incubator-hugegraph-toolchain by apache.
The class JobManagerController, method delete.
@DeleteMapping("{id}")
public void delete(@PathVariable("id") int id) {
    JobManager task = this.service.get(id);
    if (task == null) {
        throw new ExternalException("job.manager.not-exist.id", id);
    }
    this.service.remove(id);
}
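The controller only validates existence and delegates to the service. A minimal sketch of how the get/remove calls could be backed by a MyBatis-Plus BaseMapper inside JobManagerService is shown below; the mapper field and method bodies are assumptions for illustration, not the project's actual implementation.

// Hypothetical service-layer methods backed by a MyBatis-Plus BaseMapper<JobManager>
public JobManager get(int id) {
    return this.mapper.selectById(id);
}

public void remove(int id) {
    this.mapper.deleteById(id);
}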
Use of com.baidu.hugegraph.entity.load.JobManager in the project incubator-hugegraph-toolchain by apache.
The class LoadTaskController, method stop.
@PostMapping("stop")
public LoadTask stop(@PathVariable("connId") int connId,
                     @PathVariable("jobId") int jobId,
                     @RequestParam("task_id") int taskId) {
    GraphConnection connection = this.connService.get(connId);
    if (connection == null) {
        throw new ExternalException("graph-connection.not-exist.id", connId);
    }
    JobManager jobEntity = this.jobService.get(jobId);
    Ex.check(jobEntity != null, "job-manager.not-exist.id", jobId);
    Ex.check(jobEntity.getJobStatus() == JobStatus.LOADING,
             "load.task.pause.no-permission");
    try {
        return this.service.stop(taskId);
    } finally {
        jobEntity.setJobStatus(JobStatus.LOADING);
        jobEntity.setUpdateTime(HubbleUtil.nowDate());
        this.jobService.update(jobEntity);
    }
}
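The preconditions here are expressed through Ex.check, which fails the request with a message key when the condition does not hold. A minimal sketch of such a guard is below; the project's actual Ex utility may differ, so treat this as an assumed illustration.

// Hypothetical sketch of an Ex.check style guard
public static void check(boolean expression, String messageKey, Object... args) {
    if (!expression) {
        // Fail the request with a translatable message key
        throw new ExternalException(messageKey, args);
    }
}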
Use of com.baidu.hugegraph.entity.load.JobManager in the project incubator-hugegraph-toolchain by apache.
The class FileUploadController, method upload.
@PostMapping
public FileUploadResult upload(@PathVariable("connId") int connId,
                               @PathVariable("jobId") int jobId,
                               @RequestParam("file") MultipartFile file,
                               @RequestParam("name") String fileName,
                               @RequestParam("token") String token,
                               @RequestParam("total") int total,
                               @RequestParam("index") int index) {
    this.checkTotalAndIndexValid(total, index);
    this.checkFileNameMatchToken(fileName, token);
    JobManager jobEntity = this.jobService.get(jobId);
    this.checkFileValid(connId, jobId, jobEntity, file, fileName);
    if (jobEntity.getJobStatus() == JobStatus.DEFAULT) {
        jobEntity.setJobStatus(JobStatus.UPLOADING);
        this.jobService.update(jobEntity);
    }
    // Ensure the upload location exists and generate the file path
    String filePath = this.generateFilePath(connId, jobId, fileName);
    // Check whether this file was deleted before
    ReadWriteLock lock = this.uploadingTokenLocks().get(token);
    FileUploadResult result;
    if (lock == null) {
        result = new FileUploadResult();
        result.setName(file.getOriginalFilename());
        result.setSize(file.getSize());
        result.setStatus(FileUploadResult.Status.FAILURE);
        result.setCause("File has been deleted");
        return result;
    }
    lock.readLock().lock();
    try {
        result = this.service.uploadFile(file, index, filePath);
        if (result.getStatus() == FileUploadResult.Status.FAILURE) {
            return result;
        }
        synchronized (this.service) {
            // Verify the existence of fragmented files
            FileMapping mapping = this.service.get(connId, jobId, fileName);
            if (mapping == null) {
                mapping = new FileMapping(connId, fileName, filePath);
                mapping.setJobId(jobId);
                mapping.setFileStatus(FileMappingStatus.UPLOADING);
                this.service.save(mapping);
            } else {
                if (mapping.getFileStatus() == FileMappingStatus.COMPLETED) {
                    result.setId(mapping.getId());
                    // Remove the uploading file token
                    this.uploadingTokenLocks().remove(token);
                    return result;
                } else {
                    mapping.setUpdateTime(HubbleUtil.nowDate());
                }
            }
            // Determine whether all the parts have been uploaded, then merge them
            boolean merged = this.service.tryMergePartFiles(filePath, total);
            if (!merged) {
                this.service.update(mapping);
                return result;
            }
            // Read column names and values, then fill them in
            this.service.extractColumns(mapping);
            mapping.setFileStatus(FileMappingStatus.COMPLETED);
            mapping.setTotalLines(FileUtil.countLines(mapping.getPath()));
            mapping.setTotalSize(FileUtils.sizeOf(new File(mapping.getPath())));
            // Move to the directory corresponding to the file mapping id
            String newPath = this.service.moveToNextLevelDir(mapping);
            // Update the stored path of the file mapping
            mapping.setPath(newPath);
            this.service.update(mapping);
            // Update the job manager size
            long jobSize = jobEntity.getJobSize() + mapping.getTotalSize();
            jobEntity.setJobSize(jobSize);
            this.jobService.update(jobEntity);
            result.setId(mapping.getId());
            // Remove the uploading file token
            this.uploadingTokenLocks().remove(token);
        }
        return result;
    } finally {
        lock.readLock().unlock();
    }
}
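The key step in this chunked-upload flow is tryMergePartFiles: each request stores one chunk, and only the request that completes the set merges the parts into the final file. Below is a minimal sketch of what such a merge could look like; the part directory layout, part naming, and return convention are assumptions for illustration, and the project's actual tryMergePartFiles may behave differently.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.commons.io.FileUtils;

// Hypothetical merge step: once all chunks are present, append them
// to the target file in index order and clean up the part directory
private boolean tryMergePartFilesSketch(String filePath, int total) throws IOException {
    File partDir = new File(filePath + "-parts");
    File[] parts = partDir.listFiles();
    if (parts == null || parts.length < total) {
        // Not all chunks have been uploaded yet
        return false;
    }
    // Assume each part file is named by its chunk index ("0", "1", ...)
    Arrays.sort(parts, Comparator.comparingInt((File p) -> Integer.parseInt(p.getName())));
    try (OutputStream out = new FileOutputStream(filePath, true)) {
        for (File part : parts) {
            Files.copy(part.toPath(), out);
        }
    }
    FileUtils.deleteQuietly(partDir);
    return true;
}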