Use of com.baidu.hugegraph.entity.load.FileMapping in the project incubator-hugegraph-toolchain by Apache: the class FileMappingController, method fileSetting.
@PostMapping("{id}/file-setting")
public FileMapping fileSetting(@PathVariable("id") int id,
                               @RequestBody FileSetting newEntity) {
    /*
     * Reject the request up front if any mandatory setting field is blank;
     * each failed check raises with the matching i18n message key.
     */
    Ex.check(!StringUtils.isEmpty(newEntity.getDelimiter()),
             "load.file-mapping.file-setting.delimiter-cannot-be-empty");
    Ex.check(!StringUtils.isEmpty(newEntity.getCharset()),
             "load.file-mapping.file-setting.charset-cannot-be-empty");
    Ex.check(!StringUtils.isEmpty(newEntity.getDateFormat()),
             "load.file-mapping.file-setting.dateformat-cannot-be-empty");
    Ex.check(!StringUtils.isEmpty(newEntity.getTimeZone()),
             "load.file-mapping.file-setting.timezone-cannot-be-empty");
    Ex.check(!StringUtils.isEmpty(newEntity.getSkippedLine()),
             "load.file-mapping.file-setting.skippedline-cannot-be-empty");

    FileMapping fileMapping = this.service.get(id);
    if (fileMapping == null) {
        throw new ExternalException("load.file-mapping.not-exist.id", id);
    }
    // Change format to TEXT if needed
    newEntity.changeFormatIfNeeded();
    // Merge the submitted settings onto the stored ones, then persist
    FileSetting merged = this.mergeEntity(fileMapping.getFileSetting(),
                                          newEntity);
    fileMapping.setFileSetting(merged);
    // Read column names and values then fill it
    this.service.extractColumns(fileMapping);
    this.service.update(fileMapping);
    return fileMapping;
}
Use of com.baidu.hugegraph.entity.load.FileMapping in the project incubator-hugegraph-toolchain by Apache: the class FileMappingController, method addEdgeMapping.
@PostMapping("{id}/edge-mappings")
public FileMapping addEdgeMapping(@PathVariable("connId") int connId,
                                  @PathVariable("id") int id,
                                  @RequestBody EdgeMapping newEntity) {
    FileMapping fileMapping = this.service.get(id);
    if (fileMapping == null) {
        throw new ExternalException("load.file-mapping.not-exist.id", id);
    }
    // Delegate validation of the submitted edge mapping
    this.checkEdgeMappingValid(connId, newEntity, fileMapping);
    // Assign a fresh identifier before attaching it to the file mapping
    newEntity.setId(HubbleUtil.generateSimpleId());
    fileMapping.getEdgeMappings().add(newEntity);
    this.service.update(fileMapping);
    return fileMapping;
}
Use of com.baidu.hugegraph.entity.load.FileMapping in the project incubator-hugegraph-toolchain by Apache: the class FileMappingController, method loadParameter.
/*
 * TODO: Currently every file mapping shares one load parameter; it
 * should really be stored separately per mapping.
 */
@PostMapping("load-parameter")
public void loadParameter(@RequestBody LoadParameter newEntity) {
    this.checkLoadParameter(newEntity);
    // Merge the submitted parameter into every known mapping and persist
    for (FileMapping fileMapping : this.service.listAll()) {
        LoadParameter merged =
                this.mergeEntity(fileMapping.getLoadParameter(), newEntity);
        fileMapping.setLoadParameter(merged);
        this.service.update(fileMapping);
    }
}
Use of com.baidu.hugegraph.entity.load.FileMapping in the project incubator-hugegraph-toolchain by Apache: the class JobManagerController, method reason.
@GetMapping("{id}/reason")
public Response reason(@PathVariable("connId") int connId,
                       @PathVariable("id") int id) {
    JobManager job = this.service.get(id);
    if (job == null) {
        throw new ExternalException("job.manager.not-exist.id", id);
    }
    // Collect one reason entry per load task belonging to this job
    List<JobManagerReasonResult> reasonResults = new ArrayList<>();
    for (LoadTask task : this.taskService.batchTasks(job.getId())) {
        // Only failed tasks carry a failure reason
        String reason = "";
        if (task.getStatus() == LoadStatus.FAILED) {
            FileMapping fileMapping = this.fmService.get(task.getFileId());
            reason = this.taskService.readLoadFailedReason(fileMapping);
        }
        JobManagerReasonResult reasonResult = new JobManagerReasonResult();
        // NOTE(review): taskId is filled from getJobId(), not a task-level
        // id — looks suspicious, confirm this is intentional
        reasonResult.setTaskId(task.getJobId());
        reasonResult.setFileId(task.getFileId());
        reasonResult.setFileName(task.getFileName());
        reasonResult.setReason(reason);
        reasonResults.add(reasonResult);
    }
    return Response.builder()
                   .status(Constant.STATUS_OK)
                   .data(reasonResults)
                   .build();
}
Use of com.baidu.hugegraph.entity.load.FileMapping in the project incubator-hugegraph-toolchain by Apache: the class FileUploadController, method upload.
/*
 * Receives one chunk (part `index` of `total`) of a multi-part file upload
 * for the given connection/job, and finalizes the file mapping once all
 * chunks have been uploaded and merged.
 *
 * Concurrency: the per-token ReadWriteLock guards chunk uploads for one
 * file (this handler takes the read lock; deletion presumably takes the
 * write lock — TODO confirm against the delete endpoint), while the
 * mapping bookkeeping is serialized via synchronized(this.service).
 *
 * Returns a FileUploadResult whose status is FAILURE on validation/upload
 * errors, and whose id is only set once the whole file is COMPLETED.
 */
@PostMapping
public FileUploadResult upload(@PathVariable("connId") int connId, @PathVariable("jobId") int jobId, @RequestParam("file") MultipartFile file, @RequestParam("name") String fileName, @RequestParam("token") String token, @RequestParam("total") int total, @RequestParam("index") int index) {
this.checkTotalAndIndexValid(total, index);
this.checkFileNameMatchToken(fileName, token);
JobManager jobEntity = this.jobService.get(jobId);
this.checkFileValid(connId, jobId, jobEntity, file, fileName);
// First chunk for a fresh job: move the job into UPLOADING state
if (jobEntity.getJobStatus() == JobStatus.DEFAULT) {
jobEntity.setJobStatus(JobStatus.UPLOADING);
this.jobService.update(jobEntity);
}
// Ensure location exist and generate file path
String filePath = this.generateFilePath(connId, jobId, fileName);
// Check this file deleted before
ReadWriteLock lock = this.uploadingTokenLocks().get(token);
FileUploadResult result;
// A missing lock means the token was removed, i.e. the file was deleted
// while chunks were still arriving — fail this chunk without uploading
if (lock == null) {
result = new FileUploadResult();
result.setName(file.getOriginalFilename());
result.setSize(file.getSize());
result.setStatus(FileUploadResult.Status.FAILURE);
result.setCause("File has been deleted");
return result;
}
lock.readLock().lock();
try {
// Write this chunk to disk; bail out early if that already failed
result = this.service.uploadFile(file, index, filePath);
if (result.getStatus() == FileUploadResult.Status.FAILURE) {
return result;
}
// Serialize mapping creation/merge across concurrent chunk requests
synchronized (this.service) {
// Verify the existence of fragmented files
FileMapping mapping = this.service.get(connId, jobId, fileName);
if (mapping == null) {
// First chunk observed for this file: create the mapping record
mapping = new FileMapping(connId, fileName, filePath);
mapping.setJobId(jobId);
mapping.setFileStatus(FileMappingStatus.UPLOADING);
this.service.save(mapping);
} else {
if (mapping.getFileStatus() == FileMappingStatus.COMPLETED) {
// Another request already finished the merge; just report success
result.setId(mapping.getId());
// Remove uploading file token
this.uploadingTokenLocks().remove(token);
return result;
} else {
mapping.setUpdateTime(HubbleUtil.nowDate());
}
}
// Determine whether all the parts have been uploaded, then merge them
boolean merged = this.service.tryMergePartFiles(filePath, total);
if (!merged) {
// More chunks outstanding: persist the timestamp update and return
this.service.update(mapping);
return result;
}
// Read column names and values then fill it
this.service.extractColumns(mapping);
mapping.setFileStatus(FileMappingStatus.COMPLETED);
mapping.setTotalLines(FileUtil.countLines(mapping.getPath()));
mapping.setTotalSize(FileUtils.sizeOf(new File(mapping.getPath())));
// Move to the directory corresponding to the file mapping Id
String newPath = this.service.moveToNextLevelDir(mapping);
// Update file mapping stored path
mapping.setPath(newPath);
this.service.update(mapping);
// Update Job Manager size
long jobSize = jobEntity.getJobSize() + mapping.getTotalSize();
jobEntity.setJobSize(jobSize);
this.jobService.update(jobEntity);
result.setId(mapping.getId());
// Remove uploading file token
this.uploadingTokenLocks().remove(token);
}
return result;
} finally {
lock.readLock().unlock();
}
}
Aggregations