Usage of com.baidu.hugegraph.exception.InternalException in project incubator-hugegraph-toolchain (Apache): class FileMappingService, method moveToNextLevelDir.
/**
 * Moves the mapping's data file into a per-mapping subdirectory of its
 * current parent directory and returns the file's new absolute path.
 *
 * @param mapping the file mapping whose backing file should be relocated
 * @return the path of the file inside the newly created directory
 * @throws InternalException if the move fails; the mapping record is
 *         removed first because it would otherwise point at a stale path
 */
public String moveToNextLevelDir(FileMapping mapping) {
    File currFile = new File(mapping.getPath());
    // Target directory: "<parent>/<FILE_PREIFX><mapping id>"
    // NOTE(review): FILE_PREIFX looks like a typo of FILE_PREFIX, but it is a
    // constant declared elsewhere — renaming it is out of scope here.
    String destPath = Paths.get(currFile.getParentFile().getPath(),
                                FILE_PREIFX + mapping.getId()).toString();
    File destDir = new File(destPath);
    try {
        // true = create the destination directory if it doesn't exist yet
        FileUtils.moveFileToDirectory(currFile, destDir, true);
    } catch (IOException e) {
        // The mapping is useless without its file; clean it up before failing
        this.remove(mapping.getId());
        // Preserve the cause so the underlying I/O failure is diagnosable
        // (was previously dropped)
        throw new InternalException("Failed to move file to next level directory", e);
    }
    return Paths.get(destPath, currFile.getName()).toString();
}
Usage of com.baidu.hugegraph.exception.InternalException in project incubator-hugegraph-toolchain (Apache): class FileMappingService, method extractColumns.
/**
 * Reads the header and first sample data line from the mapping's file and
 * stores them into its {@link FileSetting} as column names and values.
 *
 * Lines matching the configured "skipped line" regex are ignored. If the
 * setting declares a header, the first non-skipped line supplies the column
 * names and the following line the sample values; otherwise synthetic names
 * ("col-1", "col-2", ...) are generated and the first non-skipped line is
 * used as the sample values.
 *
 * @param mapping the file mapping whose columns should be extracted
 * @throws InternalException if the file is missing or cannot be read
 */
public void extractColumns(FileMapping mapping) {
    File file = FileUtils.getFile(mapping.getPath());
    BufferedReader reader;
    try {
        // NOTE(review): FileReader uses the platform default charset —
        // presumably acceptable for this tool; confirm if UTF-8 is required
        reader = new BufferedReader(new FileReader(file));
    } catch (FileNotFoundException e) {
        // Preserve the cause (was previously dropped)
        throw new InternalException("The file '%s' is not found", e, file);
    }
    FileSetting setting = mapping.getFileSetting();
    String delimiter = setting.getDelimiter();
    Pattern pattern = Pattern.compile(setting.getSkippedLine());
    String[] columnNames;
    String[] columnValues;
    try {
        String line;
        // Skip leading lines that match the skipped-line pattern
        while ((line = reader.readLine()) != null) {
            if (!pattern.matcher(line).matches()) {
                break;
            }
        }
        Ex.check(line != null, "The file has no data line can treat as header");
        String[] firstLine = StringUtils.split(line, delimiter);
        if (setting.isHasHeader()) {
            // The first line as column names
            columnNames = firstLine;
            // The second line as column values; guard against a header-only
            // file, which previously propagated null into split()/asList()
            line = reader.readLine();
            Ex.check(line != null, "The file has no data line can treat as sample");
            // Fixed inconsistency: was StringUtil.split while the header
            // line above used StringUtils.split
            columnValues = StringUtils.split(line, delimiter);
        } else {
            // Let columns names as: column-1, column-2 ...
            columnNames = new String[firstLine.length];
            for (int i = 1; i <= firstLine.length; i++) {
                columnNames[i - 1] = "col-" + i;
            }
            // The first line as column values
            columnValues = firstLine;
        }
    } catch (IOException e) {
        // Preserve the cause (was previously dropped)
        throw new InternalException("Failed to read header and sample " +
                                    "data from file '%s'", e, file);
    } finally {
        IOUtils.closeQuietly(reader);
    }
    setting.setColumnNames(Arrays.asList(columnNames));
    setting.setColumnValues(Arrays.asList(columnValues));
}
Usage of com.baidu.hugegraph.exception.InternalException in project incubator-hugegraph-toolchain (Apache): class LoadTaskService, method readLoadFailedReason.
/**
 * Returns the content of the single load-failure error file produced for
 * the given mapping, or an explanatory message if no error file exists.
 *
 * The error file is expected under {@code <data-file-parent>/mapping/failure-data}
 * with a name ending in "error"; exactly one such file must be present.
 *
 * @param mapping the file mapping whose load failure should be reported
 * @return the error file's content, or a fallback message when absent
 * @throws InternalException if the error file cannot be read
 */
public String readLoadFailedReason(FileMapping mapping) {
    File dataFileParent = FileUtils.getFile(mapping.getPath()).getParentFile();
    File failureDir = FileUtils.getFile(dataFileParent, "mapping", "failure-data");
    // Collect the generated error data files (names ending with "error")
    File[] candidates = failureDir.listFiles((dir, name) -> name.endsWith("error"));
    if (candidates == null) {
        // listFiles() returns null when the directory is missing/unreadable
        return "For some reason, the error file was not generated. " +
               "Please check the log for details";
    }
    Ex.check(candidates.length == 1,
             "There should exist only one error file, actual is %s",
             candidates.length);
    File errorFile = candidates[0];
    try {
        return FileUtils.readFileToString(errorFile);
    } catch (IOException e) {
        throw new InternalException("Failed to read error file %s", e, errorFile);
    }
}
Usage of com.baidu.hugegraph.exception.InternalException in project incubator-hugegraph-toolchain (Apache): class EntityUtil, method merge.
/**
 * Merges two entities of the same class into a freshly constructed instance,
 * field by field, honoring each field's {@link MergeProperty} annotation.
 *
 * For annotated fields: when {@code useNew()} is true the new entity's value
 * wins, except that a null new value falls back to the old value when
 * {@code ignoreNull()} is set; when {@code useNew()} is false the old value
 * is kept. Unannotated fields and jacoco-injected "$" fields are skipped
 * (and therefore left at the new instance's default values).
 *
 * @param oldEntity the existing entity (also determines the runtime class)
 * @param newEntity the incoming entity carrying updated values
 * @return a new instance of the entities' class with merged field values
 * @throws InternalException if the class cannot be instantiated reflectively
 *         or a field cannot be accessed
 */
@SuppressWarnings("unchecked")
public static <T extends Mergeable> T merge(T oldEntity, T newEntity) {
    Class<?> clazz = oldEntity.getClass();
    T entity;
    try {
        // Class.newInstance() is deprecated since Java 9 and silently
        // propagates undeclared checked exceptions; invoke the no-arg
        // constructor explicitly instead
        entity = (T) clazz.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
        throw new InternalException("reflect.new-instance.failed", e, clazz.getName());
    }
    Field[] fields = clazz.getDeclaredFields();
    for (Field field : fields) {
        // NOTE: Skip jacoco injected field
        if (field.getName().startsWith("$")) {
            continue;
        }
        MergeProperty property = field.getAnnotation(MergeProperty.class);
        if (property == null) {
            // Fields without the annotation are not merged
            continue;
        }
        field.setAccessible(true);
        try {
            Object oldFieldValue = field.get(oldEntity);
            Object newFieldValue = field.get(newEntity);
            if (property.useNew()) {
                if (property.ignoreNull() && newFieldValue == null) {
                    // New value is null and must be ignored: keep the old one
                    field.set(entity, oldFieldValue);
                } else {
                    field.set(entity, newFieldValue);
                }
            } else {
                // Annotation says the old value always wins
                field.set(entity, oldFieldValue);
            }
        } catch (IllegalAccessException e) {
            throw new InternalException("reflect.access-field.failed", e,
                                        field.getName(), clazz.getName());
        }
    }
    return entity;
}
Usage of com.baidu.hugegraph.exception.InternalException in project incubator-hugegraph-toolchain (Apache): class ExecuteHistoryService, method remove.
/**
 * Deletes an execute-history record; if the record represents an async
 * gremlin execution, the corresponding server-side task is deleted first.
 *
 * @param connId the graph connection id used to obtain the client
 * @param id     the execute-history record id
 * @throws InternalException if the record does not exist or the database
 *         delete does not remove exactly one row
 */
@Transactional(isolation = Isolation.READ_COMMITTED)
public void remove(int connId, int id) {
    ExecuteHistory history = this.mapper.selectById(id);
    // selectById returns null for a missing id; the original code NPE'd on
    // history.getType() below. Fail the same way a failed delete does.
    if (history == null) {
        throw new InternalException("entity.delete.failed", id);
    }
    HugeClient client = this.getClient(connId);
    if (history.getType().equals(ExecuteType.GREMLIN_ASYNC)) {
        // Async gremlin executions own a server-side task; remove it too
        client.task().delete(history.getAsyncId());
    }
    if (this.mapper.deleteById(id) != 1) {
        throw new InternalException("entity.delete.failed", history);
    }
}
Aggregations