Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
Class FailWriter, method checkFileAvailable.
/**
 * Ensures the given file is usable as a failure-log target.
 *
 * Creates the file (and any missing parent directories) when it does not
 * exist; otherwise verifies it is a regular writable file, warning when
 * existing content will be overwritten.
 *
 * @param file the failure-log file to check or create
 * @throws LoadException if the file cannot be created, is a directory,
 *         or is not writable
 */
private static void checkFileAvailable(File file) {
if (!file.exists()) {
try {
File parent = file.getParentFile();
// getParentFile() is null for a path with no parent component;
// the original called mkdirs() unguarded and could NPE here
if (parent != null && !parent.exists() && !parent.mkdirs()) {
throw new LoadException("Failed to create parent directory of file '%s'", file);
}
// createNewFile() returns false if the file already exists
// (e.g. created concurrently); only fail when it is still absent
if (!file.createNewFile() && !file.exists()) {
throw new LoadException("Failed to create new file '%s'", file);
}
return;
} catch (IOException e) {
throw new LoadException("Failed to create new file '%s'", e, file);
}
}
if (file.isDirectory()) {
throw new LoadException("Please ensure there is no directory " + "with the same name: '%s'", file);
} else {
if (file.length() > 0) {
// Non-empty existing file: content will be clobbered by the writer
LOG.warn("The existed file {} will be overwritten", file);
}
}
if (!file.canWrite()) {
throw new LoadException("Please ensure the existed file is " + "writable: '%s'", file);
}
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
Class LoadMapping, method of.
/**
 * Loads and validates a {@link LoadMapping} from a JSON mapping file.
 *
 * @param filePath path of the mapping file to read
 * @return the parsed and checked mapping
 * @throws LoadException if the file cannot be read, parsed, or fails
 *         validation
 */
public static LoadMapping of(String filePath) {
File file = FileUtils.getFile(filePath);
LoadMapping mapping;
try {
String json = FileUtils.readFileToString(file, Constants.CHARSET);
mapping = MappingUtil.parse(json);
} catch (IOException e) {
// Fixed duplicated word: message previously read "mapping mapping file"
throw new LoadException("Failed to read mapping file '%s'", e, filePath);
} catch (IllegalArgumentException e) {
throw new LoadException("Failed to parse mapping file '%s'", e, filePath);
}
try {
// Semantic validation beyond JSON parsing (e.g. required fields)
mapping.check();
} catch (IllegalArgumentException e) {
throw new LoadException("Invalid mapping file '%s'", e, filePath);
}
return mapping;
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
Class JDBCReader, method hasNext.
/**
 * Reports whether another row is available, pulling the next batch from
 * the underlying fetcher once the current batch has been consumed.
 *
 * @return {@code true} if at least one more row can be read
 * @throws LoadException if fetching the next batch fails
 */
@Override
public boolean hasNext() {
    boolean exhausted = this.batch == null ||
                        this.offsetInBatch >= this.batch.size();
    if (exhausted) {
        try {
            this.batch = this.fetcher.nextBatch();
        } catch (Exception e) {
            throw new LoadException("Error while reading the next row", e);
        }
        // Fresh batch: start consuming from its first row
        this.offsetInBatch = 0;
    }
    // An empty or absent batch means the source is drained
    return this.batch != null && !this.batch.isEmpty();
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
Class FailLogger, method removeDupLines.
/**
 * Rewrites the failure log with duplicate records removed.
 *
 * The log is a sequence of record pairs: a tips line followed by a data
 * line. Data lines are hashed with murmur3_32 and a record is written to
 * the dedup file only the first time its data-line hash is seen; the
 * dedup file then replaces the original.
 *
 * @throws LoadException if reading/writing fails or the dedup file
 *         cannot be renamed over the original
 */
private void removeDupLines() {
Charset charset = Charset.forName(this.struct.input().charset());
File dedupFile = new File(this.file.getAbsolutePath() + Constants.DEDUP_SUFFIX);
try (InputStream is = new FileInputStream(this.file);
Reader ir = new InputStreamReader(is, charset);
BufferedReader reader = new BufferedReader(ir);
// upper is input, below is output
OutputStream os = new FileOutputStream(dedupFile);
Writer ow = new OutputStreamWriter(os, charset);
BufferedWriter writer = new BufferedWriter(ow)) {
Set<Integer> writtenLines = new HashSet<>();
HashFunction hashFunc = Hashing.murmur3_32();
// Read records pairwise; a trailing unpaired tips line is dropped
for (String tipsLine, dataLine; (tipsLine = reader.readLine()) != null && (dataLine = reader.readLine()) != null; ) {
/*
 * Hash data line to remove duplicate lines
 * Misjudgment may occur, but the probability is extremely low
 */
int hash = hashFunc.hashString(dataLine, charset).asInt();
// Single add() replaces the original contains()+add() double lookup;
// add() returns false when the hash was already recorded
if (writtenLines.add(hash)) {
writer.write(tipsLine);
writer.newLine();
writer.write(dataLine);
writer.newLine();
}
}
} catch (IOException e) {
// Preserve the cause — the original dropped the IOException entirely
throw new LoadException("Failed to remove duplicate lines", e);
}
// NOTE(review): renameTo over an existing file may fail on some
// platforms (e.g. Windows) — behavior kept as-is, surfaced as an error
if (!dedupFile.renameTo(this.file)) {
throw new LoadException("Failed to rename dedup file to origin");
}
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
Class FileLineFetcher, method skipOffset.
/**
 * Skips the first {@code offset} lines of the current reader and records
 * the skipped count via {@code addOffset}.
 *
 * A non-positive offset is a no-op.
 *
 * @param readable the source being read, used only for error reporting
 * @param offset   number of leading lines to skip
 * @throws LoadException if an I/O error occurs while skipping
 */
public void skipOffset(Readable readable, long offset) {
    if (offset <= 0) {
        return;
    }
    E.checkState(this.reader != null, "The reader shouldn't be null");
    try {
        long skipped = 0L;
        while (skipped < offset) {
            // Return value intentionally ignored; EOF simply yields nulls
            this.reader.readLine();
            skipped++;
        }
    } catch (IOException e) {
        throw new LoadException("Failed to skip the first %s lines " + "of file %s, please ensure the file " + "must have at least %s lines", e, offset, readable, offset);
    }
    this.addOffset(offset);
}
Aggregations