Search in sources :

Example 21 with LoadException

use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.

In the class FailWriter, the method checkFileAvailable:

/**
 * Ensures that {@code file} can be used as a writable output file.
 *
 * If the file does not exist yet, its parent directories and the file
 * itself are created. Otherwise the file must be a regular, writable
 * file; a non-empty existing file is only warned about (it will be
 * overwritten later).
 *
 * @param file the target file to validate or create
 * @throws LoadException if the path is a directory, is not writable,
 *         or the file/parent directories cannot be created
 */
private static void checkFileAvailable(File file) {
    if (!file.exists()) {
        try {
            /*
             * mkdirs() and createNewFile() signal failure by returning
             * false instead of throwing, so check the results explicitly
             * rather than silently ignoring them
             */
            File parent = file.getParentFile();
            if (parent != null && !parent.exists() && !parent.mkdirs()) {
                throw new LoadException("Failed to create parent directory of file '%s'", file);
            }
            if (!file.createNewFile() && !file.exists()) {
                throw new LoadException("Failed to create new file '%s'", file);
            }
            return;
        } catch (IOException e) {
            throw new LoadException("Failed to create new file '%s'", e, file);
        }
    }
    if (file.isDirectory()) {
        throw new LoadException("Please ensure there is no directory " + "with the same name: '%s'", file);
    } else {
        if (file.length() > 0) {
            // Intentional: existing content is allowed, just overwritten
            LOG.warn("The existed file {} will be overwritten", file);
        }
    }
    if (!file.canWrite()) {
        throw new LoadException("Please ensure the existed file is " + "writable: '%s'", file);
    }
}
Also used : IOException(java.io.IOException) LoadException(com.baidu.hugegraph.loader.exception.LoadException)

Example 22 with LoadException

use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.

In the class LoadMapping, the method of:

/**
 * Static factory: loads and validates a {@link LoadMapping} from the
 * JSON mapping file at {@code filePath}.
 *
 * @param filePath path of the JSON mapping file
 * @return the parsed and checked mapping
 * @throws LoadException if the file cannot be read, cannot be parsed,
 *         or fails validation via {@code mapping.check()}
 */
public static LoadMapping of(String filePath) {
    File file = FileUtils.getFile(filePath);
    LoadMapping mapping;
    try {
        String json = FileUtils.readFileToString(file, Constants.CHARSET);
        mapping = MappingUtil.parse(json);
    } catch (IOException e) {
        // Fixed duplicated word: was "Failed to read mapping mapping file"
        throw new LoadException("Failed to read mapping file '%s'", e, filePath);
    } catch (IllegalArgumentException e) {
        // Fixed duplicated word: was "Failed to parse mapping mapping file"
        throw new LoadException("Failed to parse mapping file '%s'", e, filePath);
    }
    // Validate separately so check failures get a distinct message
    try {
        mapping.check();
    } catch (IllegalArgumentException e) {
        throw new LoadException("Invalid mapping file '%s'", e, filePath);
    }
    return mapping;
}
Also used : IOException(java.io.IOException) File(java.io.File) LoadException(com.baidu.hugegraph.loader.exception.LoadException)

Example 23 with LoadException

use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.

In the class JDBCReader, the method hasNext:

/**
 * Reports whether another row is available, fetching the next batch
 * from the underlying fetcher when the current one is exhausted.
 *
 * @return true if the freshly-available batch is non-null and non-empty
 * @throws LoadException wrapping any failure from the fetcher
 */
@Override
public boolean hasNext() {
    boolean exhausted = this.batch == null ||
                        this.offsetInBatch >= this.batch.size();
    if (exhausted) {
        try {
            this.batch = this.fetcher.nextBatch();
        } catch (Exception e) {
            throw new LoadException("Error while reading the next row", e);
        }
        // Restart cursor at the beginning of the new batch
        this.offsetInBatch = 0;
    }
    return this.batch != null && !this.batch.isEmpty();
}
Also used : SQLException(java.sql.SQLException) InitException(com.baidu.hugegraph.loader.exception.InitException) NoSuchElementException(java.util.NoSuchElementException) LoadException(com.baidu.hugegraph.loader.exception.LoadException) LoadException(com.baidu.hugegraph.loader.exception.LoadException)

Example 24 with LoadException

use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.

In the class FailLogger, the method removeDupLines:

/**
 * Rewrites the failure log with duplicate entries removed.
 *
 * The log is structured as alternating (tips line, data line) pairs.
 * Each data line is hashed with murmur3_32; a pair is copied to a
 * temporary ".dedup" file only the first time its data-line hash is
 * seen, then the temporary file replaces the original.
 *
 * @throws LoadException if reading/writing fails or the rename fails
 */
private void removeDupLines() {
    Charset charset = Charset.forName(this.struct.input().charset());
    File dedupFile = new File(this.file.getAbsolutePath() + Constants.DEDUP_SUFFIX);
    try (InputStream is = new FileInputStream(this.file);
        Reader ir = new InputStreamReader(is, charset);
        BufferedReader reader = new BufferedReader(ir);
        // upper is input, below is output
        OutputStream os = new FileOutputStream(dedupFile);
        Writer ow = new OutputStreamWriter(os, charset);
        BufferedWriter writer = new BufferedWriter(ow)) {
        Set<Integer> writtenLineHashes = new HashSet<>();
        HashFunction hashFunc = Hashing.murmur3_32();
        for (String tipsLine, dataLine; (tipsLine = reader.readLine()) != null && (dataLine = reader.readLine()) != null; ) {
            /*
             * Hash data line to remove duplicate lines
             * Misjudgment may occur (hash collision), but the
             * probability is extremely low
             */
            int hash = hashFunc.hashString(dataLine, charset).asInt();
            if (!writtenLineHashes.contains(hash)) {
                writer.write(tipsLine);
                writer.newLine();
                writer.write(dataLine);
                writer.newLine();
                // Remember the hash of the written line
                writtenLineHashes.add(hash);
            }
        }
    } catch (IOException e) {
        // Preserve the cause: the original threw without 'e',
        // discarding the stack trace of the underlying failure
        throw new LoadException("Failed to remove duplicate lines", e);
    }
    // NOTE(review): renameTo() may fail on some platforms when the
    // target exists — TODO confirm against supported platforms
    if (!dedupFile.renameTo(this.file)) {
        throw new LoadException("Failed to rename dedup file to origin");
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) Charset(java.nio.charset.Charset) Reader(java.io.Reader) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) BufferedWriter(java.io.BufferedWriter) LoadException(com.baidu.hugegraph.loader.exception.LoadException) HashFunction(com.google.common.hash.HashFunction) FileOutputStream(java.io.FileOutputStream) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) File(java.io.File) OutputStreamWriter(java.io.OutputStreamWriter) BufferedWriter(java.io.BufferedWriter) Writer(java.io.Writer) HashSet(java.util.HashSet)

Example 25 with LoadException

use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.

In the class FileLineFetcher, the method skipOffset:

/**
 * Skips the first {@code offset} lines of the current reader, used to
 * resume loading from a recorded position.
 *
 * @param readable the source being read, used only for error messages
 * @param offset   number of lines to skip; non-positive values are a no-op
 * @throws LoadException if an I/O error occurs, or the file has fewer
 *         than {@code offset} lines
 */
public void skipOffset(Readable readable, long offset) {
    if (offset <= 0) {
        return;
    }
    E.checkState(this.reader != null, "The reader shouldn't be null");
    try {
        for (long i = 0L; i < offset; i++) {
            /*
             * readLine() returns null at EOF; the original ignored
             * that and silently "skipped" lines that don't exist,
             * then recorded a wrong offset via addOffset()
             */
            if (this.reader.readLine() == null) {
                throw new LoadException("Failed to skip the first %s lines " + "of file %s, please ensure the file " + "must have at least %s lines", offset, readable, offset);
            }
        }
    } catch (IOException e) {
        throw new LoadException("Failed to skip the first %s lines " + "of file %s, please ensure the file " + "must have at least %s lines", e, offset, readable, offset);
    }
    this.addOffset(offset);
}
Also used : IOException(java.io.IOException) LoadException(com.baidu.hugegraph.loader.exception.LoadException)

Aggregations

LoadException (com.baidu.hugegraph.loader.exception.LoadException)32 IOException (java.io.IOException)18 File (java.io.File)10 SQLException (java.sql.SQLException)4 ArrayList (java.util.ArrayList)4 FileFilter (com.baidu.hugegraph.loader.source.file.FileFilter)3 Path (org.apache.hadoop.fs.Path)3 ServerException (com.baidu.hugegraph.exception.ServerException)2 InitException (com.baidu.hugegraph.loader.exception.InitException)2 LoadOptions (com.baidu.hugegraph.loader.executor.LoadOptions)2 LoadSummary (com.baidu.hugegraph.loader.metrics.LoadSummary)2 Readable (com.baidu.hugegraph.loader.reader.Readable)2 InputStream (java.io.InputStream)2 InputStreamReader (java.io.InputStreamReader)2 CompressorInputStream (org.apache.commons.compress.compressors.CompressorInputStream)2 CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream)2 HugeClient (com.baidu.hugegraph.driver.HugeClient)1 HugeClientBuilder (com.baidu.hugegraph.driver.HugeClientBuilder)1 GroovyExecutor (com.baidu.hugegraph.loader.executor.GroovyExecutor)1 InputStruct (com.baidu.hugegraph.loader.mapping.InputStruct)1