Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
The openReader method of the ParquetFileLineFetcher class.
/**
 * Opens a parquet reader over the given readable's path and caches the
 * file schema plus the column IO used for subsequent record reads.
 * Wraps any I/O failure in a LoadException carrying the readable.
 */
@Override
public void openReader(Readable readable) {
    Path path = readable.path();
    try {
        // Wrap the path as a hadoop input file and open it directly
        this.reader = ParquetFileReader.open(HadoopInputFile.fromPath(path, this.conf));
        this.schema = this.reader.getFooter().getFileMetaData().getSchema();
        this.columnIO = new ColumnIOFactory().getColumnIO(this.schema);
    } catch (IOException e) {
        throw new LoadException("Failed to open parquet reader for '%s'", e, readable);
    }
    // Start reading from the beginning of the file
    this.resetOffset();
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
The connect method of the RowFetcher class.
/**
 * Builds a JDBC connection to the configured database source.
 *
 * @return an open {@link Connection} for the source's url and credentials
 * @throws SQLException if the connection cannot be established
 * @throws LoadException if the configured driver class cannot be loaded
 */
private Connection connect() throws SQLException {
    String url = this.source.vendor().buildUrl(this.source);
    LOG.info("Connect to database {}", url);
    String driver = this.source.driver();
    String user = this.source.username();
    String pwd = this.source.password();
    try {
        // Loading the class registers the vendor's JDBC driver
        Class.forName(driver);
    } catch (ClassNotFoundException e) {
        throw new LoadException("Invalid driver class '%s'", e, driver);
    }
    return DriverManager.getConnection(url, user, pwd);
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
The convertObject method of the ParquetUtil class.
/**
 * Converts the value at the given field index of a parquet group into a
 * plain Java object, dispatching on the field's primitive type name.
 *
 * @param group      the parquet group holding the record's values
 * @param fieldIndex index of the field to convert
 * @return the converted value, or null when the field has no value
 * @throws LoadException if the field is a non-primitive (rich) type
 */
public static Object convertObject(Group group, int fieldIndex) {
    Type type = group.getType().getType(fieldIndex);
    if (!type.isPrimitive()) {
        throw new LoadException("Unsupported rich object type %s", type);
    }
    String name = type.getName();
    // A repetition count of zero means the field carries no value
    if (group.getFieldRepetitionCount(name) == 0) {
        return null;
    }
    switch (type.asPrimitiveType().getPrimitiveTypeName()) {
        case INT32:
            return group.getInteger(name, 0);
        case INT64:
            return group.getLong(name, 0);
        case INT96:
            // INT96 is the legacy parquet timestamp encoding
            return dateFromInt96(group.getInt96(name, 0));
        case FLOAT:
            return group.getFloat(name, 0);
        case DOUBLE:
            return group.getDouble(name, 0);
        case BOOLEAN:
            return group.getBoolean(name, 0);
        default:
            // Remaining primitives (e.g. binary) fall back to string form
            return group.getValueToString(fieldIndex, 0);
    }
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project hugegraph-computer by hugegraph.
The scanLocalPaths method of the LoaderFileInputSplitFetcher class.
/**
 * Collects the absolute paths of all local files to read for the source.
 *
 * If the source path is a regular file it must pass the source's
 * name/extension filter; if it is a directory, every accepted child
 * file is collected.
 *
 * @param source the file source holding the path and the name filter
 * @return absolute paths of the readable files (may be empty when a
 *         directory's children are all filtered out)
 * @throws LoadException if a single file is rejected by the filter, or
 *                       if the directory listing fails
 */
private List<String> scanLocalPaths(FileSource source) {
    List<String> paths = new ArrayList<>();
    File file = FileUtils.getFile(source.path());
    FileFilter filter = source.filter();
    if (file.isFile()) {
        if (!filter.reserved(file.getName())) {
            // Name the offending file so the failure is diagnosable
            throw new LoadException("Please check file name and " +
                                    "extensions of '%s', ensure that at " +
                                    "least one file is available for " +
                                    "reading", file);
        }
        paths.add(file.getAbsolutePath());
    } else {
        assert file.isDirectory();
        File[] subFiles = file.listFiles();
        if (subFiles == null) {
            // listFiles() returns null on I/O error or permission denial
            throw new LoadException("Error while listing the files of " +
                                    "path '%s'", file);
        }
        for (File subFile : subFiles) {
            if (filter.reserved(subFile.getName())) {
                paths.add(subFile.getAbsolutePath());
            }
        }
    }
    return paths;
}
Use of com.baidu.hugegraph.loader.exception.LoadException in project incubator-hugegraph-toolchain by apache.
The createSchema method of the HugeGraphLoader class.
/**
 * Executes the user-provided groovy schema script, if one was configured
 * in the load options, and then refreshes the local schema cache.
 *
 * @throws LoadException if the schema file cannot be read
 */
private void createSchema() {
    LoadOptions options = this.context.options();
    if (!StringUtils.isEmpty(options.schema)) {
        HugeClient client = this.context.client();
        GroovyExecutor executor = new GroovyExecutor();
        // Expose the schema manager to the groovy script
        executor.bind(Constants.GROOVY_SCHEMA, client.schema());
        String script;
        try {
            File schemaFile = FileUtils.getFile(options.schema);
            script = FileUtils.readFileToString(schemaFile, Constants.CHARSET);
        } catch (IOException e) {
            throw new LoadException("Failed to read schema file '%s'", e, options.schema);
        }
        executor.execute(script, client);
    }
    // Always refresh the cache, even when no schema script was given
    this.context.updateSchemaCache();
}
Aggregations