Use of com.baidu.hugegraph.loader.mapping.LoadMapping in project incubator-hugegraph-toolchain by Apache.
The class MappingConverter, method main.
public static void main(String[] args) {
    E.checkArgument(args.length == 1, "args: file");
    String input = args[0];
    LOG.info("Prepare to convert mapping file {}", input);
    File file = FileUtils.getFile(input);
    if (!file.exists() || !file.isFile()) {
        LOG.error("The file '{}' doesn't exist or is not a file", input);
        throw new IllegalArgumentException(String.format(
                  "The file '%s' doesn't exist or is not a file", input));
    }
    LoadMapping mapping = LoadMapping.of(input);
    String outputPath = getOutputPath(file);
    MappingUtil.write(mapping, outputPath);
    LOG.info("Convert mapping file successfully, stored at {}", outputPath);
}
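For context, a minimal usage sketch (not taken from the project) of invoking this converter programmatically. The wrapper class name ConvertMappingExample and the mapping file name "struct.json" are assumptions made up for illustration; the converter is assumed to be on the classpath.

// Hypothetical usage sketch: run the converter on a v1 mapping file.
// "struct.json" is an assumed example path, not a path from the project.
public class ConvertMappingExample {
    public static void main(String[] args) {
        // MappingConverter.main expects exactly one argument: the mapping file path.
        MappingConverter.main(new String[]{"struct.json"});
    }
}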
Use of com.baidu.hugegraph.loader.mapping.LoadMapping in project incubator-hugegraph-toolchain by Apache.
The class MappingUtil, method parseV1.
private static LoadMapping parseV1(String json) {
    GraphStructV1 graphStruct = JsonUtil.fromJson(json, GraphStructV1.class);
    Map<FileSourceKey, InputStruct> fileSourceInputStructs = InsertionOrderUtil.newMap();
    List<InputStruct> jdbcSourceInputStructs = new ArrayList<>();
    for (ElementStructV1 originStruct : graphStruct.structs()) {
        InputSource inputSource = originStruct.input();
        ElementMapping targetStruct = convertV1ToV2(originStruct);
        SourceType type = inputSource.type();
        if (type == SourceType.FILE || type == SourceType.HDFS) {
            // File and HDFS sources that share the same path are merged
            // into a single InputStruct keyed by (type, path)
            FileSource source = (FileSource) inputSource;
            FileSourceKey key = new FileSourceKey(type, source.path());
            fileSourceInputStructs.compute(key, (k, inputStruct) -> {
                if (inputStruct == null) {
                    inputStruct = new InputStruct(null, null);
                    inputStruct.input(source);
                }
                inputStruct.add(targetStruct);
                return inputStruct;
            });
        } else {
            // Each JDBC source gets its own InputStruct
            assert type == SourceType.JDBC;
            InputStruct inputStruct = new InputStruct(null, null);
            inputStruct.input(inputSource);
            inputStruct.add(targetStruct);
            jdbcSourceInputStructs.add(inputStruct);
        }
    }
    // Generate an id for every input mapping
    List<InputStruct> inputStructs = new ArrayList<>();
    int id = 0;
    for (InputStruct inputStruct : fileSourceInputStructs.values()) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    for (InputStruct inputStruct : jdbcSourceInputStructs) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    return new LoadMapping(inputStructs);
}
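The key step above is merging every element mapping that reads the same file into a single InputStruct via Map.compute. Below is a self-contained sketch of that grouping pattern using plain JDK collections instead of the project's types: LinkedHashMap stands in for InsertionOrderUtil.newMap() (assumed to preserve insertion order), and the source keys and labels are invented for illustration.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupBySourceKeyExample {
    public static void main(String[] args) {
        // Each pair is {source key, element mapping label}; values are made up.
        String[][] mappings = {
                {"FILE:/data/person.csv", "person"},
                {"FILE:/data/person.csv", "software"},
                {"JDBC:orders", "created"}
        };
        Map<String, List<String>> inputStructs = new LinkedHashMap<>();
        for (String[] m : mappings) {
            inputStructs.compute(m[0], (key, labels) -> {
                if (labels == null) {
                    labels = new ArrayList<>();  // first mapping for this source
                }
                labels.add(m[1]);                // merge into the shared bucket
                return labels;
            });
        }
        // Prints: FILE:/data/person.csv -> [person, software]
        //         JDBC:orders -> [created]
        inputStructs.forEach((k, v) -> System.out.println(k + " -> " + v));
    }
}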
Use of com.baidu.hugegraph.loader.mapping.LoadMapping in project incubator-hugegraph-toolchain by Apache.
The class LoadTaskService, method buildLoadMapping.
private LoadMapping buildLoadMapping(GraphConnection connection, FileMapping fileMapping) {
    FileSource source = this.buildFileSource(fileMapping);
    List<com.baidu.hugegraph.loader.mapping.VertexMapping> vMappings;
    vMappings = this.buildVertexMappings(connection, fileMapping);
    List<com.baidu.hugegraph.loader.mapping.EdgeMapping> eMappings;
    eMappings = this.buildEdgeMappings(connection, fileMapping);
    InputStruct inputStruct = new InputStruct(vMappings, eMappings);
    inputStruct.id("1");
    inputStruct.input(source);
    return new LoadMapping(ImmutableList.of(inputStruct));
}
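A possible follow-up, shown only as a sketch: the mapping built here could be written to disk with the same MappingUtil.write call used by MappingConverter above. The helper name persistMapping and the output path are assumptions, not project code.

// Hypothetical helper (assumption): persist the in-memory mapping so it can
// be inspected or reused, reusing MappingUtil.write from MappingConverter.
private void persistMapping(GraphConnection connection, FileMapping fileMapping) {
    LoadMapping mapping = this.buildLoadMapping(connection, fileMapping);
    MappingUtil.write(mapping, "/tmp/hugegraph-mapping.json");  // example path only
}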
Use of com.baidu.hugegraph.loader.mapping.LoadMapping in project incubator-hugegraph-toolchain by Apache.
The class LoadTaskService, method buildLoadTask.
private LoadTask buildLoadTask(GraphConnection connection, FileMapping fileMapping) {
    try {
        LoadOptions options = this.buildLoadOptions(connection, fileMapping);
        // NOTE: For simplicity, one file corresponds to one import task
        LoadMapping mapping = this.buildLoadMapping(connection, fileMapping);
        this.bindMappingToOptions(options, mapping, fileMapping.getPath());
        return new LoadTask(options, connection, fileMapping);
    } catch (Exception e) {
        Throwable rootCause = Ex.rootCause(e);
        throw new ExternalException("load.build-task.failed", rootCause);
    }
}
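The catch block unwraps the deepest cause before wrapping it in an ExternalException. Below is a self-contained illustration of that unwrapping in plain Java; it stands in for the project's Ex.rootCause helper, whose exact behavior is assumed here to be walking getCause() to the end of the chain, and the nested exceptions are invented.

import java.io.IOException;

public class RootCauseExample {
    // Assumed equivalent of Ex.rootCause: follow the cause chain to its end.
    static Throwable rootCause(Throwable t) {
        Throwable cause = t;
        while (cause.getCause() != null) {
            cause = cause.getCause();
        }
        return cause;
    }

    public static void main(String[] args) {
        Exception nested = new RuntimeException("build task failed",
                new IllegalStateException("mapping invalid",
                        new IOException("file not readable")));
        // Prints "file not readable", the deepest cause in the chain.
        System.out.println(rootCause(nested).getMessage());
    }
}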