Usage of com.baidu.hugegraph.loader.source.SourceType in the Apache incubator-hugegraph-toolchain project:
the parseV1 method of the MappingUtil class.
/**
 * Converts a legacy (v1) mapping JSON into the v2 {@code LoadMapping} model.
 *
 * File/HDFS element structs that share the same (source type, file path) key
 * are merged into a single {@code InputStruct}, while every JDBC struct gets
 * its own {@code InputStruct}. Finally a sequential string id (starting at
 * "1") is assigned to each input struct: file/HDFS inputs first, then JDBC.
 *
 * @param json the v1 mapping description, deserializable as GraphStructV1
 * @return the equivalent v2 LoadMapping
 * @throws AssertionError if an input source type other than FILE, HDFS or
 *         JDBC is encountered
 */
private static LoadMapping parseV1(String json) {
    GraphStructV1 graphStruct = JsonUtil.fromJson(json, GraphStructV1.class);
    // Preserve struct declaration order so the generated ids are stable
    Map<FileSourceKey, InputStruct> fileSourceInputStructs = InsertionOrderUtil.newMap();
    List<InputStruct> jdbcSourceInputStructs = new ArrayList<>();
    for (ElementStructV1 originStruct : graphStruct.structs()) {
        InputSource inputSource = originStruct.input();
        ElementMapping targetStruct = convertV1ToV2(originStruct);
        SourceType type = inputSource.type();
        if (type == SourceType.FILE || type == SourceType.HDFS) {
            FileSource source = (FileSource) inputSource;
            // Structs reading the same path share one InputStruct
            FileSourceKey key = new FileSourceKey(type, source.path());
            fileSourceInputStructs.compute(key, (k, inputStruct) -> {
                if (inputStruct == null) {
                    inputStruct = new InputStruct(null, null);
                    inputStruct.input(source);
                }
                inputStruct.add(targetStruct);
                return inputStruct;
            });
        } else if (type == SourceType.JDBC) {
            InputStruct inputStruct = new InputStruct(null, null);
            inputStruct.input(inputSource);
            inputStruct.add(targetStruct);
            jdbcSourceInputStructs.add(inputStruct);
        } else {
            // Fail fast even with -ea disabled, matching the style of the
            // InputProgressDeser/InputSourceDeser default branches
            throw new AssertionError(String.format("Unsupported input source '%s'", type));
        }
    }
    // Generate id for every input mapping
    List<InputStruct> inputStructs = new ArrayList<>();
    int id = 0;
    for (InputStruct inputStruct : fileSourceInputStructs.values()) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    for (InputStruct inputStruct : jdbcSourceInputStructs) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    return new LoadMapping(inputStructs);
}
Usage of com.baidu.hugegraph.loader.source.SourceType in the Apache incubator-hugegraph-toolchain project:
the readInputProgress method of the InputProgressDeser class.
/**
 * Deserializes an {@code InputProgress} from its JSON tree form.
 *
 * Expects a "type" string field naming a {@code SourceType}, a "loaded_items"
 * array and a "loading_item" object (or null). Only FILE and HDFS progress is
 * supported; both are recorded as {@code FileItemProgress} entries.
 *
 * @param node the JSON object node holding the progress fields
 * @return the reconstructed InputProgress
 * @throws AssertionError if the source type is JDBC or otherwise unsupported
 */
@SuppressWarnings("unchecked")
private static InputProgress readInputProgress(JsonNode node) {
    JsonNode typeNode = getNode(node, FIELD_TYPE, JsonNodeType.STRING);
    // Locale.ROOT keeps the enum-name casing locale-independent (e.g. the
    // Turkish dotted 'i' would otherwise break SourceType.valueOf)
    String type = typeNode.asText().toUpperCase(java.util.Locale.ROOT);
    SourceType sourceType = SourceType.valueOf(type);
    JsonNode loadedItemsNode = getNode(node, FIELD_LOADED_ITEMS, JsonNodeType.ARRAY);
    JsonNode loadingItemNode = getNode(node, FIELD_LOADING_ITEM, JsonNodeType.OBJECT, JsonNodeType.NULL);
    Set<InputItemProgress> loadedItems;
    InputItemProgress loadingItem;
    switch (sourceType) {
        case FILE:
        case HDFS:
            // Double cast widens Set<FileItemProgress> to the interface type;
            // safe because FileItemProgress implements InputItemProgress
            loadedItems = (Set<InputItemProgress>) (Object) JsonUtil.convertSet(loadedItemsNode, FileItemProgress.class);
            loadingItem = JsonUtil.convert(loadingItemNode, FileItemProgress.class);
            break;
        case JDBC:
        default:
            throw new AssertionError(String.format("Unsupported input source '%s'", type));
    }
    return new InputProgress(sourceType, loadedItems, loadingItem);
}
Usage of com.baidu.hugegraph.loader.source.SourceType in the Apache incubator-hugegraph-toolchain project:
the readInputSource method of the InputSourceDeser class.
/**
 * Deserializes an {@code InputSource} from its JSON tree form, dispatching on
 * the "type" field to the matching concrete source class.
 *
 * The "type" field is removed from the node before conversion since it does
 * not participate in deserialization; for JDBC sources the "vendor" field is
 * normalized to upper case so it matches the vendor enum constants.
 *
 * @param node the JSON object node holding the source fields
 * @return a FileSource, HDFSSource or JDBCSource depending on "type"
 * @throws AssertionError if the source type is unsupported
 */
private static InputSource readInputSource(JsonNode node) {
    JsonNode typeNode = getNode(node, FIELD_TYPE, JsonNodeType.STRING);
    // Locale.ROOT keeps the enum-name casing locale-independent (e.g. the
    // Turkish dotted 'i' would otherwise break SourceType.valueOf)
    String type = typeNode.asText().toUpperCase(java.util.Locale.ROOT);
    SourceType sourceType = SourceType.valueOf(type);
    assert node instanceof ObjectNode;
    ObjectNode objectNode = (ObjectNode) node;
    // The node 'type' doesn't participate in deserialization
    objectNode.remove(FIELD_TYPE);
    switch (sourceType) {
        case FILE:
            return JsonUtil.convert(node, FileSource.class);
        case HDFS:
            return JsonUtil.convert(node, HDFSSource.class);
        case JDBC:
            // Upper-case the vendor name in place so it maps onto the
            // JDBC vendor enum during conversion
            JsonNode vendorNode = getNode(node, FIELD_VENDOR, JsonNodeType.STRING);
            vendorNode = TextNode.valueOf(vendorNode.asText().toUpperCase(java.util.Locale.ROOT));
            objectNode.replace(FIELD_VENDOR, vendorNode);
            return JsonUtil.convert(node, JDBCSource.class);
        default:
            throw new AssertionError(String.format("Unsupported input source '%s'", type));
    }
}
Aggregations