Use of com.baidu.hugegraph.loader.mapping.InputStruct in project incubator-hugegraph-toolchain by apache.
In class MappingUtil, method parseV1:
private static LoadMapping parseV1(String json) {
    GraphStructV1 graphStruct = JsonUtil.fromJson(json, GraphStructV1.class);
    // Group file/HDFS sources by (type, path) so that element structs sharing
    // a source are merged into one InputStruct; JDBC sources stay separate
    Map<FileSourceKey, InputStruct> fileSourceInputStructs = InsertionOrderUtil.newMap();
    List<InputStruct> jdbcSourceInputStructs = new ArrayList<>();
    for (ElementStructV1 originStruct : graphStruct.structs()) {
        InputSource inputSource = originStruct.input();
        ElementMapping targetStruct = convertV1ToV2(originStruct);
        SourceType type = inputSource.type();
        if (type == SourceType.FILE || type == SourceType.HDFS) {
            FileSource source = (FileSource) inputSource;
            FileSourceKey key = new FileSourceKey(type, source.path());
            fileSourceInputStructs.compute(key, (k, inputStruct) -> {
                if (inputStruct == null) {
                    // First struct for this source: create the shared InputStruct
                    inputStruct = new InputStruct(null, null);
                    inputStruct.input(source);
                }
                inputStruct.add(targetStruct);
                return inputStruct;
            });
        } else {
            assert type == SourceType.JDBC;
            InputStruct inputStruct = new InputStruct(null, null);
            inputStruct.input(inputSource);
            inputStruct.add(targetStruct);
            jdbcSourceInputStructs.add(inputStruct);
        }
    }
    // Generate an id for every input mapping
    List<InputStruct> inputStructs = new ArrayList<>();
    int id = 0;
    for (InputStruct inputStruct : fileSourceInputStructs.values()) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    for (InputStruct inputStruct : jdbcSourceInputStructs) {
        inputStruct.id(String.valueOf(++id));
        inputStructs.add(inputStruct);
    }
    return new LoadMapping(inputStructs);
}
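The core of parseV1 is the Map.compute grouping: element structs that read from the same file or HDFS path are merged into a single InputStruct, while each JDBC source gets its own. A minimal, self-contained sketch of that pattern using only JDK types (the SourceKey record and sample data below are illustrative, not part of the loader API):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class GroupBySourceSketch {

    // Hypothetical stand-in for FileSourceKey: identity of an input source
    record SourceKey(String type, String path) {}

    public static void main(String[] args) {
        // Each entry: a mapping name plus the source type and path it reads from
        List<String[]> structs = List.of(
                new String[]{"person", "FILE", "/data/vertices.csv"},
                new String[]{"knows", "FILE", "/data/vertices.csv"},
                new String[]{"city", "HDFS", "hdfs://host/city.csv"});

        // Insertion-ordered map, playing the role of InsertionOrderUtil.newMap()
        Map<SourceKey, List<String>> grouped = new LinkedHashMap<>();
        for (String[] struct : structs) {
            SourceKey key = new SourceKey(struct[1], struct[2]);
            grouped.compute(key, (k, mappings) -> {
                if (mappings == null) {
                    mappings = new ArrayList<>();  // first struct for this source
                }
                mappings.add(struct[0]);
                return mappings;
            });
        }
        // Prints: {SourceKey[type=FILE, path=/data/vertices.csv]=[person, knows],
        //          SourceKey[type=HDFS, path=hdfs://host/city.csv]=[city]}
        System.out.println(grouped);
    }
}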
Use of com.baidu.hugegraph.loader.mapping.InputStruct in project hugegraph-computer by hugegraph.
In class LoaderFileInputSplitFetcher, method fetchEdgeInputSplits:
@Override
public List<InputSplit> fetchEdgeInputSplits() {
    List<InputSplit> splits = new ArrayList<>();
    for (InputStruct edgeInputStruct : this.edgeInputStructs) {
        FileSource source = (FileSource) edgeInputStruct.input();
        List<String> paths = this.scanPaths(source);
        if (CollectionUtils.isNotEmpty(paths)) {
            // One split per file resolved from the edge source path
            for (String path : paths) {
                FileInputSplit split = new FileInputSplit(ElemType.EDGE, edgeInputStruct, path);
                splits.add(split);
            }
        }
    }
    return splits;
}
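The vertex side of the fetcher presumably mirrors this method. A sketch under that assumption (the vertexInputStructs field is assumed here by symmetry with edgeInputStructs; it is not copied from the project source):

// Sketch only: assumes a vertexInputStructs field symmetric to edgeInputStructs
@Override
public List<InputSplit> fetchVertexInputSplits() {
    List<InputSplit> splits = new ArrayList<>();
    for (InputStruct vertexInputStruct : this.vertexInputStructs) {
        FileSource source = (FileSource) vertexInputStruct.input();
        List<String> paths = this.scanPaths(source);
        if (CollectionUtils.isNotEmpty(paths)) {
            for (String path : paths) {
                // Same split construction, but tagged as VERTEX instead of EDGE
                splits.add(new FileInputSplit(ElemType.VERTEX, vertexInputStruct, path));
            }
        }
    }
    return splits;
}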
Use of com.baidu.hugegraph.loader.mapping.InputStruct in project hugegraph-computer by hugegraph.
In class FileInputSplitTest, method testConstructor:
@Test
public void testConstructor() {
    InputStruct inputStruct = Mockito.mock(InputStruct.class);
    FileInputSplit split = new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test");
    Assert.assertEquals("/tmp/test", split.path());
    Assert.assertEquals(inputStruct, split.struct());
    Assert.assertSame(ElemType.VERTEX, split.type());
}
Use of com.baidu.hugegraph.loader.mapping.InputStruct in project hugegraph-computer by hugegraph.
In class FileInputSplitTest, method testHashCode:
@Test
public void testHashCode() {
    InputStruct inputStruct = Mockito.mock(InputStruct.class);
    FileInputSplit split1 = new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test");
    FileInputSplit split2 = new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test");
    Assert.assertEquals(split1.hashCode(), split2.hashCode());
}
Use of com.baidu.hugegraph.loader.mapping.InputStruct in project hugegraph-computer by hugegraph.
In class FileInputSplitTest, method testEquals:
@Test
public void testEquals() {
    InputStruct inputStruct = Mockito.mock(InputStruct.class);
    FileInputSplit split1 = new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test");
    FileInputSplit split2 = new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test");
    Assert.assertEquals(split1, split1);
    Assert.assertEquals(split1, split2);
    Assert.assertNotEquals(split1, null);
    Assert.assertNotEquals(split1, new Object());
    Assert.assertEquals(InputSplit.END_SPLIT, InputSplit.END_SPLIT);
    Assert.assertNotEquals(InputSplit.END_SPLIT, split1);
}
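The two tests above imply that FileInputSplit defines equals and hashCode by value over its type, struct, and path. One practical consequence, sketched here in the same test style (illustrative only, assuming the value-based contract the tests check), is that equal splits collapse when stored in hash-based collections:

// Illustrative snippet: two value-equal splits deduplicate in a HashSet
InputStruct inputStruct = Mockito.mock(InputStruct.class);
Set<InputSplit> pending = new HashSet<>();
pending.add(new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test"));
pending.add(new FileInputSplit(ElemType.VERTEX, inputStruct, "/tmp/test"));
Assert.assertEquals(1, pending.size());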