Use of org.neo4j.unsafe.impl.batchimport.cache.NodeRelationshipCache in the neo4j project: method shouldCollectBadRelationships of the class CalculateDenseNodesStepTest.
/**
 * Verifies that relationships whose input node ids failed to resolve (the string
 * ids "a".."d", each paired with an actual id of -1) are reported to the
 * {@link Collector}, with exactly one report per unresolved id.
 */
@Test
public void shouldCollectBadRelationships() throws Exception {
// GIVEN
NodeRelationshipCache cache = mock(NodeRelationshipCache.class);
Collector collector = mock(Collector.class);
try (CalculateDenseNodesStep step = new CalculateDenseNodesStep(mock(StageControl.class), DEFAULT, cache, collector)) {
step.processors(4);
step.start(0);
// WHEN
// NOTE(review): the "<--" comments appeared shifted one entry up in the original text;
// relocated here to match the ids verified below ("a", "b", "c", "d").
Batch<InputRelationship, RelationshipRecord> batch = batch(relationship(1, 5), relationship(3, 10), // two relationships with resolvable numeric node ids
relationship("a", 2, -1, 2), // <-- bad relationship with missing start node ("a" resolved to -1)
relationship(2, "b", 2, -1), // <-- bad relationship with missing end node ("b" resolved to -1)
relationship("c", "d", -1, -1)); // <-- bad relationship with missing start and end node
step.receive(0, batch);
step.endOfUpstream();
// The step processes the batch asynchronously; spin until it reports completion.
while (!step.isCompleted()) {
//wait
}
// THEN
// Exactly one bad-relationship report per unresolved input id.
verify(collector, times(1)).collectBadRelationship(any(InputRelationship.class), eq("a"));
verify(collector, times(1)).collectBadRelationship(any(InputRelationship.class), eq("b"));
verify(collector, times(1)).collectBadRelationship(any(InputRelationship.class), eq("c"));
verify(collector, times(1)).collectBadRelationship(any(InputRelationship.class), eq("d"));
}
}
Use of org.neo4j.unsafe.impl.batchimport.cache.NodeRelationshipCache in the neo4j project: method shouldNotProcessLoopsTwice of the class CalculateDenseNodesStepTest.
/**
 * A relationship whose start and end node are the same node (a loop) must bump
 * that node's relationship count only once, not once per relationship end.
 */
@Test
public void shouldNotProcessLoopsTwice() throws Exception {
// GIVEN
NodeRelationshipCache relationshipCounts = mock(NodeRelationshipCache.class);
try (CalculateDenseNodesStep step = new CalculateDenseNodesStep(mock(StageControl.class), DEFAULT, relationshipCounts, mock(Collector.class))) {
step.processors(4);
step.start(0);
// WHEN
Batch<InputRelationship, RelationshipRecord> relationships = batch(
relationship(1, 5),
relationship(3, 10),
relationship(2, 2), // <-- the loop
relationship(4, 1));
step.receive(0, relationships);
step.endOfUpstream();
while (!step.isCompleted()) {
// busy-wait: the step drains the batch asynchronously
}
// THEN
// Node 1 takes part in two relationships; the loop node (2) is counted once only.
verify(relationshipCounts, times(2)).incrementCount(eq(1L));
verify(relationshipCounts, times(1)).incrementCount(eq(2L));
verify(relationshipCounts, times(1)).incrementCount(eq(3L));
verify(relationshipCounts, times(1)).incrementCount(eq(4L));
verify(relationshipCounts, times(1)).incrementCount(eq(5L));
verify(relationshipCounts, times(1)).incrementCount(eq(10L));
}
}
Use of org.neo4j.unsafe.impl.batchimport.cache.NodeRelationshipCache in the neo4j project: method doImport of the class ParallelBatchImporter.
/**
 * Runs the full parallel batch import: node stage, id-mapper preparation,
 * dense-node calculation, relationship import, relationship-group
 * defragmentation and finally the label/relationship count stages,
 * logging timing and peak memory usage at the end.
 *
 * @param input source of nodes and relationships, plus the id mapper/generator
 *              and the collector for bad input entries
 * @throws IOException if any underlying store or cache operation fails
 */
@Override
public void doImport(Input input) throws IOException {
log.info("Import starting");
// Things that we need to close later. The reason they're not in the try-with-resource statement
// is that we need to close, and set to null, at specific points preferably. So use good ol' finally block.
NodeRelationshipCache nodeRelationshipCache = null;
NodeLabelsCache nodeLabelsCache = null;
long startTime = currentTimeMillis();
CountingStoreUpdateMonitor storeUpdateMonitor = new CountingStoreUpdateMonitor();
try (BatchingNeoStores neoStore = getBatchingNeoStores();
CountsAccessor.Updater countsUpdater = neoStore.getCountsStore().reset(neoStore.getLastCommittedTransactionId());
InputCache inputCache = new InputCache(fileSystem, storeDir, recordFormats, config)) {
Collector badCollector = input.badCollector();
// Some temporary caches and indexes in the import
IoMonitor writeMonitor = new IoMonitor(neoStore.getIoTracer());
IdMapper idMapper = input.idMapper();
IdGenerator idGenerator = input.idGenerator();
nodeRelationshipCache = new NodeRelationshipCache(AUTO, config.denseNodeThreshold());
StatsProvider memoryUsageStats = new MemoryUsageStatsProvider(nodeRelationshipCache, idMapper);
InputIterable<InputNode> nodes = input.nodes();
InputIterable<InputRelationship> relationships = input.relationships();
// Presumably spills the first pass over the input into the input cache so later
// passes can re-read it — TODO confirm cachedForSure semantics.
InputIterable<InputNode> cachedNodes = cachedForSure(nodes, inputCache.nodes(MAIN, true));
InputIterable<InputRelationship> cachedRelationships = cachedForSure(relationships, inputCache.relationships(MAIN, true));
RelationshipStore relationshipStore = neoStore.getRelationshipStore();
// Stage 1 -- nodes, properties, labels
NodeStage nodeStage = new NodeStage(config, writeMonitor, nodes, idMapper, idGenerator, neoStore, inputCache, neoStore.getLabelScanStore(), storeUpdateMonitor, nodeRelationshipCache, memoryUsageStats);
executeStage(nodeStage);
if (idMapper.needsPreparation()) {
// Prepare the id mapper for lookups, then physically delete any nodes it
// flagged as duplicates during preparation.
executeStage(new IdMapperPreparationStage(config, idMapper, cachedNodes, badCollector, memoryUsageStats));
PrimitiveLongIterator duplicateNodeIds = badCollector.leftOverDuplicateNodesIds();
if (duplicateNodeIds.hasNext()) {
executeStage(new DeleteDuplicateNodesStage(config, duplicateNodeIds, neoStore));
}
}
// Stage 2 -- calculate dense node threshold
CalculateDenseNodesStage calculateDenseNodesStage = new CalculateDenseNodesStage(withBatchSize(config, config.batchSize() * 10), relationships, nodeRelationshipCache, idMapper, badCollector, inputCache, neoStore);
executeStage(calculateDenseNodesStage);
importRelationships(nodeRelationshipCache, storeUpdateMonitor, neoStore, writeMonitor, idMapper, cachedRelationships, inputCache, calculateDenseNodesStage.getRelationshipTypes(Long.MAX_VALUE), calculateDenseNodesStage.getRelationshipTypes(100));
// Release this potentially really big piece of cached data
// Capture memory/high-id figures before closing, since they're needed below.
long peakMemoryUsage = totalMemoryUsageOf(idMapper, nodeRelationshipCache);
long highNodeId = nodeRelationshipCache.getHighNodeId();
idMapper.close();
idMapper = null;
nodeRelationshipCache.close();
// Null out so the finally block below doesn't close it a second time.
nodeRelationshipCache = null;
new RelationshipGroupDefragmenter(config, executionMonitor).run(max(max(peakMemoryUsage, highNodeId * 4), mebiBytes(1)), neoStore, highNodeId);
// Stage 6 -- count nodes per label and labels per node
nodeLabelsCache = new NodeLabelsCache(AUTO, neoStore.getLabelRepository().getHighId());
memoryUsageStats = new MemoryUsageStatsProvider(nodeLabelsCache);
executeStage(new NodeCountsStage(config, nodeLabelsCache, neoStore.getNodeStore(), neoStore.getLabelRepository().getHighId(), countsUpdater, memoryUsageStats));
// Stage 7 -- count label-[type]->label
executeStage(new RelationshipCountsStage(config, nodeLabelsCache, relationshipStore, neoStore.getLabelRepository().getHighId(), neoStore.getRelationshipTypeRepository().getHighId(), countsUpdater, AUTO));
// We're done, do some final logging about it
long totalTimeMillis = currentTimeMillis() - startTime;
executionMonitor.done(totalTimeMillis, format("%n") + storeUpdateMonitor.toString() + format("%n") + "Peak memory usage: " + bytes(peakMemoryUsage));
log.info("Import completed, took " + Format.duration(totalTimeMillis) + ". " + storeUpdateMonitor);
} catch (Throwable t) {
log.error("Error during import", t);
// Re-throws as IOException when possible, otherwise launders to unchecked.
throw Exceptions.launderedException(IOException.class, t);
} finally {
// Close whatever is still open, e.g. after an exception mid-import.
if (nodeRelationshipCache != null) {
nodeRelationshipCache.close();
}
if (nodeLabelsCache != null) {
nodeLabelsCache.close();
}
}
}
Aggregations