Use of org.neo4j.unsafe.impl.batchimport.input.InputNode in project neo4j by neo4j.
Class ParallelInputEntityDeserializerTest, method shouldTreatExternalCloseAsPanic.
// Timeout is so that if this bug strikes again it will only make this test run for a limited
// time before failing. Normally this test is really quick.
@Test(timeout = 10_000)
public void shouldTreatExternalCloseAsPanic() throws Exception {
    // GIVEN enough data to fill up the queues
    int entities = 500;
    Data<InputNode> data = testData(entities);
    Configuration config = new Configuration.Overridden(COMMAS) {
        @Override
        public int bufferSize() {
            return 100;
        }
    };
    IdType idType = ACTUAL;
    Collector badCollector = mock(Collector.class);
    Groups groups = new Groups();

    // WHEN closing before having consumed all results
    DeserializerFactory<InputNode> deserializerFactory =
            defaultNodeDeserializer(groups, config, idType, badCollector);
    try (ParallelInputEntityDeserializer<InputNode> deserializer = new ParallelInputEntityDeserializer<>(
            data, defaultFormatNodeFileHeader(), config, idType, 3, 3, deserializerFactory,
            Validators.<InputNode>emptyValidator(), InputNode.class)) {
        deserializer.hasNext();
        deserializer.receivePanic(new RuntimeException());
        // Pull a bounded number of items so that a processor which has already
        // processed items wants to go ahead and offer its result, at which point
        // it observes the panic instead of blocking forever.
        for (int i = 0; i < 100 && deserializer.hasNext(); i++) {
            deserializer.next();
        }
    } catch (TaskExecutionPanicException e) {
        // THEN it should be able to exit (this exception comes as a side effect of the panic)
    }
}
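The point of the test above: once receivePanic has been called, a consumer pulling results must observe the panic and throw (here surfacing as TaskExecutionPanicException) instead of blocking forever on batches that will never be completed. Below is a minimal, JDK-only sketch of that pattern, assuming a blocking result queue and an asynchronously set panic flag; the class and method names are made up for illustration, and this is not ParallelInputEntityDeserializer's actual implementation.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

// Illustration only: a result holder that surfaces an externally flagged
// panic to its consumer rather than letting the consumer park indefinitely.
class PanicAwareResults {
    private final BlockingQueue<String> results = new ArrayBlockingQueue<>(100);
    private final AtomicReference<Throwable> panic = new AtomicReference<>();

    // Called by producer threads as they finish batches.
    void offer(String result) throws InterruptedException {
        results.put(result);
    }

    // Called from anywhere when something goes wrong, like receivePanic above.
    void receivePanic(Throwable cause) {
        panic.set(cause);
    }

    // Consumer-facing pull: never blocks indefinitely once a panic is flagged.
    String next() throws InterruptedException {
        while (true) {
            // Poll with a short timeout so a panic flagged after we started
            // waiting is still noticed promptly.
            String result = results.poll(10, TimeUnit.MILLISECONDS);
            Throwable cause = panic.get();
            if (cause != null) {
                throw new RuntimeException("Panic while processing", cause);
            }
            if (result != null) {
                return result;
            }
        }
    }
}

The 10 ms poll interval is arbitrary; the essential property is that the consumer re-checks the panic flag between waits instead of parking on the queue unconditionally.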
Use of org.neo4j.unsafe.impl.batchimport.input.InputNode in project neo4j by neo4j.
Class ParallelInputEntityDeserializerTest, method shouldParseDataInParallel.
@Test
public void shouldParseDataInParallel() throws Exception {
    // GIVEN
    int entities = 500;
    Data<InputNode> data = testData(entities);
    Configuration config = new Configuration.Overridden(COMMAS) {
        @Override
        public int bufferSize() {
            return 100;
        }
    };
    IdType idType = ACTUAL;
    Collector badCollector = mock(Collector.class);
    Groups groups = new Groups();
    Set<Thread> observedProcessingThreads = new CopyOnWriteArraySet<>();
    int threads = 4;
    DeserializerFactory<InputNode> deserializerFactory = (header, chunk, decorator, validator) -> {
        observedProcessingThreads.add(Thread.currentThread());
        // Make sure there will be 4 different processing threads doing this:
        // spin until all of them have been observed (a CountDownLatch variant
        // is sketched after this test).
        boolean allThreadsStarted;
        do {
            allThreadsStarted = observedProcessingThreads.size() == threads;
        } while (!allThreadsStarted);
        return new InputEntityDeserializer<>(header, chunk, config.delimiter(),
                new InputNodeDeserialization(header, chunk, groups, idType.idsAreExternal()),
                decorator, validator, badCollector);
    };
    try (ParallelInputEntityDeserializer<InputNode> deserializer = new ParallelInputEntityDeserializer<>(
            data, defaultFormatNodeFileHeader(), config, idType, threads, threads, deserializerFactory,
            Validators.<InputNode>emptyValidator(), InputNode.class)) {
        // WHEN/THEN
        long previousLineNumber = -1;
        long previousPosition = -1;
        for (long i = 0; i < entities; i++) {
            assertTrue(deserializer.hasNext());
            InputNode entity = deserializer.next();
            assertEquals(i, ((Long) entity.id()).longValue());
            assertEquals("name", entity.properties()[0]);
            assertTrue(entity.properties()[1].toString().startsWith(i + "-"));
            assertTrue(entity.lineNumber() > previousLineNumber);
            previousLineNumber = entity.lineNumber();
            assertTrue(entity.position() > previousPosition);
            previousPosition = entity.position();
        }
        assertFalse(deserializer.hasNext());
        assertEquals(threads, observedProcessingThreads.size());
    }
}
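The deserializer factory above spin-waits until all four processing threads have been observed, which is acceptable in a short-lived test but burns CPU. The same barrier can be expressed without spinning using the JDK's CountDownLatch together with the thread set; the sketch below is an alternative for illustration, not the project's code, and the class name is made up.

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;

// Illustration only: block until N distinct threads have entered, counting
// each distinct thread once even if it enters multiple times.
class DistinctThreadBarrier {
    private final Set<Thread> observed = ConcurrentHashMap.newKeySet();
    private final CountDownLatch allStarted;

    DistinctThreadBarrier(int threads) {
        this.allStarted = new CountDownLatch(threads);
    }

    void await() throws InterruptedException {
        // Count down only on a thread's first arrival.
        if (observed.add(Thread.currentThread())) {
            allStarted.countDown();
        }
        // Park (no busy loop) until all distinct threads have arrived.
        allStarted.await();
    }
}

A plain CyclicBarrier would not fit directly here, since the factory may be invoked several times by the same thread; tracking distinct threads in a set keeps the count honest.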