Use of ai.grakn.kb.internal.EmbeddedGraknTx in project grakn by graknlabs.
From the class CountPostProcessorTest, method setupMocks.
@Before
public void setupMocks() {
    // Stub the Redis-backed count storage to report a count of 1 for any key
    countStorage = mock(RedisCountStorage.class);
    when(countStorage.getCount(any())).thenReturn(1L);

    // Stub the engine config so the sharding threshold is 5
    configMock = mock(GraknConfig.class);
    when(configMock.getProperty(GraknConfigKey.SHARDING_THRESHOLD)).thenReturn(5L);

    // Pretend every keyspace exists
    GraknKeyspaceStore graknKeyspaceStoreMock = mock(GraknKeyspaceStore.class);
    when(graknKeyspaceStoreMock.containsKeyspace(any())).thenReturn(true);

    // Mock an embedded transaction and the factory that hands it out
    EmbeddedGraknTx txMock = mock(EmbeddedGraknTx.class);
    when(txMock.admin()).thenReturn(mock(GraknAdmin.class));

    factoryMock = mock(EngineGraknTxFactory.class);
    when(factoryMock.keyspaceStore()).thenReturn(graknKeyspaceStoreMock);
    when(factoryMock.tx(any(Keyspace.class), any())).thenReturn(txMock);

    // Locking is backed by a plain in-memory lock
    lockProviderMock = mock(LockProvider.class);
    when(lockProviderMock.getLock(any())).thenReturn(new ReentrantLock());

    metricRegistry = new MetricRegistry();

    countPostProcessor = CountPostProcessor.create(configMock, factoryMock, lockProviderMock, metricRegistry, countStorage);
}
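The setup above relies on the standard Mockito stub-then-verify pattern. The standalone sketch below illustrates only that pattern in isolation; CountStore is a hypothetical stand-in interface introduced for the example, not a grakn class.

// Standalone sketch of the Mockito pattern used in setupMocks above.
// CountStore is a hypothetical collaborator, not part of grakn.
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.junit.Test;

public class MockingPatternSketch {

    interface CountStore {
        long getCount(String key);
    }

    @Test
    public void whenStubbingACollaborator_TheStubbedValueIsReturned() {
        CountStore store = mock(CountStore.class);      // create the mock
        when(store.getCount(any())).thenReturn(1L);     // stub: any key maps to 1

        assertEquals(1L, store.getCount("some-index")); // the stub answers the call

        verify(store).getCount("some-index");           // and the interaction is recorded
    }
}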
Use of ai.grakn.kb.internal.EmbeddedGraknTx in project grakn by graknlabs.
From the class PostProcessingTest, method whenCreatingDuplicateResources_EnsureTheyAreMergedInPost.
@Test
public void whenCreatingDuplicateResources_EnsureTheyAreMergedInPost() throws InvalidKBException, InterruptedException, JsonProcessingException {
    String value = "1";
    String sample = "Sample";

    // Create a GraknTx and commit a single attribute
    EmbeddedGraknTx<?> tx = session.open(GraknTxType.WRITE);
    AttributeType<String> attributeType = tx.putAttributeType(sample, AttributeType.DataType.STRING);
    Attribute<String> attribute = attributeType.putAttribute(value);
    tx.commitSubmitNoLogs();

    tx = session.open(GraknTxType.WRITE);
    assertEquals(1, attributeType.instances().count());

    // Create duplicates of the attribute and check they exist
    Set<Vertex> resource1 = createDuplicateResource(tx, attributeType, attribute);
    Set<Vertex> resource2 = createDuplicateResource(tx, attributeType, attribute);
    Set<Vertex> resource3 = createDuplicateResource(tx, attributeType, attribute);
    Set<Vertex> resource4 = createDuplicateResource(tx, attributeType, attribute);
    assertEquals(5, attributeType.instances().count());

    // Attribute vertex index shared by the duplicates
    String resourceIndex = resource1.iterator().next().value(INDEX.name()).toString();

    // Merge the duplicate vertex sets
    Set<Vertex> merged = Sets.newHashSet();
    merged.addAll(resource1);
    merged.addAll(resource2);
    merged.addAll(resource3);
    merged.addAll(resource4);
    tx.close();

    // Convert the merged vertices into ConceptIds
    Set<ConceptId> resourceConcepts = merged.stream().map(c -> ConceptId.of(Schema.PREFIX_VERTEX + c.id().toString())).collect(toSet());

    // Create a commit log recording the duplicates under their shared index
    CommitLog commitLog = CommitLog.createDefault(tx.keyspace());
    commitLog.attributes().put(resourceIndex, resourceConcepts);

    // Submit it for post processing
    postProcessor.submit(commitLog);

    // Force the post-processing job to run
    engine.server().backgroundTaskRunner().tasks().forEach(BackgroundTask::run);
    Thread.sleep(2000);

    // Check the duplicates have been merged back into a single attribute
    tx = session.open(GraknTxType.READ);
    assertEquals(1, tx.getAttributeType(sample).instances().count());
    tx.close();
}
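One design note on the test above: the fixed Thread.sleep(2000) gives the post-processing job time to finish, but it makes the test slower than necessary and can still be flaky on a loaded machine. A generic polling helper such as the sketch below (plain Java, not grakn code; WaitUtil and waitFor are hypothetical names) could wait for the merged state instead.

// Minimal polling-helper sketch: retries a condition until it holds or the timeout expires.
import java.util.function.BooleanSupplier;

public final class WaitUtil {

    private WaitUtil() {
    }

    public static boolean waitFor(BooleanSupplier condition, long timeoutMillis, long pollMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;   // condition satisfied before the deadline
            }
            Thread.sleep(pollMillis);
        }
        return condition.getAsBoolean();  // final check at the deadline
    }
}

In the test this could replace the sleep with something like waitFor(() -> countAttributes(sample) == 1, 5000, 100), where countAttributes would be a hypothetical helper that opens a read transaction and counts the attribute's instances.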