Use of org.wikidata.query.rdf.tool.rdf.ConsumerPatch in the wikimedia/wikidata-query-rdf project: class UpdatePatchAccumulatorUnitTest, method test_add_then_remove_should_try_to_prune_removed_entity_shared_triples.
@Test
public void test_add_then_remove_should_try_to_prune_removed_entity_shared_triples() {
    PatchAccumulator accumulator = new PatchAccumulator(deserializer);

    // Seed an unrelated entity that also references the "uri:linked-shared" triple.
    accumulateDiff(accumulator, "UNRELATED",
            singletonList(stmt("uri:added-unrelated-entity")),
            singletonList(stmt("uri:deleted-unrelated-entity")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:unlinked-shared")));

    // Q1 contributes its own triples plus two shared ones ("uri:linked-shared" and "uri:").
    accumulateDiff(accumulator, "Q1",
            singletonList(stmt("uri:added-Q1")),
            singletonList(stmt("uri:deleted-Q1")),
            asList(stmt("uri:linked-shared"), stmt("uri:")),
            singletonList(stmt("uri:unlinked-shared")));
    accumulateDiff(accumulator, "Q1",
            emptyList(),
            emptyList(),
            singletonList(stmt("uri:")),
            emptyList());

    // Deleting Q1 should prune everything Q1-specific, including shared triples
    // that only Q1 referenced ("uri:"), while keeping triples still used by UNRELATED.
    accumulateDelete(accumulator, "Q1");

    ConsumerPatch expected = new ConsumerPatch(
            singletonList(stmt("uri:added-unrelated-entity")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:deleted-unrelated-entity")),
            singletonList(stmt("uri:unlinked-shared")),
            singletonList("Q1"),
            Collections.emptyMap());
    assertThat(accumulator.asPatch())
            .withFailMessage("Deleting an entity should create a patch without any triples for this entity even shared ones")
            .isEqualTo(expected);
}
Use of org.wikidata.query.rdf.tool.rdf.ConsumerPatch in the wikimedia/wikidata-query-rdf project: class UpdatePatchAccumulatorUnitTest, method test_cannot_accumulate_similar_triples_for_unrelated_entities.
@Test
public void test_cannot_accumulate_similar_triples_for_unrelated_entities() {
    PatchAccumulator accumulator = new PatchAccumulator(deserializer);

    // Seed the accumulator with a diff owned by Q1.
    accumulateDiff(accumulator, "Q1",
            singletonList(stmt("uri:added")),
            singletonList(stmt("uri:removed")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:unlinked-shared")));

    // Re-adding or re-deleting the very same triple under a different entity is rejected.
    assertThatThrownBy(
            () -> accumulateDiff(accumulator, "Q2",
                    singletonList(stmt("uri:added")), emptyList(), emptyList(), emptyList()),
            "Cannot add the same triple for a different entity (should probably be considered as a shared statement)")
            .isInstanceOf(IllegalArgumentException.class);
    assertThatThrownBy(
            () -> accumulateDiff(accumulator, "Q2",
                    emptyList(), singletonList(stmt("uri:removed")), emptyList(), emptyList()),
            "Cannot delete the same triple for a different entity (should probably be considered as a shared statement)")
            .isInstanceOf(IllegalArgumentException.class);

    // Shared statements, however, may be accumulated by several entities without
    // altering the resulting patch.
    ConsumerPatch expectedPatch = accumulator.asPatch();
    accumulateDiff(accumulator, "Q2",
            emptyList(),
            emptyList(),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:unlinked-shared")));
    assertThat(accumulator.asPatch())
            .withFailMessage("Accumulating same shared statements for different entities should result in the same patch")
            .isEqualTo(expectedPatch);
}
Use of org.wikidata.query.rdf.tool.rdf.ConsumerPatch in the wikimedia/wikidata-query-rdf project: class KafkaStreamConsumerUnitTest, method test_prefer_reassembled_message.
@Test
public void test_prefer_reassembled_message() {
    int totalChunks = 250;
    TopicPartition partition = new TopicPartition("test", 0);

    // Build one diff event per chunk; together they form a single reassembled mutation
    // of `totalChunks` triples for entity Q1.
    List<ConsumerRecord<String, MutationEventData>> records = IntStream.range(0, totalChunks)
            .mapToObj(chunkIndex -> {
                EventsMeta meta = new EventsMeta(
                        Instant.EPOCH, UUID.randomUUID().toString(), TEST_DOMAIN, TESTED_STREAM, "unused");
                RDFDataChunk chunk = new RDFDataChunk(
                        "<uri:a> <uri:a> <uri:" + chunkIndex + "> .\n",
                        RDFFormat.TURTLE.getDefaultMIMEType());
                MutationEventData diff = new DiffEventData(
                        meta, "Q1", 1, Instant.EPOCH, chunkIndex, totalChunks,
                        MutationEventData.DIFF_OPERATION, chunk, null, null, null);
                return new ConsumerRecord<String, MutationEventData>(
                        partition.topic(), partition.partition(), chunkIndex, null, diff);
            })
            .collect(toList());

    // Deliver the records in two polls (first half, second half), then nothing.
    ConsumerRecords<String, MutationEventData> firstHalf =
            new ConsumerRecords<>(singletonMap(partition, records.subList(0, totalChunks / 2)));
    ConsumerRecords<String, MutationEventData> secondHalf =
            new ConsumerRecords<>(singletonMap(partition, records.subList(totalChunks / 2, records.size())));
    when(consumer.poll(any())).thenReturn(firstHalf, secondHalf, new ConsumerRecords<>(emptyMap()));

    KafkaStreamConsumer streamConsumer = new KafkaStreamConsumer(
            consumer, partition, chunkDeser, 10,
            KafkaStreamConsumerMetricsListener.forRegistry(new MetricRegistry()), m -> true);

    // The consumer should hand back the fully reassembled message in one batch.
    StreamConsumer.Batch batch = streamConsumer.poll(Duration.ofMillis(100));
    assertThat(batch).isNotNull();
    ConsumerPatch patch = batch.getPatch();
    assertThat(patch.getAdded().size()).isEqualTo(totalChunks);
    streamConsumer.acknowledge();

    // Nothing further is buffered once the batch has been acknowledged.
    batch = streamConsumer.poll(Duration.ofMillis(100));
    assertThat(batch).isNull();
}
Use of org.wikidata.query.rdf.tool.rdf.ConsumerPatch in the wikimedia/wikidata-query-rdf project: class UpdatePatchAccumulatorUnitTest, method test_add_then_remove_same_entity_should_create_a_path_without_triples_for_this_entity.
@Test
public void test_add_then_remove_same_entity_should_create_a_path_without_triples_for_this_entity() {
    PatchAccumulator accumulator = new PatchAccumulator(deserializer);

    // One unrelated entity and Q1, both referencing the same shared triples.
    accumulateDiff(accumulator, "UNRELATED",
            singletonList(stmt("uri:added-unrelated-entity")),
            singletonList(stmt("uri:deleted-unrelated-entity")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:unlinked-shared")));
    accumulateDiff(accumulator, "Q1",
            singletonList(stmt("uri:added-Q1")),
            singletonList(stmt("uri:deleted-Q1")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:unlinked-shared")));

    // Deleting Q1 must drop its triples from the patch while keeping the
    // unrelated entity's triples and the shared ones it still references.
    accumulateDelete(accumulator, "Q1");

    ConsumerPatch expected = new ConsumerPatch(
            singletonList(stmt("uri:added-unrelated-entity")),
            singletonList(stmt("uri:linked-shared")),
            singletonList(stmt("uri:deleted-unrelated-entity")),
            singletonList(stmt("uri:unlinked-shared")),
            singletonList("Q1"),
            Collections.emptyMap());
    assertThat(accumulator.asPatch())
            .withFailMessage("Deleting an entity should create a patch without any triples for this entity")
            .isEqualTo(expected);
}
Use of org.wikidata.query.rdf.tool.rdf.ConsumerPatch in the wikimedia/wikidata-query-rdf project: class UpdatePatchAccumulatorUnitTest, method test_duplicated_values_can_be_accumulated.
@Test
public void test_duplicated_values_can_be_accumulated() {
    PatchAccumulator accumulator = new PatchAccumulator(deserializer);
    // Chunk size of Integer.MAX_VALUE so the whole diff is serialized as a single event.
    MutationEventDataGenerator bigChunkEventGenerator = new MutationEventDataGenerator(
            serializer, RDFFormat.TURTLE.getDefaultMIMEType(), Integer.MAX_VALUE);

    // Every triple is passed twice; the accumulator is expected to deduplicate them.
    accumulateDiff(accumulator, "Q1",
            asList(stmt("uri:added-1"), stmt("uri:added-1")),
            asList(stmt("uri:removed-1"), stmt("uri:removed-1")),
            asList(stmt("uri:linked-shared"), stmt("uri:linked-shared")),
            asList(stmt("uri:unlinked-shared"), stmt("uri:unlinked-shared")),
            bigChunkEventGenerator);

    ConsumerPatch actual = accumulator.asPatch();
    // containsExactlyInAnyOrder (rather than contains) asserts that each triple appears
    // exactly once, which is what actually verifies the deduplication under test.
    assertThat(actual.getAdded()).containsExactlyInAnyOrder(stmt("uri:added-1"));
    assertThat(actual.getRemoved()).containsExactlyInAnyOrder(stmt("uri:removed-1"));
    assertThat(actual.getLinkedSharedElements()).containsExactlyInAnyOrder(stmt("uri:linked-shared"));
    assertThat(actual.getUnlinkedSharedElements()).containsExactlyInAnyOrder(stmt("uri:unlinked-shared"));
}
Aggregations