Use of org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException in project OpenRefine by OpenRefine.
The class ConnectionManager, method logout:
public void logout(String mediaWikiApiEndpoint) {
    ApiConnection connection = endpointToConnection.get(mediaWikiApiEndpoint);
    if (connection != null) {
        try {
            connection.logout();
            endpointToConnection.remove(mediaWikiApiEndpoint);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        } catch (MediaWikiApiErrorException e) {
            if ("assertuserfailed".equals(e.getErrorCode())) {
                // it turns out we were already logged out
                endpointToConnection.remove(mediaWikiApiEndpoint);
            } else {
                logger.error(e.getMessage(), e);
            }
        }
    }
}
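A minimal usage sketch. It assumes ConnectionManager is accessed as a singleton and uses an illustrative endpoint URL; neither detail comes from the snippet above.

// Hypothetical usage sketch: the singleton accessor and the endpoint URL are assumptions.
ConnectionManager manager = ConnectionManager.getInstance();
manager.logout("https://www.wikidata.org/w/api.php");
// If the session had already expired, the "assertuserfailed" branch above simply
// discards the stale connection instead of reporting an error.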
Use of org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException in project OpenRefine by OpenRefine.
The class EditBatchProcessorTest, method testMultipleBatches:
@Test
public void testMultipleBatches() throws MediaWikiApiErrorException, InterruptedException, IOException {
    // Prepare test data
    MonolingualTextValue description = Datamodel.makeMonolingualTextValue("village in Nepal", "en");
    List<String> ids = new ArrayList<>();
    for (int i = 124; i < 190; i++) {
        ids.add("Q" + String.valueOf(i));
    }
    List<ItemIdValue> qids = ids.stream()
            .map(e -> Datamodel.makeWikidataItemIdValue(e))
            .collect(Collectors.toList());
    List<TermedStatementEntityEdit> batch = qids.stream()
            .map(qid -> new TermedStatementEntityEditBuilder(qid)
                    .addDescription(description, true)
                    .build())
            .collect(Collectors.toList());
    int batchSize = 50;
    List<ItemDocument> fullBatch = qids.stream()
            .map(qid -> ItemDocumentBuilder.forItemId(qid)
                    .withStatement(TestingData.generateStatement(qid, TestingData.existingId))
                    .build())
            .collect(Collectors.toList());
    List<ItemDocument> firstBatch = fullBatch.subList(0, batchSize);
    List<ItemDocument> secondBatch = fullBatch.subList(batchSize, fullBatch.size());
    when(fetcher.getEntityDocuments(toQids(firstBatch))).thenReturn(toMap(firstBatch));
    when(fetcher.getEntityDocuments(toQids(secondBatch))).thenReturn(toMap(secondBatch));

    // Run edits
    EditBatchProcessor processor = new EditBatchProcessor(fetcher, editor, batch, library, summary, maxlag, tags, batchSize);
    assertEquals(0, processor.progress());
    for (int i = 124; i < 190; i++) {
        assertEquals(processor.remainingEdits(), 190 - i);
        processor.performEdit();
    }
    assertEquals(0, processor.remainingEdits());
    assertEquals(100, processor.progress());

    // Check result
    assertEquals(new NewEntityLibrary(), library);
    verify(fetcher, times(1)).getEntityDocuments(toQids(firstBatch));
    verify(fetcher, times(1)).getEntityDocuments(toQids(secondBatch));
    for (ItemDocument doc : fullBatch) {
        verify(editor, times(1)).editEntityDocument(
                Datamodel.makeItemUpdate(
                        doc.getEntityId(),
                        doc.getRevisionId(),
                        Datamodel.makeTermUpdate(Collections.emptyList(), Collections.emptyList()),
                        Datamodel.makeTermUpdate(Collections.singletonList(description), Collections.emptyList()),
                        Collections.emptyMap(),
                        Datamodel.makeStatementUpdate(Collections.emptyList(), Collections.emptyList(), Collections.emptyList()),
                        Collections.emptyList(),
                        Collections.emptyList()),
                false, summary, tags);
    }
}
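The helpers toQids and toMap are not part of this excerpt. A minimal sketch of what they are assumed to do, given that WikibaseDataFetcher.getEntityDocuments takes a list of ID strings and returns a map of documents keyed by ID:

// Assumed helpers, not shown in the test class excerpt above: they only adapt the
// stub documents to the shape expected by WikibaseDataFetcher.getEntityDocuments.
private List<String> toQids(List<ItemDocument> docs) {
    return docs.stream()
            .map(doc -> doc.getEntityId().getId())
            .collect(Collectors.toList());
}

private Map<String, EntityDocument> toMap(List<ItemDocument> docs) {
    return docs.stream()
            .collect(Collectors.toMap(doc -> doc.getEntityId().getId(), doc -> doc));
}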
Use of org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException in project OpenRefine by OpenRefine.
The class EditBatchProcessorTest, method testMultipleBatchesMediaInfo:
@Test
public void testMultipleBatchesMediaInfo() throws MediaWikiApiErrorException, InterruptedException, IOException {
    // Prepare test data
    MonolingualTextValue label = Datamodel.makeMonolingualTextValue("village in Nepal", "en");
    List<MonolingualTextValue> labels = Collections.singletonList(label);
    TermUpdate labelsUpdate = Datamodel.makeTermUpdate(labels, Collections.emptyList());
    List<String> ids = new ArrayList<>();
    for (int i = 124; i < 190; i++) {
        ids.add("M" + String.valueOf(i));
    }
    List<MediaInfoIdValue> mids = ids.stream()
            .map(e -> Datamodel.makeWikimediaCommonsMediaInfoIdValue(e))
            .collect(Collectors.toList());
    List<TermedStatementEntityEdit> batch = mids.stream()
            .map(mid -> new TermedStatementEntityEditBuilder(mid)
                    .addLabel(label, false)
                    .build())
            .collect(Collectors.toList());
    int batchSize = 50;
    List<MediaInfoDocument> fullBatch = mids.stream()
            .map(mid -> Datamodel.makeMediaInfoDocument(mid))
            .collect(Collectors.toList());
    List<MediaInfoDocument> firstBatch = fullBatch.subList(0, batchSize);
    List<MediaInfoDocument> secondBatch = fullBatch.subList(batchSize, fullBatch.size());
    when(fetcher.getEntityDocuments(toMids(firstBatch))).thenReturn(toMapMediaInfo(firstBatch));
    when(fetcher.getEntityDocuments(toMids(secondBatch))).thenReturn(toMapMediaInfo(secondBatch));

    // Run edits
    EditBatchProcessor processor = new EditBatchProcessor(fetcher, editor, batch, library, summary, maxlag, tags, batchSize);
    assertEquals(0, processor.progress());
    for (int i = 124; i < 190; i++) {
        assertEquals(processor.remainingEdits(), 190 - i);
        processor.performEdit();
    }
    assertEquals(0, processor.remainingEdits());
    assertEquals(100, processor.progress());

    // Check result
    assertEquals(new NewEntityLibrary(), library);
    verify(fetcher, times(1)).getEntityDocuments(toMids(firstBatch));
    verify(fetcher, times(1)).getEntityDocuments(toMids(secondBatch));
    for (MediaInfoDocument doc : fullBatch) {
        StatementUpdate statementUpdate = Datamodel.makeStatementUpdate(Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
        verify(editor, times(1)).editEntityDocument(
                Datamodel.makeMediaInfoUpdate((MediaInfoIdValue) doc.getEntityId(), doc.getRevisionId(), labelsUpdate, statementUpdate),
                false, summary, tags);
    }
}
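The MediaInfo variant relies on analogous helpers that are also not shown here; a sketch under the same assumption as toQids/toMap above:

// Assumed helpers for the MediaInfo test, mirroring toQids/toMap.
private List<String> toMids(List<MediaInfoDocument> docs) {
    return docs.stream()
            .map(doc -> doc.getEntityId().getId())
            .collect(Collectors.toList());
}

private Map<String, EntityDocument> toMapMediaInfo(List<MediaInfoDocument> docs) {
    return docs.stream()
            .collect(Collectors.toMap(doc -> doc.getEntityId().getId(), doc -> doc));
}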
Use of org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException in project OpenRefine by OpenRefine.
The class EditBatchProcessor, method performEdit:
/**
 * Performs the next edit in the batch.
 *
 * @throws InterruptedException
 */
public void performEdit() throws InterruptedException {
    if (remainingEdits() == 0) {
        return;
    }
    if (batchCursor == currentBatch.size()) {
        prepareNewBatch();
    }
    TermedStatementEntityEdit update = currentBatch.get(batchCursor);

    // Rewrite mentions to new entities
    ReconEntityRewriter rewriter = new ReconEntityRewriter(library, update.getEntityId());
    try {
        update = rewriter.rewrite(update);
    } catch (NewEntityNotCreatedYetException e) {
        logger.warn("Failed to rewrite update on entity " + update.getEntityId()
                + ". Missing entity: " + e.getMissingEntity() + ". Skipping update.");
        batchCursor++;
        return;
    }

    try {
        // New entities
        if (update.isNew()) {
            ReconEntityIdValue newCell = (ReconEntityIdValue) update.getEntityId();
            // TODO Antonin, 2022-02-11: remove this casting once we have
            // https://github.com/Wikidata/Wikidata-Toolkit/issues/651
            if (newCell instanceof ItemIdValue) {
                update = update.normalizeLabelsAndAliases();
                ItemDocument itemDocument = (ItemDocument) update.toNewEntity();
                ItemDocument createdDoc = editor.createItemDocument(itemDocument, summary, tags);
                library.setId(newCell.getReconInternalId(), createdDoc.getEntityId().getId());
            } else if (newCell instanceof MediaInfoIdValue) {
                update = update.normalizeLabelsAndAliases();
                throw new NotImplementedException();
            }
        } else {
            // Existing entities
            EntityUpdate entityUpdate = update.toEntityUpdate(currentDocs.get(update.getEntityId().getId()));
            editor.editEntityDocument(entityUpdate, false, summary, tags);
        }
    } catch (MediaWikiApiErrorException e) {
        // TODO find a way to report these errors to the user in a nice way
        logger.warn("MediaWiki error while editing [" + e.getErrorCode() + "]: " + e.getErrorMessage());
    } catch (IOException e) {
        logger.warn("IO error while editing: " + e.getMessage());
    }
    batchCursor++;
}
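A minimal sketch of how a caller drives the processor, mirroring the loop used in the tests above; the construction arguments are the same values the tests pass in and are assumed to be available to the caller.

// Sketch of the driving loop, based on the test usage above.
EditBatchProcessor processor = new EditBatchProcessor(
        fetcher, editor, batch, library, summary, maxlag, tags, batchSize);
while (processor.remainingEdits() > 0) {
    processor.performEdit(); // errors are logged and the cursor still advances
}
int percentDone = processor.progress(); // reaches 100 once every edit has been attempted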
Use of org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException in project OpenRefine by OpenRefine.
The class ConnectionManager, method login:
/**
 * Logs in to the Wikibase instance, using cookies.
 * <p>
 * If the login fails, no connection is registered for this endpoint.
 *
 * @param mediaWikiApiEndpoint the API endpoint of the target Wikibase instance
 * @param username the username to log in with
 * @param cookies the cookies used to log in
 * @return true if logged in successfully, false otherwise
 */
public boolean login(String mediaWikiApiEndpoint, String username, List<Cookie> cookies) {
    cookies.forEach(cookie -> cookie.setPath("/"));
    Map<String, Object> map = new HashMap<>();
    map.put("baseUrl", mediaWikiApiEndpoint);
    map.put("cookies", cookies);
    map.put("username", username);
    map.put("loggedIn", true);
    map.put("tokens", Collections.emptyMap());
    map.put("connectTimeout", CONNECT_TIMEOUT);
    map.put("readTimeout", READ_TIMEOUT);
    try {
        BasicApiConnection connection = convertToBasicApiConnection(map);
        connection.checkCredentials();
        endpointToConnection.put(mediaWikiApiEndpoint, connection);
        return true;
    } catch (IOException | MediaWikiApiErrorException e) {
        logger.error(e.getMessage(), e);
        return false;
    }
}
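convertToBasicApiConnection is not shown in this excerpt. A plausible sketch only, assuming BasicApiConnection can be deserialized from the prepared map with Jackson; the project's actual implementation may differ.

// Plausible sketch, not the project's confirmed implementation: round-trips the map
// through JSON so Jackson can build a BasicApiConnection from it.
static BasicApiConnection convertToBasicApiConnection(Map<String, Object> map) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(map);
    return mapper.readValue(json, BasicApiConnection.class);
}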