Example usage of org.wikidata.query.rdf.tool.exception.FatalException in the wikimedia/wikidata-query-rdf project.
From class RdfRepository, method execute:
/**
 * Execute some raw SPARQL against the triple store, retrying transient
 * failures via the configured retryer.
 *
 * @param type name of the parameter in which to send sparql
 * @param responseHandler parses the HTTP response into the result type
 * @param sparql the raw SPARQL to execute
 * @return results string from the server, as parsed by the handler
 * @throws ContainedException on a non-200 response from the triple store
 * @throws FatalException if the request fails even after retries
 */
protected <T> T execute(String type, ResponseHandler<T> responseHandler, String sparql) {
    log.trace("Running SPARQL: {}", sparql);
    long queryStartedAt = System.currentTimeMillis();
    // TODO we might want to look into Blazegraph's incremental update
    // reporting.....
    try {
        final ContentResponse response = retryer.call(
                () -> makeRequest(type, sparql, responseHandler.acceptHeader()).send());
        if (response.getStatus() != HttpStatus.OK_200) {
            throw new ContainedException("Non-200 response from triple store: " + response + " body=\n" + responseBodyAsString(response));
        }
        log.debug("Completed in {} ms", System.currentTimeMillis() - queryStartedAt);
        return responseHandler.parse(response);
    } catch (ExecutionException | RetryException | IOException e) {
        throw new FatalException("Error updating triple store", e);
    }
}
Another example usage of org.wikidata.query.rdf.tool.exception.FatalException in the wikimedia/wikidata-query-rdf project.
From class RdfRepository, method dateFromQuery:
/**
 * Run a query that returns just a date in the "date" binding and return its
 * result.
 *
 * @param query SPARQL query whose single result row binds "date" to a
 *        dateTime literal
 * @return the bound date as an {@link Instant}, or null if the query
 *         returned no rows or the "date" binding is absent
 * @throws FatalException if evaluating the result fails
 */
private Instant dateFromQuery(String query) {
    TupleQueryResult result = query(query);
    // NOTE(review): result is never closed here; if TupleQueryResult holds
    // server-side resources this leaks — TODO confirm against the store client.
    try {
        if (!result.hasNext()) {
            return null;
        }
        Binding maxLastUpdate = result.next().getBinding("date");
        if (maxLastUpdate == null) {
            return null;
        }
        // Note that XML calendar and Instant have the same default format
        XMLGregorianCalendar xmlCalendar = ((Literal) maxLastUpdate.getValue()).calendarValue();
        /*
         * We convert rather blindly to a GregorianCalendar because we're
         * reasonably sure all the right data is present.
         */
        GregorianCalendar calendar = xmlCalendar.toGregorianCalendar();
        // GregorianCalendar.toInstant() is equivalent to the old
        // getTime().toInstant() but skips the redundant java.util.Date hop.
        return calendar.toInstant();
    } catch (QueryEvaluationException e) {
        throw new FatalException("Error evaluating query", e);
    }
}
Another example usage of org.wikidata.query.rdf.tool.exception.FatalException in the wikimedia/wikidata-query-rdf project.
From class RdfRepository, method resultToMap:
/**
 * Collect results of the query into a multimap by first parameter.
 * Rows missing either binding are silently skipped.
 *
 * @param result Result object
 * @param keyBinding Binding name to serve as key
 * @param valueBinding Binding name to serve as values
 * @return Collection of strings resulting from the query.
 * @throws FatalException if iterating the result fails
 */
private ImmutableSetMultimap<String, String> resultToMap(TupleQueryResult result, String keyBinding, String valueBinding) {
    ImmutableSetMultimap.Builder<String, String> collected = ImmutableSetMultimap.builder();
    try {
        while (result.hasNext()) {
            BindingSet row = result.next();
            Binding key = row.getBinding(keyBinding);
            Binding value = row.getBinding(valueBinding);
            if (key != null && value != null) {
                collected.put(key.getValue().stringValue(), value.getValue().stringValue());
            }
        }
    } catch (QueryEvaluationException e) {
        throw new FatalException("Can't load results: " + e, e);
    }
    return collected.build();
}
Another example usage of org.wikidata.query.rdf.tool.exception.FatalException in the wikimedia/wikidata-query-rdf project.
From class RdfRepository, method syncFromChanges:
/**
 * Sync repository from changes list.
 *
 * Batches all changes into a single SPARQL update built from the msyncBody
 * template, then optionally verifies the inserted statements.
 *
 * @param changes List of changes.
 * @param verifyResult if true, re-query the store after the update to check
 *        the inserted statements are actually present
 * @return Number of triples modified.
 */
public int syncFromChanges(Collection<Change> changes, boolean verifyResult) {
if (changes.isEmpty()) {
// no changes, we're done
return 0;
}
// Bind the fixed prefixes/URIs the msyncBody template expects.
UpdateBuilder b = new UpdateBuilder(msyncBody);
b.bindUri("schema:about", SchemaDotOrg.ABOUT);
b.bindUri("prov:wasDerivedFrom", Provenance.WAS_DERIVED_FROM);
b.bind("uris.value", uris.value());
b.bind("uris.statement", uris.statement());
// Accumulators for the whole batch: one update covers every change.
Set<String> entityIds = newHashSetWithExpectedSize(changes.size());
List<Statement> insertStatements = new ArrayList<>();
List<Statement> entityStatements = new ArrayList<>();
List<Statement> statementStatements = new ArrayList<>();
Set<Statement> aboutStatements = new HashSet<>();
Set<String> valueSet = new HashSet<>();
for (final Change change : changes) {
if (change.getStatements() == null) {
// broken change, probably failed retrieval
continue;
}
entityIds.add(change.entityId());
insertStatements.addAll(change.getStatements());
// Partition this change's statements into the template's categories.
classifyStatements(change.getStatements(), change.entityId(), entityStatements, statementStatements, aboutStatements);
// Values/references this change no longer needs; cleaned up below.
valueSet.addAll(change.getCleanupList());
}
if (entityIds.isEmpty()) {
// If we've got no IDs, this means all change retrieval failed
log.debug("Got no valid changes, we're done");
return 0;
}
b.bindUris("entityList", entityIds, uris.entity());
b.bindStatements("insertStatements", insertStatements);
b.bindValues("entityStatements", entityStatements);
b.bindValues("statementStatements", statementStatements);
b.bindValues("aboutStatements", aboutStatements);
if (!valueSet.isEmpty()) {
// Embed a secondary cleanup query (cleanUnused template) for values
// that may now be orphaned.
UpdateBuilder cleanup = new UpdateBuilder(cleanUnused);
cleanup.bindUris("values", valueSet);
b.bind("cleanupQuery", cleanup.toString());
} else {
// Template still expects the placeholder, so bind it empty.
b.bind("cleanupQuery", "");
}
long start = System.currentTimeMillis();
int modified = execute("update", UPDATE_COUNT_RESPONSE, b.toString());
log.debug("Update query took {} millis and modified {} statements", System.currentTimeMillis() - start, modified);
if (verifyResult) {
try {
verifyStatements(entityIds, insertStatements);
} catch (QueryEvaluationException e) {
throw new FatalException("Can't load verify results: " + e, e);
}
}
return modified;
}
Another example usage of org.wikidata.query.rdf.tool.exception.FatalException in the wikimedia/wikidata-query-rdf project.
From class RdfRepository, method resultToSet:
/**
 * Collect results of the query into string set.
 * Rows without the requested binding are skipped.
 *
 * @param result Result object
 * @param binding Binding name to collect
 * @return Collection of strings resulting from the query.
 * @throws FatalException if iterating the result fails
 */
private Set<String> resultToSet(TupleQueryResult result, String binding) {
    Set<String> collected = new HashSet<>();
    try {
        while (result.hasNext()) {
            Binding bound = result.next().getBinding(binding);
            if (bound != null) {
                collected.add(bound.getValue().stringValue());
            }
        }
    } catch (QueryEvaluationException e) {
        throw new FatalException("Can't load results: " + e, e);
    }
    return collected;
}
Aggregations