Usage example of nl.knaw.huygens.timbuctoo.v5.filestorage.exceptions.LogStorageFailedException in the timbuctoo project by HuygensING.
From the class ViewConfigMutation, method get:
@Override
public Object get(DataFetchingEnvironment env) {
  // GraphQL mutation entry point: replaces the stored view configuration of one collection.
  String collectionUri = env.getArgument("collectionUri");
  Object viewConfig = env.getArgument("viewConfig");
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  // Only users with admin permission on this data set may change view configs.
  MutationHelpers.checkAdminPermissions(env, dataSet.getMetadata());
  try {
    // The view config is stored as a JSON string literal on the collection entity,
    // replacing any previously stored value for HAS_VIEW_CONFIG.
    MutationHelpers.addMutation(dataSet,
        new PredicateMutation()
            .entity(collectionUri,
                replace(HAS_VIEW_CONFIG, value(OBJECT_MAPPER.writeValueAsString(viewConfig)))));
    // Echo the submitted config back to the caller on success.
    return viewConfig;
  } catch (InterruptedException e) {
    // Restore the interrupt flag before rethrowing so callers up the stack
    // can still observe that the thread was interrupted.
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
  } catch (LogStorageFailedException | ExecutionException | JsonProcessingException e) {
    throw new RuntimeException(e);
  }
}
Usage example of nl.knaw.huygens.timbuctoo.v5.filestorage.exceptions.LogStorageFailedException in the timbuctoo project by HuygensING.
From the class RawUploadRdfSaverTest, method usageTest:
@Test
public void usageTest() throws LogStorageFailedException {
// End-to-end check of RawUploadRdfSaver: record two collections, each with two
// described properties and two entity rows, then compare the serialized RDF
// output verbatim against the expected quad listing.
RdfToStringFaker rdfSerializer = new RdfToStringFaker();
RawUploadRdfSaver instance = instanceWithRdfSerializer(rdfSerializer, dataSetMetadata);
// First collection: properties "propName1"/"propName2" plus two data rows.
final String collection1 = instance.addCollection("collection1");
ImportPropertyDescriptions importPropertyDescriptions = new ImportPropertyDescriptions();
importPropertyDescriptions.getOrCreate(1).setPropertyName("propName1");
importPropertyDescriptions.getOrCreate(2).setPropertyName("propName2");
instance.addPropertyDescriptions(collection1, importPropertyDescriptions);
instance.addEntity(collection1, ImmutableMap.of("propName1", "value1", "propName2", "val2"));
instance.addEntity(collection1, ImmutableMap.of("propName1", "entVal1", "propName2", "entVal2"));
// Second collection: properties "prop3"/"prop4" plus two data rows.
final String collection2 = instance.addCollection("collection2");
ImportPropertyDescriptions importPropertyDescriptions1 = new ImportPropertyDescriptions();
importPropertyDescriptions1.getOrCreate(1).setPropertyName("prop3");
importPropertyDescriptions1.getOrCreate(2).setPropertyName("prop4");
instance.addPropertyDescriptions(collection2, importPropertyDescriptions1);
instance.addEntity(collection2, ImmutableMap.of("prop3", "value1", "prop4", "val2"));
instance.addEntity(collection2, ImmutableMap.of("prop3", "entVal1", "prop4", "entVal2"));
// The faker accumulates every emitted quad as one line per quad.
String generatedRdf = rdfSerializer.toString();
// Use assertEquals because the failing Hamcrest output is hard to compare
// URI prefixes used throughout the expected output below.
String graphName = dataSetMetadata.getBaseUri();
String fileUri = dataSetMetadata.getUriPrefix() + "rawData/fileName/";
String prop = fileUri + "props/";
String rowData = fileUri + "entities/";
// Expected output, in emission order: file metadata quads, then per collection
// its type/label quads, the tim_id + declared property descriptions (chained via
// TIMBUCTOO_NEXT), and finally the rows with their property values. Generated
// row UUIDs are normalized to "{UUID}" before comparison (see replaceAll below).
assertEquals(fileUri + " " + RDF_TYPE + " " + TIM_TABULAR_FILE + " " + graphName + "\n" + graphName + " " + PROV_DERIVED_FROM + " " + fileUri + " " + graphName + "\n" + fileUri + " " + TIM_MIMETYPE + " " + "application/octet-stream" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + RDFS_LABEL + " " + FILE_NAME + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + PROV_ATTIME + " " + DATE + "^^" + XSD_DATETIMESTAMP + " " + graphName + "\n" + collection1 + " " + RDF_TYPE + " " + collection1 + "type " + graphName + "\n" + collection1 + " " + RDF_TYPE + " " + TIM_TABULAR_COLLECTION + " " + graphName + "\n" + collection1 + " " + RDFS_LABEL + " collection1" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + TIM_HASCOLLECTION + " " + collection1 + " " + graphName + "\n" + fileUri + " " + TIMBUCTOO_NEXT + " " + collection1 + " " + graphName + "\n" + prop + "tim_id " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "tim_id " + graphName + "\n" + prop + "tim_id " + TIM_PROP_ID + " -1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "tim_id " + RDFS_LABEL + " tim_id" + "^^" + STRING + " " + graphName + "\n" + prop + "propName1 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "propName1 " + graphName + "\n" + prop + "propName1 " + TIM_PROP_ID + " 1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "propName1 " + RDFS_LABEL + " propName1" + "^^" + STRING + " " + graphName + "\n" + prop + "tim_id " + TIMBUCTOO_NEXT + " " + prop + "propName1 " + graphName + "\n" + prop + "propName2 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "propName2 " + graphName + "\n" + prop + "propName2 " + TIM_PROP_ID + " 2" + "^^" + INTEGER + " " + graphName + "\n" + prop + "propName2 " + RDFS_LABEL + " propName2" + "^^" + STRING + " " + graphName + "\n" + prop + "propName1 " + 
TIMBUCTOO_NEXT + " " + prop + "propName2 " + graphName + "\n" + rowData + "1 " + RDF_TYPE + " " + collection1 + " " + graphName + "\n" + collection1 + " " + TIM_HAS_ROW + " " + rowData + "1 " + graphName + "\n" + rowData + "1 " + prop + "propName1" + " value1" + "^^" + STRING + " " + graphName + "\n" + rowData + "1 " + prop + "propName2" + " val2" + "^^" + STRING + " " + graphName + "\n" + rowData + "1 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + RDF_TYPE + " " + collection1 + " " + graphName + "\n" + collection1 + " " + TIM_HAS_ROW + " " + rowData + "2 " + graphName + "\n" + rowData + "2 " + prop + "propName1" + " entVal1" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + prop + "propName2" + " entVal2" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + collection2 + " " + RDF_TYPE + " " + collection2 + "type " + graphName + "\n" + collection2 + " " + RDF_TYPE + " " + TIM_TABULAR_COLLECTION + " " + graphName + "\n" + collection2 + " " + RDFS_LABEL + " collection2" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + TIM_HASCOLLECTION + " " + collection2 + " " + graphName + "\n" + collection1 + " " + TIMBUCTOO_NEXT + " " + collection2 + " " + graphName + "\n" + prop + "tim_id " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "tim_id " + graphName + "\n" + prop + "tim_id " + TIM_PROP_ID + " -1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "tim_id " + RDFS_LABEL + " tim_id" + "^^" + STRING + " " + graphName + "\n" + prop + "prop3 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "prop3 " + graphName + "\n" + prop + "prop3 " + TIM_PROP_ID + " 1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "prop3 " + RDFS_LABEL + " prop3" + "^^" + STRING + " " + graphName + "\n" + prop + "tim_id " + 
TIMBUCTOO_NEXT + " " + prop + "prop3 " + graphName + "\n" + prop + "prop4 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "prop4 " + graphName + "\n" + prop + "prop4 " + TIM_PROP_ID + " 2" + "^^" + INTEGER + " " + graphName + "\n" + prop + "prop4 " + RDFS_LABEL + " prop4" + "^^" + STRING + " " + graphName + "\n" + prop + "prop3 " + TIMBUCTOO_NEXT + " " + prop + "prop4 " + graphName + "\n" + rowData + "3 " + RDF_TYPE + " " + collection2 + " " + graphName + "\n" + collection2 + " " + TIM_HAS_ROW + " " + rowData + "3 " + graphName + "\n" + rowData + "3 " + prop + "prop3" + " value1" + "^^" + STRING + " " + graphName + "\n" + rowData + "3 " + prop + "prop4" + " val2" + "^^" + STRING + " " + graphName + "\n" + rowData + "3 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + RDF_TYPE + " " + collection2 + " " + graphName + "\n" + collection2 + " " + TIM_HAS_ROW + " " + rowData + "4 " + graphName + "\n" + rowData + "4 " + prop + "prop3" + " entVal1" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + prop + "prop4" + " entVal2" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n", generatedRdf.replaceAll("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}", "{UUID}"));
}
Usage example of nl.knaw.huygens.timbuctoo.v5.filestorage.exceptions.LogStorageFailedException in the timbuctoo project by HuygensING.
From the class RmlRdfCreator, method sendQuads:
@Override
public void sendQuads(RdfSerializer saver, DataSet dataSet, Consumer<String> status)
    throws LogStorageFailedException {
  // Executes the RML mapping held in rdfData against the data set's data source
  // and forwards every produced quad to the given serializer.
  RdfDataSourceFactory dataSourceFactory = dataSet.getDataSource();
  final Model model = ModelFactory.createDefaultModel();
  try {
    // The mapping document is supplied as JSON-LD text; any parse failure
    // is surfaced as a log-storage failure so the import is aborted.
    model.read(new ByteArrayInputStream(rdfData.getBytes(StandardCharsets.UTF_8)), null, "JSON-LD");
  } catch (Exception e) {
    throw new LogStorageFailedException(e);
  }
  final RmlMappingDocument rmlMappingDocument = rmlBuilder.fromRdf(model, dataSourceFactory::apply);
  if (!rmlMappingDocument.getErrors().isEmpty()) {
    // Report all mapping errors at once, one "failure:" line per error.
    throw new LogStorageFailedException(
        "failure: " + String.join("\nfailure: ", rmlMappingDocument.getErrors()) + "\n");
  }
  // FIXME: trigger onprefix for all rml prefixes
  // FIXME: store rml and retrieve it from tripleStore when mapping
  // Close the stream when done — it may hold resources of the underlying data
  // source (TODO confirm whether execute() returns a resource-backed stream).
  try (Stream<Quad> triples = rmlMappingDocument.execute(new ReportingErrorHandler(status))) {
    Iterator<Quad> iterator = triples.iterator();
    while (iterator.hasNext()) {
      Quad triple = iterator.next();
      // NOTE(review): getUri().get() assumes subject and predicate are always
      // IRIs (never blank nodes) — confirm against the Quad producer.
      saver.onQuad(
          triple.getSubject().getUri().get(),
          triple.getPredicate().getUri().get(),
          triple.getObject().getContent(),
          triple.getObject().getLiteralType().orElse(null),
          triple.getObject().getLiteralLanguage().orElse(null),
          baseUri);
    }
  }
}
Usage example of nl.knaw.huygens.timbuctoo.v5.filestorage.exceptions.LogStorageFailedException in the timbuctoo project by HuygensING.
From the class ImportManager, method generateLog:
public Future<ImportStatus> generateLog(String baseUri, String defaultGraph, RdfCreator creator)
    throws LogStorageFailedException {
  // Registers a new log entry for the given creator and schedules its processing.
  importStatus.start(this.getClass().getSimpleName() + ".generateLog", baseUri);
  try {
    // Single-element array lets the update lambda write the new entry's index
    // back out to this scope (locals captured by lambdas must be effectively final).
    final int[] entryIndex = new int[1];
    logListStore.updateData(storedList -> {
      entryIndex[0] = storedList.addEntry(LogEntry.create(baseUri, defaultGraph, creator));
      return storedList;
    });
    // Hand the actual log processing off to the executor; the returned future
    // completes once all logs up to and including the new entry are processed.
    return executorService.submit(() -> processLogsUntil(entryIndex[0]));
  } catch (IOException e) {
    // Record the failure in the import status, then surface it to the caller
    // with the original cause preserved.
    importStatus.addError("Could not update logList", e);
    throw new LogStorageFailedException(e);
  }
}
Usage example of nl.knaw.huygens.timbuctoo.v5.filestorage.exceptions.LogStorageFailedException in the timbuctoo project by HuygensING.
From the class DataSetMetadataMutation, method get:
@Override
public Object get(DataFetchingEnvironment env) {
  // GraphQL mutation entry point: updates the data set's descriptive metadata
  // (title, description, image, license, owner, contact, provenance).
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  // Only users with admin permission on this data set may edit its metadata.
  MutationHelpers.checkAdminPermissions(env, dataSet.getMetadata());
  try {
    Map md = env.getArgument("metadata");
    // Metadata predicates hang off the data set's base URI; normalize it to
    // end with a slash so derived URIs (e.g. baseUri + "rightsHolder") are valid.
    final String baseUri = dataSet.getMetadata().getBaseUri().endsWith("/")
        ? dataSet.getMetadata().getBaseUri()
        : dataSet.getMetadata().getBaseUri() + "/";
    // Each parseProp call only emits a mutation when the key is present in md,
    // so absent fields are left untouched.
    addMutation(dataSet, new PredicateMutation().entity(baseUri,
        this.<String>parseProp(md, "title", v -> replace("http://purl.org/dc/terms/title", value(v))),
        this.<String>parseProp(md, "description",
            v -> replace("http://purl.org/dc/terms/description", value(v, MARKDOWN))),
        this.<String>parseProp(md, "imageUrl",
            v -> replace("http://xmlns.com/foaf/0.1/depiction", value(v))),
        // License is stored as a resource reference, not a literal.
        this.<String>parseProp(md, "license",
            v -> replace("http://purl.org/dc/terms/license", subject(v))),
        this.<Map>parseProp(md, "owner",
            owner -> getOrCreate("http://purl.org/dc/terms/rightsHolder", baseUri + "rightsHolder",
                this.<String>parseProp(owner, "name", v -> replace("http://schema.org/name", value(v))),
                this.<String>parseProp(owner, "email", v -> replace("http://schema.org/email", value(v))))),
        this.<Map>parseProp(md, "contact",
            owner -> getOrCreate("http://schema.org/ContactPoint", baseUri + "ContactPoint",
                this.<String>parseProp(owner, "name", v -> replace("http://schema.org/name", value(v))),
                this.<String>parseProp(owner, "email", v -> replace("http://schema.org/email", value(v))))),
        this.<Map>parseProp(md, "provenanceInfo",
            owner -> getOrCreate("http://purl.org/dc/terms/provenance", baseUri + "Provenance",
                this.<String>parseProp(owner, "title", v -> replace("http://purl.org/dc/terms/title", value(v))),
                this.<String>parseProp(owner, "body",
                    v -> replace("http://purl.org/dc/terms/description", value(v, MARKDOWN)))))));
    return new DataSetWithDatabase(dataSet);
  } catch (InterruptedException e) {
    // Restore the interrupt flag before rethrowing so callers up the stack
    // can still observe that the thread was interrupted.
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
  } catch (LogStorageFailedException | ExecutionException e) {
    throw new RuntimeException(e);
  }
}
Aggregations