Use of nl.knaw.huygens.timbuctoo.v5.dataset.exceptions.DataStoreCreationException in project timbuctoo by HuygensING.
The class Rml, method upload.
@POST
public Response upload(final String rdfData,
                       @PathParam("userId") final String ownerId,
                       @PathParam("dataSetId") final String dataSetId,
                       @HeaderParam("authorization") String authHeader)
    throws DataStoreCreationException, LogStorageFailedException, ExecutionException, InterruptedException {
  // A missing or invalid access token results in a 401.
  Optional<User> user;
  try {
    user = userValidator.getUserFromAccessToken(authHeader);
  } catch (UserValidationException e) {
    LOG.error("Exception validating user", e);
    return Response.status(Response.Status.UNAUTHORIZED).build();
  }
  if (!user.isPresent()) {
    return Response.status(Response.Status.UNAUTHORIZED).build();
  }

  final Optional<DataSet> dataSet = dataSetRepository.getDataSet(user.get(), ownerId, dataSetId);
  if (dataSet.isPresent()) {
    ImportManager importManager = dataSet.get().getImportManager();
    final String baseUri = dataSet.get().getMetadata().getBaseUri();
    // Store the RML mapping as an import log entry; generateLog returns a Future with the import status.
    Future<ImportStatus> promise =
        importManager.generateLog(baseUri, baseUri, new RmlRdfCreator(baseUri, rdfData));
    return handleImportManagerResult(promise);
  } else {
    return errorResponseHelper.dataSetNotFound(ownerId, dataSetId);
  }
}
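The method ends by delegating to handleImportManagerResult (the TabularUpload resource below does the same). Its implementation is not part of this snippet; the sketch below only illustrates how a Future<ImportStatus> returned by ImportManager.generateLog could be turned into an HTTP response. The hasErrors() accessor is a hypothetical name, not taken from the project.

// Illustrative sketch only, not the project's handleImportManagerResult.
// Assumption: ImportStatus exposes some error check; hasErrors() is a hypothetical accessor.
private Response handleImportManagerResult(Future<ImportStatus> promise)
    throws ExecutionException, InterruptedException {
  ImportStatus status = promise.get(); // blocks until the import log has been handled
  if (status.hasErrors()) {            // hypothetical accessor
    return Response.serverError().entity(status).build();
  }
  return Response.ok(status).build();
}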
Use of nl.knaw.huygens.timbuctoo.v5.dataset.exceptions.DataStoreCreationException in project timbuctoo by HuygensING.
The class TabularUpload, method upload.
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@POST
public Response upload(@FormDataParam("file") final InputStream rdfInputStream,
                       @FormDataParam("file") final FormDataBodyPart body,
                       @FormDataParam("file") final FormDataContentDisposition fileInfo,
                       @FormDataParam("fileMimeTypeOverride") final MediaType mimeTypeOverride,
                       FormDataMultiPart formData,
                       @HeaderParam("authorization") final String authHeader,
                       @PathParam("userId") final String ownerId,
                       @PathParam("dataSetId") final String dataSetId,
                       @QueryParam("forceCreation") boolean forceCreation)
    throws DataStoreCreationException, FileStorageFailedException, ExecutionException,
           InterruptedException, LogStorageFailedException {
  final Either<Response, Response> result = authCheck
      .getOrCreate(authHeader, ownerId, dataSetId, forceCreation)
      .flatMap(userAndDs -> authCheck.hasAdminAccess(userAndDs.getLeft(), userAndDs.getRight()))
      .map(userAndDs -> {
        // An explicit override wins over the mediatype that was sent with the file part.
        final MediaType mediaType = mimeTypeOverride == null ? body.getMediaType() : mimeTypeOverride;
        // Pass all plain-text form fields on to the loader factory.
        Optional<Loader> loader = LoaderFactory.createFor(
            mediaType.toString(),
            formData.getFields().entrySet().stream()
                    .filter(entry -> entry.getValue().size() > 0)
                    .filter(entry -> entry.getValue().get(0) != null)
                    .filter(entry -> MediaTypes.typeEqual(MediaType.TEXT_PLAIN_TYPE,
                                                          entry.getValue().get(0).getMediaType()))
                    .collect(Collectors.toMap(Map.Entry::getKey,
                                              entry -> entry.getValue().get(0).getValue())));
        if (!loader.isPresent()) {
          return errorResponseHelper.error(400,
              "We do not support the mediatype '" + mediaType + "'. Make sure to add the correct mediatype to the file " +
              "parameter. In curl you'd use `-F \"file=@<filename>;type=<mediatype>\"`. In a webbrowser you probably " +
              "have no way of setting the correct mimetype. So you can use a special parameter to override it: " +
              "`formData.append(\"fileMimeTypeOverride\", \"<mimetype>\");`");
        }
        final DataSet dataSet = userAndDs.getRight();
        ImportManager importManager = dataSet.getImportManager();
        if (StringUtils.isBlank(fileInfo.getName())) {
          return Response.status(400).entity("filename cannot be empty.").build();
        }
        try {
          String fileToken = importManager.addFile(rdfInputStream, fileInfo.getFileName(), mediaType);
          Future<ImportStatus> promise = importManager.generateLog(
              dataSet.getMetadata().getBaseUri(),
              dataSet.getMetadata().getBaseUri(),
              new TabularRdfCreator(loader.get(), fileToken, fileInfo.getFileName()));
          return handleImportManagerResult(promise);
        } catch (FileStorageFailedException | LogStorageFailedException e) {
          LOG.error("Tabular upload failed", e);
          return Response.serverError().build();
        }
      });
  if (result.isLeft()) {
    return result.getLeft();
  } else {
    return result.get();
  }
}
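The error message above already tells curl users how to set the mediatype on the file part. As a client-side counterpart, the following is a minimal sketch using the Jersey multipart client. Everything outside the form field names is an assumption for illustration: the endpoint path, baseUrl, userId, dataSetId and authToken are not taken from the snippet above.

// Client-side sketch (Jersey multipart client); illustrative only.
// Needs org.glassfish.jersey.media.multipart.FormDataMultiPart and
// org.glassfish.jersey.media.multipart.file.FileDataBodyPart, and a
// javax.ws.rs.client.Client registered with MultiPartFeature.
Response uploadCsv(Client client, String baseUrl, String userId, String dataSetId, String authToken) {
  FormDataMultiPart form = new FormDataMultiPart();
  form.bodyPart(new FileDataBodyPart("file", new File("people.csv"), MediaType.valueOf("text/csv")));
  // When the caller cannot set the mediatype on the file part (e.g. a browser),
  // the resource accepts the override field mentioned in the error message above:
  form.field("fileMimeTypeOverride", "text/csv");
  return client.target(baseUrl)
      .path("v5").path(userId).path(dataSetId).path("upload").path("table") // assumed path
      .request()
      .header("authorization", authToken)
      .post(Entity.entity(form, form.getMediaType()));
}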
Use of nl.knaw.huygens.timbuctoo.v5.dataset.exceptions.DataStoreCreationException in project timbuctoo by HuygensING.
The class DataSetRepository, method loadDataSetsFromJson.
private void loadDataSetsFromJson() throws IOException {
  Map<String, Set<DataSetMetaData>> ownerMetadataMap = dataStorage.loadDataSetMetaData();
  synchronized (dataSetMap) {
    for (Map.Entry<String, Set<DataSetMetaData>> entry : ownerMetadataMap.entrySet()) {
      String ownerId = entry.getKey();
      Set<DataSetMetaData> ownerMetaDatas = entry.getValue();
      HashMap<String, DataSet> ownersSets = new HashMap<>();
      dataSetMap.put(ownerId, ownersSets);
      for (DataSetMetaData dataSetMetaData : ownerMetaDatas) {
        String dataSetName = dataSetMetaData.getDataSetId();
        try {
          ownersSets.put(dataSetName, dataSet(
              dataSetMetaData,
              executorService,
              rdfBaseUri,
              dataStoreFactory,
              () -> onUpdated.accept(dataSetMetaData.getCombinedId()),
              dataStorage.getDataSetStorage(ownerId, dataSetName)));
        } catch (DataStoreCreationException e) {
          // Re-opening a stored data set failed; surface it to the caller as an IOException.
          throw new IOException(e);
        }
      }
    }
  }
}
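A hedged sketch of how the wrapped DataStoreCreationException might surface to whatever triggers the load. The bootstrap code is not part of this snippet, and loadDataSetsFromJson itself is private, so the real entry point would be some public method on DataSetRepository; the names below are assumptions.

// Illustrative only; caller, names, and logging are assumptions.
try {
  dataSetRepository.loadDataSetsFromJson(); // in reality reached via a public repository method
} catch (IOException e) {
  if (e.getCause() instanceof DataStoreCreationException) {
    LOG.error("A stored data set could not be re-opened", e.getCause());
  }
  throw e; // re-throw so the caller decides; swallowing it would hide a broken data set
}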
Use of nl.knaw.huygens.timbuctoo.v5.dataset.exceptions.DataStoreCreationException in project timbuctoo by HuygensING.
The class DataSetRepository, method createDataSet.
public DataSet createDataSet(User user, String dataSetId)
    throws DataStoreCreationException, IllegalDataSetNameException {
  // The ownerId might not be valid (i.e. a safe string). We make it safe here:
  // dataSetId is under the control of the user so we simply throw if it's not valid
  String ownerPrefix = "u" + user.getPersistentId();
  final String baseUri = rdfIdHelper.dataSetBaseUri(ownerPrefix, dataSetId);
  String uriPrefix;
  if (!baseUri.endsWith("/") && !baseUri.endsWith("#") && !baseUri.endsWith("?")) {
    // #boo&foo=bar
    if (baseUri.contains("#") || baseUri.contains("?")) {
      if (baseUri.endsWith("&")) {
        uriPrefix = baseUri;
      } else {
        uriPrefix = baseUri + "&";
      }
    } else {
      uriPrefix = baseUri + "/";
    }
  } else {
    uriPrefix = baseUri;
  }
  final DataSetMetaData dataSet =
      new BasicDataSetMetaData(ownerPrefix, dataSetId, baseUri, uriPrefix, false, publicByDefault);
  try {
    dataStorage.getDataSetStorage(ownerPrefix, dataSetId).saveMetaData(dataSet);
  } catch (DataStorageSaveException e) {
    throw new DataStoreCreationException(e);
  }
  synchronized (dataSetMap) {
    Map<String, DataSet> userDataSets =
        dataSetMap.computeIfAbsent(ownerPrefix, key -> new HashMap<>());
    if (!userDataSets.containsKey(dataSetId)) {
      try {
        permissionFetcher.initializeOwnerAuthorization(user, dataSet.getOwnerId(), dataSet.getDataSetId());
        userDataSets.put(dataSetId, dataSet(
            dataSet,
            executorService,
            rdfBaseUri,
            dataStoreFactory,
            () -> onUpdated.accept(dataSet.getCombinedId()),
            dataStorage.getDataSetStorage(ownerPrefix, dataSetId)));
      } catch (PermissionFetchingException | AuthorizationCreationException | IOException e) {
        throw new DataStoreCreationException(e);
      }
    }
    return userDataSets.get(dataSetId);
  }
}
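The uriPrefix branch above guarantees that the prefix ends in a separator safe to append local names to. A few hypothetical inputs illustrate the rules:

// Hypothetical examples of the prefix derivation above:
//   baseUri = "http://example.org/datasets/u123/persons"   -> uriPrefix = baseUri + "/"
//   baseUri = "http://example.org/datasets?set=persons"     -> uriPrefix = baseUri + "&"  (already has a query)
//   baseUri = "http://example.org/datasets?set=persons&"    -> uriPrefix = baseUri        (already ends in a separator)
//   baseUri = "http://example.org/datasets/u123/persons#"   -> uriPrefix = baseUri        (already ends in a separator)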
Use of nl.knaw.huygens.timbuctoo.v5.dataset.exceptions.DataStoreCreationException in project timbuctoo by HuygensING.
The class DataSet, method dataSet.
public static DataSet dataSet(DataSetMetaData metadata, ExecutorService executorService, String rdfPrefix,
                              BdbEnvironmentCreator dataStoreFactory, Runnable onUpdated,
                              DataSetStorage dataSetStorage) throws IOException, DataStoreCreationException {
  String userId = metadata.getOwnerId();
  String dataSetId = metadata.getDataSetId();
  File descriptionFile = dataSetStorage.getResourceSyncDescriptionFile();
  FileStorage fileStorage = dataSetStorage.getFileStorage();
  ImportManager importManager = new ImportManager(
      dataSetStorage.getLogList(),
      fileStorage,
      fileStorage,
      dataSetStorage.getLogStorage(),
      executorService,
      dataSetStorage.getRdfIo(),
      onUpdated);
  try {
    importManager.subscribeToRdf(
        new RdfDescriptionSaver(descriptionFile, metadata.getBaseUri(), importManager.getImportStatus()));
  } catch (ParserConfigurationException | SAXException e) {
    LOG.error("Could not construct import manager of data set", e);
  }

  final TupleBinding<String> stringBinding = TupleBinding.getPrimitiveBinding(String.class);
  try {
    StringStringIsCleanHandler stringStringIsCleanHandler = new StringStringIsCleanHandler();
    BdbTripleStore quadStore = new BdbTripleStore(dataStoreFactory.getDatabase(
        userId, dataSetId, "rdfData", true, stringBinding, stringBinding, stringStringIsCleanHandler));
    final BdbTypeNameStore typeNameStore = new BdbTypeNameStore(
        new BdbBackedData(dataStoreFactory.getDatabase(
            userId, dataSetId, "typenames", false, stringBinding, stringBinding, stringStringIsCleanHandler)),
        rdfPrefix);
    final BdbSchemaStore schema = new BdbSchemaStore(
        new BdbBackedData(dataStoreFactory.getDatabase(
            userId, dataSetId, "schema", false, stringBinding, stringBinding, stringStringIsCleanHandler)),
        importManager.getImportStatus());
    final BdbTruePatchStore truePatchStore = new BdbTruePatchStore(dataStoreFactory.getDatabase(
        userId, dataSetId, "truePatch", true, stringBinding, stringBinding, stringStringIsCleanHandler));
    final TupleBinding<Integer> integerBinding = TupleBinding.getPrimitiveBinding(Integer.class);
    final UpdatedPerPatchStore updatedPerPatchStore = new UpdatedPerPatchStore(dataStoreFactory.getDatabase(
        userId, dataSetId, "updatedPerPatch", true, integerBinding, stringBinding,
        new IsCleanHandler<Integer, String>() {
          @Override
          public Integer getKey() {
            return Integer.MAX_VALUE;
          }

          @Override
          public String getValue() {
            return "isClean";
          }
        }));
    final BdbRmlDataSourceStore rmlDataSourceStore = new BdbRmlDataSourceStore(
        dataStoreFactory.getDatabase(
            userId, dataSetId, "rmlSource", true, stringBinding, stringBinding, stringStringIsCleanHandler),
        importManager.getImportStatus());
    VersionStore versionStore = new VersionStore(dataStoreFactory.getDatabase(
        userId, dataSetId, "versions", false, stringBinding, integerBinding,
        new IsCleanHandler<String, Integer>() {
          @Override
          public String getKey() {
            return "isClean";
          }

          @Override
          public Integer getValue() {
            return Integer.MAX_VALUE;
          }
        }));
    final StoreUpdater storeUpdater = new StoreUpdater(
        dataStoreFactory, quadStore, typeNameStore, truePatchStore, updatedPerPatchStore,
        Lists.newArrayList(schema, rmlDataSourceStore), versionStore, importManager.getImportStatus());
    importManager.subscribeToRdf(storeUpdater);

    ImmutableDataSet dataSet = ImmutableDataSet.builder()
        .ownerId(userId)
        .dataSetName(dataSetId)
        .bdbEnvironmentCreator(dataStoreFactory)
        .metadata(metadata)
        .quadStore(quadStore)
        .typeNameStore(typeNameStore)
        .schemaStore(schema)
        .dataSource(new RdfDataSourceFactory(rmlDataSourceStore))
        .schemaStore(schema)
        .importManager(importManager)
        .dataSetStorage(dataSetStorage)
        .build();
    importManager.init(dataSet);

    if (!quadStore.isClean() || !typeNameStore.isClean() || !schema.isClean() || !truePatchStore.isClean() ||
        !updatedPerPatchStore.isClean() || !rmlDataSourceStore.isClean() || !versionStore.isClean()) {
      // At least one store was not closed cleanly: wipe them all and rebuild from the stored logs.
      LOG.error("Data set '{}__{}' data is corrupted, starting to reimport.", userId, dataSetId);
      quadStore.empty();
      typeNameStore.empty();
      schema.empty();
      truePatchStore.empty();
      updatedPerPatchStore.empty();
      rmlDataSourceStore.empty();
      versionStore.empty();
      importManager.reprocessLogs();
    } else {
      // process unprocessed logs
      importManager.processLogs();
    }
    return dataSet;
  } catch (BdbDbCreationException e) {
    throw new DataStoreCreationException(e.getCause());
  }
}
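Two anonymous IsCleanHandler implementations appear above, one for Integer/String keys and values and one for String/Integer. Judging from those usages, the interface amounts to a pair of sentinel values marking a store as cleanly closed; the sketch below is inferred from this snippet and may differ from the actual interface in the timbuctoo code base.

// Inferred from the anonymous classes above; a sketch, not the verbatim timbuctoo interface.
public interface IsCleanHandler<KeyT, ValueT> {
  KeyT getKey();     // sentinel key written when the store is closed cleanly
  ValueT getValue(); // sentinel value stored under that key
}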