Use of nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus in project timbuctoo by HuygensING.
The class Rml, method upload:
@POST
public Response upload(final String rdfData,
                       @PathParam("userId") final String ownerId,
                       @PathParam("dataSetId") final String dataSetId,
                       @HeaderParam("authorization") String authHeader)
    throws DataStoreCreationException, LogStorageFailedException, ExecutionException, InterruptedException {
  Optional<User> user;
  try {
    user = userValidator.getUserFromAccessToken(authHeader);
  } catch (UserValidationException e) {
    LOG.error("Exception validating user", e);
    return Response.status(Response.Status.UNAUTHORIZED).build();
  }
  if (!user.isPresent()) {
    return Response.status(Response.Status.UNAUTHORIZED).build();
  }
  final Optional<DataSet> dataSet = dataSetRepository.getDataSet(user.get(), ownerId, dataSetId);
  if (dataSet.isPresent()) {
    ImportManager importManager = dataSet.get().getImportManager();
    final String baseUri = dataSet.get().getMetadata().getBaseUri();
    Future<ImportStatus> promise =
        importManager.generateLog(baseUri, baseUri, new RmlRdfCreator(baseUri, rdfData));
    return handleImportManagerResult(promise);
  } else {
    return errorResponseHelper.dataSetNotFound(ownerId, dataSetId);
  }
}
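A minimal client-side sketch (not part of timbuctoo) of how such an endpoint could be invoked with the standard JAX-RS client API. The resource path, media type, ids and authorization value are assumptions for illustration only:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

public class RmlUploadClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    String rdfData = "...";  // the RML mapping, serialized as RDF
    Response response = client
        .target("http://localhost:8080/v5/{userId}/{dataSetId}/rml")  // hypothetical path
        .resolveTemplate("userId", "uSomeUserId")                     // hypothetical ids
        .resolveTemplate("dataSetId", "mydataset")
        .request()
        .header("authorization", "<access token>")
        .post(Entity.entity(rdfData, "application/rdf+xml"));         // assumed media type
    System.out.println(response.getStatus());
    client.close();
  }
}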
Use of nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus in project timbuctoo by HuygensING.
The class TabularUpload, method upload:
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
@POST
public Response upload(@FormDataParam("file") final InputStream rdfInputStream,
                       @FormDataParam("file") final FormDataBodyPart body,
                       @FormDataParam("file") final FormDataContentDisposition fileInfo,
                       @FormDataParam("fileMimeTypeOverride") final MediaType mimeTypeOverride,
                       FormDataMultiPart formData,
                       @HeaderParam("authorization") final String authHeader,
                       @PathParam("userId") final String ownerId,
                       @PathParam("dataSetId") final String dataSetId,
                       @QueryParam("forceCreation") boolean forceCreation)
    throws DataStoreCreationException, FileStorageFailedException, ExecutionException,
           InterruptedException, LogStorageFailedException {
  final Either<Response, Response> result = authCheck
      .getOrCreate(authHeader, ownerId, dataSetId, forceCreation)
      .flatMap(userAndDs -> authCheck.hasAdminAccess(userAndDs.getLeft(), userAndDs.getRight()))
      .map(userAndDs -> {
        final MediaType mediaType = mimeTypeOverride == null ? body.getMediaType() : mimeTypeOverride;
        Optional<Loader> loader = LoaderFactory.createFor(
            mediaType.toString(),
            formData.getFields().entrySet().stream()
                .filter(entry -> entry.getValue().size() > 0)
                .filter(entry -> entry.getValue().get(0) != null)
                .filter(entry -> MediaTypes.typeEqual(MediaType.TEXT_PLAIN_TYPE,
                    entry.getValue().get(0).getMediaType()))
                .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().get(0).getValue())));
        if (!loader.isPresent()) {
          return errorResponseHelper.error(400,
              "We do not support the mediatype '" + mediaType + "'. Make sure to add the correct mediatype to the file " +
              "parameter. In curl you'd use `-F \"file=@<filename>;type=<mediatype>\"`. In a webbrowser you probably " +
              "have no way of setting the correct mimetype. So you can use a special parameter to override it: " +
              "`formData.append(\"fileMimeTypeOverride\", \"<mimetype>\");`");
        }
        final DataSet dataSet = userAndDs.getRight();
        ImportManager importManager = dataSet.getImportManager();
        if (StringUtils.isBlank(fileInfo.getName())) {
          return Response.status(400).entity("filename cannot be empty.").build();
        }
        try {
          String fileToken = importManager.addFile(rdfInputStream, fileInfo.getFileName(), mediaType);
          Future<ImportStatus> promise = importManager.generateLog(
              dataSet.getMetadata().getBaseUri(),
              dataSet.getMetadata().getBaseUri(),
              new TabularRdfCreator(loader.get(), fileToken, fileInfo.getFileName()));
          return handleImportManagerResult(promise);
        } catch (FileStorageFailedException | LogStorageFailedException e) {
          LOG.error("Tabular upload failed", e);
          return Response.serverError().build();
        }
      });
  if (result.isLeft()) {
    return result.getLeft();
  } else {
    return result.get();
  }
}
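A hypothetical sketch (not from the project) of building the corresponding multipart request with the Jersey client. The endpoint path, field values and media types are assumptions; the fileMimeTypeOverride field mirrors the override parameter handled above and is sent as a plain-text form field, as the error message suggests:

import java.io.File;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.glassfish.jersey.client.ClientConfig;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.FileDataBodyPart;

public class TabularUploadClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient(new ClientConfig().register(MultiPartFeature.class));
    FormDataMultiPart multiPart = new FormDataMultiPart();
    // The file part; the loader is picked based on this part's media type (or the override below).
    multiPart.bodyPart(new FileDataBodyPart("file", new File("people.csv"),
        MediaType.valueOf("text/csv")));                              // assumed media type
    multiPart.field("fileMimeTypeOverride", "text/csv");              // optional override, sent as text/plain
    Response response = client
        .target("http://localhost:8080/v5/{userId}/{dataSetId}/upload/table")  // hypothetical path
        .resolveTemplate("userId", "uSomeUserId")
        .resolveTemplate("dataSetId", "mydataset")
        .queryParam("forceCreation", true)
        .request()
        .header("authorization", "<access token>")
        .post(Entity.entity(multiPart, multiPart.getMediaType()));
    System.out.println(response.getStatus());
    client.close();
  }
}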
Use of nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus in project timbuctoo by HuygensING.
The class ImportManager, method addLog:
public Future<ImportStatus> addLog(String baseUri, String defaultGraph, String fileName,
                                   InputStream rdfInputStream, Optional<Charset> charset,
                                   MediaType mediaType) throws LogStorageFailedException {
  importStatus.start(this.getClass().getSimpleName() + ".addLog", baseUri);
  // single-element array: a mutable holder, so the lambda below can write to an effectively final variable
  int[] index = new int[1];
  try {
    String token = logStorage.saveLog(rdfInputStream, fileName, mediaType, charset);
    logListStore.updateData(logList -> {
      index[0] = logList.addEntry(LogEntry.create(baseUri, defaultGraph, token));
      return logList;
    });
  } catch (IOException e) {
    importStatus.addError("Could not save log", e);
    throw new LogStorageFailedException(e);
  }
  return executorService.submit(() -> processLogsUntil(index[0]));
}
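A small usage sketch with illustrative values only (URIs, file name and charset are assumptions; an ImportManager instance and the relevant timbuctoo/JAX-RS types are assumed to be in scope). It calls addLog and blocks on the returned future, which completes once processLogsUntil has handled the new entry:

Future<ImportStatus> pending = importManager.addLog(
    "http://example.org/u123/mydataset/",          // baseUri (assumed)
    "http://example.org/u123/mydataset/",          // defaultGraph (assumption: same as baseUri, as in Rml.upload)
    "people.ttl",
    java.nio.file.Files.newInputStream(java.nio.file.Paths.get("people.ttl")),
    Optional.of(java.nio.charset.StandardCharsets.UTF_8),
    MediaType.valueOf("text/turtle"));
ImportStatus status = pending.get();               // blocks until the corresponding log entry has been processed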
Use of nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus in project timbuctoo by HuygensING.
The class ImportManager, method processLogsUntil:
private synchronized ImportStatus processLogsUntil(int maxIndex) {
  importStatus.start(this.getClass().getSimpleName() + ".processLogs", null);
  ListIterator<LogEntry> unprocessed = logListStore.getData().getUnprocessed();
  boolean dataWasAdded = false;
  while (unprocessed.hasNext() && unprocessed.nextIndex() <= maxIndex) {
    int index = unprocessed.nextIndex();
    LogEntry entry = unprocessed.next();
    importStatus.startEntry(entry);
    if (entry.getLogToken().isPresent()) {
      // the entry already has a stored log: feed it to all subscribed processors
      String logToken = entry.getLogToken().get();
      try (CachedLog log = logStorage.getLog(logToken)) {
        final Stopwatch stopwatch = Stopwatch.createStarted();
        for (RdfProcessor processor : subscribedProcessors) {
          if (processor.getCurrentVersion() <= index) {
            String msg = "******* " + processor.getClass().getSimpleName() + " Started importing full log...";
            LOG.info(msg);
            importStatus.setStatus(msg);
            RdfParser rdfParser = serializerFactory.makeRdfParser(log);
            processor.start(index);
            rdfParser.importRdf(log, entry.getBaseUri(), entry.getDefaultGraph(), processor);
            processor.commit();
          }
        }
        long elapsedTime = stopwatch.elapsed(TimeUnit.SECONDS);
        String msg = "Finished importing. Total import took " + elapsedTime + " seconds.";
        LOG.info(msg);
        importStatus.setStatus(msg);
        dataWasAdded = true;
      } catch (Exception e) {
        LOG.error("Processing log failed", e);
        importStatus.addError("Processing log failed", e);
      }
      // Update the log, even after RdfProcessingFailedException | IOException
      try {
        logListStore.updateData(logList -> {
          logList.markAsProcessed(index);
          return logList;
        });
      } catch (IOException e) {
        LOG.error("Updating the log failed", e);
        importStatus.addError("Updating log failed", e);
      }
    } else {
      // no logToken yet: generate the log from the entry's RdfCreator, store it,
      // then step back so this entry is processed again in the next iteration
      RdfCreator creator = entry.getRdfCreator().get();
      String token = "";
      MediaType mediaType;
      Optional<Charset> charset;
      File tempFile = null;
      try {
        tempFile = File.createTempFile("log_to_generate", "nq");
        try (OutputStream stream = new GZIPOutputStream(new FileOutputStream(tempFile))) {
          if (creator instanceof PlainRdfCreator) {
            try (RdfSerializer serializer = serializerFactory.makeRdfSerializer(stream)) {
              mediaType = serializer.getMediaType();
              charset = Optional.of(serializer.getCharset());
              ((PlainRdfCreator) creator).sendQuads(serializer, dataSet, importStatus::setStatus);
            } catch (Exception e) {
              LOG.error("Log generation failed", e);
              importStatus.addError("Log generation failed", e);
              break;
            }
          } else {
            try (RdfPatchSerializer srlzr = serializerFactory.makeRdfPatchSerializer(stream, entry.getBaseUri())) {
              mediaType = srlzr.getMediaType();
              charset = Optional.of(srlzr.getCharset());
              ((PatchRdfCreator) creator).sendQuads(srlzr, importStatus::setStatus, dataSet);
            } catch (Exception e) {
              LOG.error("Log generation failed", e);
              importStatus.addError("Log generation failed", e);
              break;
            }
          }
        }
        try (InputStream inputStream = new GZIPInputStream(new FileInputStream(tempFile))) {
          token = logStorage.saveLog(inputStream, "log_generated_by_" + creator.getClass().getSimpleName(),
              mediaType, charset);
        }
        LogEntry entryWithLog = LogEntry.addLogToEntry(entry, token);
        unprocessed.set(entryWithLog);
        token = "";
        // move back to process this item again
        unprocessed.previous();
      } catch (Exception e) {
        if (token.isEmpty()) {
          LOG.error("Log processing failed", e);
        } else {
          LOG.error("Log processing failed. Log created but not added to the list!", e);
        }
        importStatus.addError("Log processing failed", e);
        break;
      } finally {
        if (tempFile != null) {
          tempFile.delete();
        }
      }
    }
    importStatus.finishEntry();
  }
  if (dataWasAdded) {
    webhooks.run();
  }
  importStatus.finishList();
  // update log.json
  try {
    logListStore.updateData(Function.identity());
  } catch (IOException e) {
    LOG.error("Updating the log failed", e);
    importStatus.addError("Updating log failed", e);
  }
  return importStatus;
}
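The generate-then-reprocess step above relies on ListIterator's ability to replace the element just returned by next() and then step back over it, so the rewritten entry is visited again on the next loop iteration. A self-contained illustration of that pattern, using plain JDK types unrelated to timbuctoo:

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class ReprocessIteratorSketch {
  public static void main(String[] args) {
    List<String> entries = new ArrayList<>(List.of("pending:a", "pending:b"));
    ListIterator<String> it = entries.listIterator();
    while (it.hasNext()) {
      String entry = it.next();
      if (entry.startsWith("pending:")) {
        // replace the element just returned by next() ...
        it.set("ready:" + entry.substring("pending:".length()));
        // ... and step back so the rewritten element is handled on the next iteration
        it.previous();
      } else {
        System.out.println("processed " + entry);
      }
    }
  }
}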
Use of nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus in project timbuctoo by HuygensING.
The class RootQuery, method rebuildSchema:
public synchronized GraphQLSchema rebuildSchema() {
  final TypeDefinitionRegistry staticQuery = schemaParser.parse(this.staticQuery);
  if (archetypes != null && !archetypes.isEmpty()) {
    staticQuery.merge(schemaParser.parse(archetypes +
        "extend type DataSetMetadata {\n" +
        " archetypes: Archetypes! @passThrough\n" +
        "}\n" +
        "\n"));
  }
  TypeDefinitionRegistry registry = new TypeDefinitionRegistry();
  registry.merge(staticQuery);
  final RuntimeWiring.Builder wiring = RuntimeWiring.newRuntimeWiring();
wiring.type("Query", builder -> builder.dataFetcher("promotedDataSets", env -> dataSetRepository.getPromotedDataSets().stream().map(DataSetWithDatabase::new).collect(Collectors.toList())).dataFetcher("allDataSets", env -> dataSetRepository.getDataSets().stream().map(DataSetWithDatabase::new).filter(x -> {
if (x.isPublished()) {
return true;
} else {
ContextData contextData = env.getContext();
UserPermissionCheck userPermissionCheck = contextData.getUserPermissionCheck();
return userPermissionCheck.getPermissions(x.getDataSet().getMetadata()).contains(Permission.READ);
}
}).collect(Collectors.toList())).dataFetcher("dataSetMetadata", env -> {
final String dataSetId = env.getArgument("dataSetId");
ContextData context = env.getContext();
final User user = context.getUser().orElse(null);
Tuple<String, String> splitCombinedId = DataSetMetaData.splitCombinedId(dataSetId);
return dataSetRepository.getDataSet(user, splitCombinedId.getLeft(), splitCombinedId.getRight()).map(DataSetWithDatabase::new);
}).dataFetcher("dataSetMetadataList", env -> {
Stream<DataSetWithDatabase> dataSets = dataSetRepository.getDataSets().stream().map(DataSetWithDatabase::new);
if (env.getArgument("promotedOnly")) {
dataSets = dataSets.filter(DataSetWithDatabase::isPromoted);
}
if (env.getArgument("publishedOnly")) {
dataSets = dataSets.filter(DataSetWithDatabase::isPublished);
}
return dataSets.filter(x -> {
ContextData contextData = env.getContext();
UserPermissionCheck userPermissionCheck = contextData.getUserPermissionCheck();
return userPermissionCheck.getPermissions(x.getDataSet().getMetadata()).contains(Permission.READ);
}).collect(Collectors.toList());
}).dataFetcher("aboutMe", env -> ((RootData) env.getRoot()).getCurrentUser().orElse(null)).dataFetcher("availableExportMimetypes", env -> supportedFormats.getSupportedMimeTypes().stream().map(MimeTypeDescription::create).collect(Collectors.toList())));
wiring.type("DataSetMetadata", builder -> builder.dataFetcher("currentImportStatus", env -> {
DataSetMetaData input = env.getSource();
Optional<User> currentUser = ((RootData) env.getRoot()).getCurrentUser();
if (!currentUser.isPresent()) {
throw new RuntimeException("User is not provided");
}
return dataSetRepository.getDataSet(currentUser.get(), input.getOwnerId(), input.getDataSetId()).map(dataSet -> dataSet.getImportManager().getImportStatus());
}).dataFetcher("dataSetImportStatus", env -> {
Optional<User> currentUser = ((RootData) env.getRoot()).getCurrentUser();
if (!currentUser.isPresent()) {
throw new RuntimeException("User is not provided");
}
DataSetMetaData input = env.getSource();
return dataSetRepository.getDataSet(currentUser.get(), input.getOwnerId(), input.getDataSetId()).map(dataSet -> dataSet.getImportManager().getDataSetImportStatus());
}).dataFetcher("collectionList", env -> getCollections(env.getSource(), ((ContextData) env.getContext()).getUser())).dataFetcher("collection", env -> {
String collectionId = (String) env.getArguments().get("collectionId");
if (collectionId != null && collectionId.endsWith("List")) {
collectionId = collectionId.substring(0, collectionId.length() - "List".length());
}
DataSetMetaData input = env.getSource();
ContextData context = env.getContext();
final User user = context.getUser().orElse(null);
final DataSet dataSet = dataSetRepository.getDataSet(user, input.getOwnerId(), input.getDataSetId()).get();
final TypeNameStore typeNameStore = dataSet.getTypeNameStore();
String collectionUri = typeNameStore.makeUri(collectionId);
if (dataSet.getSchemaStore().getStableTypes() == null || dataSet.getSchemaStore().getStableTypes().get(collectionUri) == null) {
return null;
} else {
return getCollection(dataSet, typeNameStore, dataSet.getSchemaStore().getStableTypes().get(collectionUri));
}
}).dataFetcher("dataSetId", env -> ((DataSetMetaData) env.getSource()).getCombinedId()).dataFetcher("dataSetName", env -> ((DataSetMetaData) env.getSource()).getDataSetId()).dataFetcher("ownerId", env -> ((DataSetMetaData) env.getSource()).getOwnerId()));
wiring.type("CurrentImportStatus", builder -> builder.dataFetcher("elapsedTime", env -> {
final String timeUnit = env.getArgument("unit");
return ((ImportStatus) env.getSource()).getElapsedTime(timeUnit);
}));
wiring.type("DataSetImportStatus", builder -> builder.dataFetcher("lastImportDuration", env -> {
final String timeUnit = env.getArgument("unit");
return ((DataSetImportStatus) env.getSource()).getLastImportDuration(timeUnit);
}));
wiring.type("EntryImportStatus", builder -> builder.dataFetcher("elapsedTime", env -> {
final String timeUnit = env.getArgument("unit");
return ((EntryImportStatus) env.getSource()).getElapsedTime(timeUnit);
}));
wiring.type("CollectionMetadata", builder -> builder.dataFetcher("indexConfig", env -> {
SubjectReference source = env.getSource();
final QuadStore qs = source.getDataSet().getQuadStore();
try (Stream<CursorQuad> quads = qs.getQuads(source.getSubjectUri(), TIM_HASINDEXERCONFIG, Direction.OUT, "")) {
final Map result = quads.findFirst().map(q -> {
try {
return objectMapper.readValue(q.getObject(), Map.class);
} catch (IOException e) {
LOG.error("Value not a Map", e);
return new HashMap<>();
}
}).orElse(new HashMap());
if (!result.containsKey("facet") || !(result.get("facet") instanceof List)) {
result.put("facet", new ArrayList<>());
}
if (!result.containsKey("fullText") || !(result.get("fullText") instanceof List)) {
result.put("fullText", new ArrayList<>());
}
return result;
}
}).dataFetcher("viewConfig", new ViewConfigFetcher(objectMapper)));
wiring.type("AboutMe", builder -> builder.dataFetcher("dataSets", env -> (Iterable) () -> dataSetRepository.getDataSetsWithWriteAccess(env.getSource()).stream().map(DataSetWithDatabase::new).iterator()).dataFetcher("dataSetMetadataList", env -> (Iterable) () -> {
Stream<DataSetWithDatabase> dataSets = dataSetRepository.getDataSets().stream().map(DataSetWithDatabase::new);
if (env.getArgument("ownOnly")) {
String userId = ((ContextData) env.getContext()).getUser().map(u -> "u" + u.getPersistentId()).orElse(null);
dataSets = dataSets.filter(d -> d.getOwnerId().equals(userId));
}
Permission permission = Permission.valueOf(env.getArgument("permission"));
if (permission != Permission.READ) {
// Read is implied
UserPermissionCheck check = ((ContextData) env.getContext()).getUserPermissionCheck();
dataSets = dataSets.filter(d -> check.getPermissions(d).contains(permission));
}
return dataSets.iterator();
}).dataFetcher("id", env -> ((User) env.getSource()).getPersistentId()).dataFetcher("name", env -> ((User) env.getSource()).getDisplayName()).dataFetcher("personalInfo", env -> "http://example.com").dataFetcher("canCreateDataSet", env -> true));
wiring.type("Mutation", builder -> builder.dataFetcher("setViewConfig", new ViewConfigMutation(dataSetRepository)).dataFetcher("setSummaryProperties", new SummaryPropsMutation(dataSetRepository)).dataFetcher("setIndexConfig", new IndexConfigMutation(dataSetRepository)).dataFetcher("createDataSet", new CreateDataSetMutation(dataSetRepository)).dataFetcher("deleteDataSet", new DeleteDataSetMutation(dataSetRepository)).dataFetcher("publish", new MakePublicMutation(dataSetRepository)).dataFetcher("extendSchema", new ExtendSchemaMutation(dataSetRepository)).dataFetcher("setDataSetMetadata", new DataSetMetadataMutation(dataSetRepository)).dataFetcher("setCollectionMetadata", new CollectionMetadataMutation(dataSetRepository)));
wiring.wiringFactory(wiringFactory);
  StringBuilder root = new StringBuilder("type DataSets {\n sillyWorkaroundWhenNoDataSetsAreVisible: Boolean\n");
  boolean[] dataSetAvailable = new boolean[] { false };
  dataSetRepository.getDataSets().forEach(dataSet -> {
    final DataSetMetaData dataSetMetaData = dataSet.getMetadata();
    final String name = dataSetMetaData.getCombinedId();
    Map<String, Type> types = dataSet.getSchemaStore().getStableTypes();
    Map<String, List<ExplicitField>> customSchema = dataSet.getCustomSchema();
    final Map<String, Type> customTypes = new HashMap<>();
    for (Map.Entry<String, List<ExplicitField>> entry : customSchema.entrySet()) {
      ExplicitType explicitType = new ExplicitType(entry.getKey(), entry.getValue());
      customTypes.put(entry.getKey(), explicitType.convertToType());
    }
    Map<String, Type> mergedTypes;
    MergeSchemas mergeSchemas = new MergeSchemas();
    mergedTypes = mergeSchemas.mergeSchema(types, customTypes);
    types = mergedTypes;
    if (types != null) {
      dataSetAvailable[0] = true;
      root.append(" ")
          .append(name).append(":").append(name)
          .append(" @dataSet(userId:\"").append(dataSetMetaData.getOwnerId())
          .append("\", dataSetId:\"").append(dataSetMetaData.getDataSetId())
          .append("\")\n");
      wiring.type(name, c -> c.dataFetcher("metadata", env -> new DataSetWithDatabase(dataSet)));
      final String schema = typeGenerator.makeGraphQlTypes(name, types, dataSet.getTypeNameStore());
      staticQuery.merge(schemaParser.parse(schema));
    }
  });
  root.append("}\n\nextend type Query {\n #The actual dataSets\n dataSets: DataSets @passThrough\n}\n\n");
  if (dataSetAvailable[0]) {
    staticQuery.merge(schemaParser.parse(root.toString()));
  }
  SchemaGenerator schemaGenerator = new SchemaGenerator();
  return schemaGenerator.makeExecutableSchema(staticQuery, wiring.build());
}
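A brief sketch of executing a query against the schema this method returns, using the graphql-java classes RootQuery already builds on. The query text and setup are illustrative only; a real request would also have to supply the context and root objects (ContextData, RootData) that the data fetchers above read via env.getContext() and env.getRoot():

graphql.schema.GraphQLSchema schema = rootQuery.rebuildSchema();   // rootQuery: a RootQuery instance (assumed in scope)
graphql.GraphQL graphQl = graphql.GraphQL.newGraphQL(schema).build();
graphql.ExecutionInput input = graphql.ExecutionInput.newExecutionInput()
    .query("{ dataSetMetadataList(promotedOnly: true, publishedOnly: true) { dataSetId ownerId } }")
    .build();
graphql.ExecutionResult result = graphQl.execute(input);
// without a proper context/root, permission-related fetchers will report errors here
System.out.println(result.getData() + " " + result.getErrors());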