Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet in project timbuctoo by HuygensING.
The class RawUploadRdfSaverTest, method setUp.
@Before
public void setUp() throws Exception {
  rdfSerializer = mock(RdfSerializer.class);
  dataSetMetadata = new BasicDataSetMetaData("userid", "dataset",
    "http://timbuctoo.huygens.knaw.nl/v5/datasets/userid/dataset",
    "http://example.org/prefix/", false, false);
  instance = instanceWithRdfSerializer(rdfSerializer, dataSetMetadata);
}
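The interesting part of this setup is the Mockito mock: the RdfSerializer double records the calls the saver makes instead of writing RDF anywhere, so later assertions can inspect those interactions. A minimal, self-contained sketch of that pattern is shown below; the QuadSink interface and its emit method are hypothetical stand-ins, not Timbuctoo's actual RdfSerializer API.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

public class MockVerifySketch {

  // Hypothetical stand-in for an RDF output interface such as RdfSerializer.
  interface QuadSink {
    void emit(String subject, String predicate, String object);
  }

  public static void main(String[] args) {
    QuadSink sink = mock(QuadSink.class); // test double: records calls, performs no I/O

    // The code under test would receive the mock and call it.
    sink.emit("http://example.org/s", "http://example.org/p", "o");

    // The test then asserts that the expected interaction took place.
    verify(sink).emit("http://example.org/s", "http://example.org/p", "o");
  }
}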
Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet in project timbuctoo by HuygensING.
The class DataSetMetadataMutation, method get.
@Override
public Object get(DataFetchingEnvironment env) {
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  MutationHelpers.checkAdminPermissions(env, dataSet.getMetadata());
  try {
    Map md = env.getArgument("metadata");
    final String baseUri = dataSet.getMetadata().getBaseUri().endsWith("/")
      ? dataSet.getMetadata().getBaseUri()
      : dataSet.getMetadata().getBaseUri() + "/";
    addMutation(dataSet, new PredicateMutation().entity(
      baseUri,
      this.<String>parseProp(md, "title", v -> replace("http://purl.org/dc/terms/title", value(v))),
      this.<String>parseProp(md, "description", v -> replace("http://purl.org/dc/terms/description", value(v, MARKDOWN))),
      this.<String>parseProp(md, "imageUrl", v -> replace("http://xmlns.com/foaf/0.1/depiction", value(v))),
      this.<String>parseProp(md, "license", v -> replace("http://purl.org/dc/terms/license", subject(v))),
      this.<Map>parseProp(md, "owner", owner -> getOrCreate("http://purl.org/dc/terms/rightsHolder", baseUri + "rightsHolder",
        this.<String>parseProp(owner, "name", v -> replace("http://schema.org/name", value(v))),
        this.<String>parseProp(owner, "email", v -> replace("http://schema.org/email", value(v)))
      )),
      this.<Map>parseProp(md, "contact", owner -> getOrCreate("http://schema.org/ContactPoint", baseUri + "ContactPoint",
        this.<String>parseProp(owner, "name", v -> replace("http://schema.org/name", value(v))),
        this.<String>parseProp(owner, "email", v -> replace("http://schema.org/email", value(v)))
      )),
      this.<Map>parseProp(md, "provenanceInfo", owner -> getOrCreate("http://purl.org/dc/terms/provenance", baseUri + "Provenance",
        this.<String>parseProp(owner, "title", v -> replace("http://purl.org/dc/terms/title", value(v))),
        this.<String>parseProp(owner, "body", v -> replace("http://purl.org/dc/terms/description", value(v, MARKDOWN)))
      ))
    ));
    return new DataSetWithDatabase(dataSet);
  } catch (LogStorageFailedException | InterruptedException | ExecutionException e) {
    throw new RuntimeException(e);
  }
}
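Every metadata field above is packed into one PredicateMutation. Stripped to its smallest form, the builder pattern looks like the sketch below; it reuses only calls that are visible in the snippets on this page (entity, replace, value, MARKDOWN) and assumes the same static imports, so treat it as an illustration rather than a second API.

// Minimal sketch of the builder usage above: replace two literal predicates on one subject.
// Assumes the same static imports (replace, value, MARKDOWN) as DataSetMetadataMutation.
private PredicateMutation titleAndDescription(String baseUri, String title, String description) {
  return new PredicateMutation()
    .entity(
      baseUri,
      replace("http://purl.org/dc/terms/title", value(title)),
      replace("http://purl.org/dc/terms/description", value(description, MARKDOWN))
    );
}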
Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet in project timbuctoo by HuygensING.
The class ExtendSchemaMutation, method get.
@Override
public Object get(DataFetchingEnvironment env) {
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  MutationHelpers.checkAdminPermissions(env, dataSet.getMetadata());
  final SchemaStore generatedSchema = dataSet.getSchemaStore();
  Map<String, Type> customTypes = new HashMap<>();
  List<ExplicitType> customSchema;
  Map<String, List<ExplicitField>> newCustomSchema = new HashMap<>();
  try {
    String customSchemaString = OBJECT_MAPPER.writeValueAsString(env.getArgument("customSchema"));
    customSchema = OBJECT_MAPPER.readValue(customSchemaString, new TypeReference<List<ExplicitType>>() {});
  } catch (IOException e) {
    throw new RuntimeException("Could not parse the schema input");
  }
  for (ExplicitType explicitType : customSchema) {
    customTypes.put(explicitType.getCollectionId(), explicitType.convertToType());
    newCustomSchema.put(explicitType.getCollectionId(), explicitType.getFields());
  }
  Map<String, List<ExplicitField>> existingCustomSchema = dataSet.getCustomSchema();
  Map<String, Type> existingCustomSchemaTypes = new HashMap<>();
  for (Map.Entry<String, List<ExplicitField>> entry : existingCustomSchema.entrySet()) {
    ExplicitType tempExplicitType = new ExplicitType(entry.getKey(), entry.getValue());
    existingCustomSchemaTypes.put(entry.getKey(), tempExplicitType.convertToType());
  }
  MergeSchemas mergeSchemas = new MergeSchemas();
  customTypes = mergeSchemas.mergeSchema(existingCustomSchemaTypes, customTypes);
  MergeExplicitSchemas mergeExplicitSchemas = new MergeExplicitSchemas();
  Map<String, List<ExplicitField>> mergedExplicitSchema =
    mergeExplicitSchemas.mergeExplicitSchemas(existingCustomSchema, newCustomSchema);
  try {
    dataSet.saveCustomSchema(mergedExplicitSchema);
  } catch (IOException e) {
    LOG.error("Saving the custom schema failed", e);
    throw new RuntimeException(e);
  }
  Map<String, Type> mergedSchema = mergeSchemas.mergeSchema(generatedSchema.getStableTypes(), customTypes);
  for (Map.Entry<String, Type> customType : customTypes.entrySet()) {
    if (!mergedSchema.containsKey(customType.getKey())) {
      return ImmutableMap.of("message", "Schema extension was unsuccessful.");
    }
  }
  return ImmutableMap.of("message", "Schema extended successfully.");
}
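The GraphQL argument arrives as untyped Lists and Maps, which is why the method serialises it back to JSON and re-reads it as List<ExplicitType>. With Jackson the intermediate string can also be skipped via ObjectMapper.convertValue, as in the generic sketch below; the ExplicitType shape is replaced by plain maps here, since only the conversion mechanics are being illustrated.

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class ConvertValueSketch {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  public static void main(String[] args) {
    // Untyped structure, as a GraphQL argument typically arrives (illustrative data).
    Object argument = List.of(Map.of("collectionId", "persons", "fields", List.of()));

    // convertValue maps the object graph directly, without a writeValueAsString/readValue round trip.
    List<Map<String, Object>> typed = OBJECT_MAPPER.convertValue(
      argument,
      new TypeReference<List<Map<String, Object>>>() { }
    );
    System.out.println(typed);
  }
}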
Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet in project timbuctoo by HuygensING.
The class IndexConfigMutation, method get.
@Override
public Object get(DataFetchingEnvironment env) {
  String collectionUri = env.getArgument("collectionUri");
  Object indexConfig = env.getArgument("indexConfig");
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  MutationHelpers.checkAdminPermissions(env, dataSet.getMetadata());
  try {
    MutationHelpers.addMutation(dataSet, new PredicateMutation().entity(
      collectionUri,
      replace(TIM_HASINDEXERCONFIG, value(OBJECT_MAPPER.writeValueAsString(indexConfig)))
    ));
    return indexConfig;
  } catch (LogStorageFailedException | InterruptedException | ExecutionException | JsonProcessingException e) {
    throw new RuntimeException(e);
  }
}
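Here the whole index configuration is stored as a single JSON string under the TIM_HASINDEXERCONFIG predicate. The sketch below shows just that serialisation step with Jackson; the configuration map is invented for illustration and the real shape is whatever the client sends as "indexConfig".

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class IndexConfigJsonSketch {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  public static void main(String[] args) {
    // Illustrative index configuration (hypothetical keys, not a documented schema).
    Map<String, Object> indexConfig = Map.of(
      "facet", List.of(Map.of("paths", List.of("http://schema.org/name"), "type", "MultiSelect"))
    );
    try {
      String json = OBJECT_MAPPER.writeValueAsString(indexConfig);
      System.out.println(json); // this string is what would be stored as the predicate's value
    } catch (JsonProcessingException e) {
      throw new RuntimeException(e); // same wrap-and-rethrow strategy as the mutation above
    }
  }
}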
Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet in project timbuctoo by HuygensING.
The class MakePublicMutation, method get.
@Override
public Object get(DataFetchingEnvironment env) {
  User user = MutationHelpers.getUser(env);
  DataSet dataSet = MutationHelpers.getDataSet(env, dataSetRepository::getDataSet);
  try {
    dataSetRepository.publishDataSet(user, dataSet.getMetadata().getOwnerId(), dataSet.getMetadata().getDataSetId());
  } catch (DataSetPublishException e) {
    LOG.error("Failed to publish data set", e);
    throw new RuntimeException("Failed to publish data set");
  }
  return new DataSetWithDatabase(dataSet);
}
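The error handling mirrors the other mutations: log the checked exception with its stack trace, then rethrow an unchecked exception whose message is safe to surface to the GraphQL client. A self-contained sketch of that pattern, with a hypothetical PublishFailedException standing in for DataSetPublishException, is:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PublishErrorHandlingSketch {
  private static final Logger LOG = LoggerFactory.getLogger(PublishErrorHandlingSketch.class);

  // Hypothetical checked exception standing in for DataSetPublishException.
  static class PublishFailedException extends Exception {
    PublishFailedException(String message) { super(message); }
  }

  static void publish(boolean fail) throws PublishFailedException {
    if (fail) {
      throw new PublishFailedException("backing store rejected the update");
    }
  }

  public static void main(String[] args) {
    try {
      publish(true);
    } catch (PublishFailedException e) {
      LOG.error("Failed to publish data set", e);               // full details go to the server log
      throw new RuntimeException("Failed to publish data set"); // only a generic message reaches the client
    }
  }
}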