
Example 6 with DataSetMetaData

Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData in project timbuctoo by HuygensING.

The class PermissionBasedFieldVisibilityTest, method createDataSetMetadata.

private DataSetMetaData createDataSetMetadata(String combinedId) {
    DataSetMetaData dataSetMetaData2 = mock(BasicDataSetMetaData.class);
    given(dataSetMetaData2.getCombinedId()).willReturn(combinedId);
    return dataSetMetaData2;
}
Also used : DataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData) BasicDataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.BasicDataSetMetaData)
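The helper above only stubs getCombinedId() on a Mockito mock of BasicDataSetMetaData. A minimal sketch of how such a helper might be exercised, with a hypothetical test name and expected id (not taken from the project):

@Test
public void createDataSetMetadataStubsTheCombinedId() {
    // Hypothetical check: the returned mock answers getCombinedId() with the value passed in.
    DataSetMetaData metaData = createDataSetMetadata("user__dataset");
    assertEquals("user__dataset", metaData.getCombinedId());
}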

Example 7 with DataSetMetaData

Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData in project timbuctoo by HuygensING.

The class BdbRmlDataSourceStoreTest, method itWorks.

@Test
public void itWorks() throws Exception {
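    // Set up a non-persistent Berkeley DB environment and an RML data-source store, then write two raw collections through a RawUploadRdfSaver.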
    BdbNonPersistentEnvironmentCreator dbCreator = new BdbNonPersistentEnvironmentCreator();
    DataSetMetaData dataSetMetadata = new BasicDataSetMetaData("userid", "datasetid", "http://timbuctoo.huygens.knaw.nl/v5/userid/datasetid", "http://example.org/prefix/", false, false);
    final RmlDataSourceStore rmlDataSourceStore = new BdbRmlDataSourceStore(dbCreator.getDatabase("userid", "datasetid", "rmlSource", true, TupleBinding.getPrimitiveBinding(String.class), TupleBinding.getPrimitiveBinding(String.class), new StringStringIsCleanHandler()), new ImportStatus(new LogList()));
    RdfSerializer rdfSerializer = new RmlDataSourceRdfSerializer(rmlDataSourceStore);
    RawUploadRdfSaver rawUploadRdfSaver = new RawUploadRdfSaver(dataSetMetadata, "fileName", APPLICATION_OCTET_STREAM_TYPE, rdfSerializer, "origFileName", Clock.systemUTC());
    final String inputCol1 = rawUploadRdfSaver.addCollection("collection1");
    ImportPropertyDescriptions importPropertyDescriptions = new ImportPropertyDescriptions();
    importPropertyDescriptions.getOrCreate(1).setPropertyName("propName1");
    importPropertyDescriptions.getOrCreate(2).setPropertyName("propName2");
    rawUploadRdfSaver.addPropertyDescriptions(inputCol1, importPropertyDescriptions);
    rawUploadRdfSaver.addEntity(inputCol1, ImmutableMap.of("propName1", "value1", "propName2", "val2"));
    rawUploadRdfSaver.addEntity(inputCol1, ImmutableMap.of("propName1", "entVal1", "propName2", "entVal2"));
    final String inputCol2 = rawUploadRdfSaver.addCollection("collection2");
    ImportPropertyDescriptions importPropertyDescriptions1 = new ImportPropertyDescriptions();
    importPropertyDescriptions1.getOrCreate(1).setPropertyName("prop3");
    importPropertyDescriptions1.getOrCreate(2).setPropertyName("prop4");
    rawUploadRdfSaver.addPropertyDescriptions(inputCol2, importPropertyDescriptions1);
    rawUploadRdfSaver.addEntity(inputCol2, ImmutableMap.of("prop3", "value1", "prop4", "val2"));
    rawUploadRdfSaver.addEntity(inputCol2, ImmutableMap.of("prop3", "entVal1", "prop4", "entVal2"));
    rdfSerializer.close();
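    // Read both collections back through RdfDataSource and collect each row's values for the assertions below.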
    RdfDataSource rdfDataSource = new RdfDataSource(rmlDataSourceStore, inputCol1, new JexlRowFactory(ImmutableMap.of(), new HashMapBasedJoinHandler()));
    RdfDataSource rdfDataSource2 = new RdfDataSource(rmlDataSourceStore, inputCol2, new JexlRowFactory(ImmutableMap.of(), new HashMapBasedJoinHandler()));
    final List<String> collection1;
    final List<String> collection2;
    try (Stream<Row> stream = rdfDataSource.getRows(new ThrowingErrorHandler())) {
        collection1 = stream.map(x -> x.getRawValue("propName1") + ":" + x.getRawValue("propName2")).collect(toList());
    }
    try (Stream<Row> stream = rdfDataSource2.getRows(new ThrowingErrorHandler())) {
        collection2 = stream.map(x -> x.getRawValue("prop3") + ":" + x.getRawValue("prop4")).collect(toList());
    }
    assertThat(collection1, contains("value1:val2", "entVal1:entVal2"));
    assertThat(collection2, contains("value1:val2", "entVal1:entVal2"));
    dbCreator.close();
}
Also used : RawUploadRdfSaver(nl.knaw.huygens.timbuctoo.v5.bulkupload.RawUploadRdfSaver) ImportPropertyDescriptions(nl.knaw.huygens.timbuctoo.bulkupload.parsingstatemachine.ImportPropertyDescriptions) StringStringIsCleanHandler(nl.knaw.huygens.timbuctoo.v5.berkeleydb.isclean.StringStringIsCleanHandler) RmlDataSourceStore(nl.knaw.huygens.timbuctoo.v5.datastores.rmldatasource.RmlDataSourceStore) BdbNonPersistentEnvironmentCreator(nl.knaw.huygens.timbuctoo.v5.dropwizard.BdbNonPersistentEnvironmentCreator) RdfSerializer(nl.knaw.huygens.timbuctoo.v5.rdfio.RdfSerializer) JexlRowFactory(nl.knaw.huygens.timbuctoo.rml.datasource.jexl.JexlRowFactory) LogList(nl.knaw.huygens.timbuctoo.v5.dataset.dto.LogList) ImportStatus(nl.knaw.huygens.timbuctoo.v5.dataset.ImportStatus) HashMapBasedJoinHandler(nl.knaw.huygens.timbuctoo.rml.datasource.joinhandlers.HashMapBasedJoinHandler) BasicDataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.BasicDataSetMetaData) ThrowingErrorHandler(nl.knaw.huygens.timbuctoo.rml.ThrowingErrorHandler) Row(nl.knaw.huygens.timbuctoo.rml.Row) DataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData) RdfDataSource(nl.knaw.huygens.timbuctoo.v5.rml.RdfDataSource) Test(org.junit.Test)

Example 8 with DataSetMetaData

Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData in project timbuctoo by HuygensING.

The class UserPermissionCheckTest, method getPermissionsReturnsNoPermissionsForEmptyUserIfPrivateDataSet.

@Test
public void getPermissionsReturnsNoPermissionsForEmptyUserIfPrivateDataSet() {
    PermissionFetcher permissionFetcher = mock(PermissionFetcher.class);
    Set<Permission> defaultPermissions = Sets.newHashSet(Permission.READ);
    UserPermissionCheck userPermissionCheck = new UserPermissionCheck(Optional.empty(), permissionFetcher, defaultPermissions);
    DataSetMetaData dataSetMetaData = mock(BasicDataSetMetaData.class);
    given(dataSetMetaData.isPublished()).willReturn(false);
    Set<Permission> permissions = userPermissionCheck.getPermissions(dataSetMetaData);
    assertEquals(Sets.newHashSet(), permissions);
}
Also used : PermissionFetcher(nl.knaw.huygens.timbuctoo.v5.security.PermissionFetcher) Permission(nl.knaw.huygens.timbuctoo.v5.security.dto.Permission) DataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData) BasicDataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.BasicDataSetMetaData) Test(org.junit.Test)
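For contrast, a hypothetical complementary test, assuming (this behaviour is not shown in the project code above) that an anonymous user falls back to the default permissions once the data set is published:

@Test
public void getPermissionsReturnsDefaultPermissionsForEmptyUserIfPublishedDataSet() {
    // Assumption: a published data set exposes the default permissions to an anonymous user.
    PermissionFetcher permissionFetcher = mock(PermissionFetcher.class);
    Set<Permission> defaultPermissions = Sets.newHashSet(Permission.READ);
    UserPermissionCheck userPermissionCheck = new UserPermissionCheck(Optional.empty(), permissionFetcher, defaultPermissions);
    DataSetMetaData dataSetMetaData = mock(BasicDataSetMetaData.class);
    given(dataSetMetaData.isPublished()).willReturn(true);
    assertEquals(defaultPermissions, userPermissionCheck.getPermissions(dataSetMetaData));
}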

Example 9 with DataSetMetaData

Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData in project timbuctoo by HuygensING.

The class RawUploadRdfSaverTest, method usageTest.

@Test
public void usageTest() throws LogStorageFailedException {
    RdfToStringFaker rdfSerializer = new RdfToStringFaker();
    RawUploadRdfSaver instance = instanceWithRdfSerializer(rdfSerializer, dataSetMetadata);
    final String collection1 = instance.addCollection("collection1");
    ImportPropertyDescriptions importPropertyDescriptions = new ImportPropertyDescriptions();
    importPropertyDescriptions.getOrCreate(1).setPropertyName("propName1");
    importPropertyDescriptions.getOrCreate(2).setPropertyName("propName2");
    instance.addPropertyDescriptions(collection1, importPropertyDescriptions);
    instance.addEntity(collection1, ImmutableMap.of("propName1", "value1", "propName2", "val2"));
    instance.addEntity(collection1, ImmutableMap.of("propName1", "entVal1", "propName2", "entVal2"));
    final String collection2 = instance.addCollection("collection2");
    ImportPropertyDescriptions importPropertyDescriptions1 = new ImportPropertyDescriptions();
    importPropertyDescriptions1.getOrCreate(1).setPropertyName("prop3");
    importPropertyDescriptions1.getOrCreate(2).setPropertyName("prop4");
    instance.addPropertyDescriptions(collection2, importPropertyDescriptions1);
    instance.addEntity(collection2, ImmutableMap.of("prop3", "value1", "prop4", "val2"));
    instance.addEntity(collection2, ImmutableMap.of("prop3", "entVal1", "prop4", "entVal2"));
    String generatedRdf = rdfSerializer.toString();
    // Use assertEquals because the failing Hamcrest output is hard to compare
    String graphName = dataSetMetadata.getBaseUri();
    String fileUri = dataSetMetadata.getUriPrefix() + "rawData/fileName/";
    String prop = fileUri + "props/";
    String rowData = fileUri + "entities/";
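    // Generated tim_id values are random UUIDs, so they are replaced with {UUID} before comparing.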
    assertEquals(fileUri + " " + RDF_TYPE + " " + TIM_TABULAR_FILE + " " + graphName + "\n" + graphName + " " + PROV_DERIVED_FROM + " " + fileUri + " " + graphName + "\n" + fileUri + " " + TIM_MIMETYPE + " " + "application/octet-stream" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + RDFS_LABEL + " " + FILE_NAME + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + PROV_ATTIME + " " + DATE + "^^" + XSD_DATETIMESTAMP + " " + graphName + "\n" + collection1 + " " + RDF_TYPE + " " + collection1 + "type " + graphName + "\n" + collection1 + " " + RDF_TYPE + " " + TIM_TABULAR_COLLECTION + " " + graphName + "\n" + collection1 + " " + RDFS_LABEL + " collection1" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + TIM_HASCOLLECTION + " " + collection1 + " " + graphName + "\n" + fileUri + " " + TIMBUCTOO_NEXT + " " + collection1 + " " + graphName + "\n" + prop + "tim_id " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "tim_id " + graphName + "\n" + prop + "tim_id " + TIM_PROP_ID + " -1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "tim_id " + RDFS_LABEL + " tim_id" + "^^" + STRING + " " + graphName + "\n" + prop + "propName1 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "propName1 " + graphName + "\n" + prop + "propName1 " + TIM_PROP_ID + " 1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "propName1 " + RDFS_LABEL + " propName1" + "^^" + STRING + " " + graphName + "\n" + prop + "tim_id " + TIMBUCTOO_NEXT + " " + prop + "propName1 " + graphName + "\n" + prop + "propName2 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection1 + " " + TIM_HAS_PROPERTY + " " + prop + "propName2 " + graphName + "\n" + prop + "propName2 " + TIM_PROP_ID + " 2" + "^^" + INTEGER + " " + graphName + "\n" + prop + "propName2 " + RDFS_LABEL + " propName2" + "^^" + STRING + " " + graphName + "\n" + prop + "propName1 " + TIMBUCTOO_NEXT + " " + prop + "propName2 " + graphName + "\n" + rowData + "1 " + RDF_TYPE + " " + collection1 + " " + graphName + "\n" + collection1 + " " + TIM_HAS_ROW + " " + rowData + "1 " + graphName + "\n" + rowData + "1 " + prop + "propName1" + " value1" + "^^" + STRING + " " + graphName + "\n" + rowData + "1 " + prop + "propName2" + " val2" + "^^" + STRING + " " + graphName + "\n" + rowData + "1 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + RDF_TYPE + " " + collection1 + " " + graphName + "\n" + collection1 + " " + TIM_HAS_ROW + " " + rowData + "2 " + graphName + "\n" + rowData + "2 " + prop + "propName1" + " entVal1" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + prop + "propName2" + " entVal2" + "^^" + STRING + " " + graphName + "\n" + rowData + "2 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + collection2 + " " + RDF_TYPE + " " + collection2 + "type " + graphName + "\n" + collection2 + " " + RDF_TYPE + " " + TIM_TABULAR_COLLECTION + " " + graphName + "\n" + collection2 + " " + RDFS_LABEL + " collection2" + "^^" + STRING + " " + graphName + "\n" + fileUri + " " + TIM_HASCOLLECTION + " " + collection2 + " " + graphName + "\n" + collection1 + " " + TIMBUCTOO_NEXT + " " + collection2 + " " + graphName + "\n" + prop + "tim_id " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "tim_id " + graphName + "\n" + prop + "tim_id " + TIM_PROP_ID + " -1" + "^^" + INTEGER + " " + graphName 
+ "\n" + prop + "tim_id " + RDFS_LABEL + " tim_id" + "^^" + STRING + " " + graphName + "\n" + prop + "prop3 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "prop3 " + graphName + "\n" + prop + "prop3 " + TIM_PROP_ID + " 1" + "^^" + INTEGER + " " + graphName + "\n" + prop + "prop3 " + RDFS_LABEL + " prop3" + "^^" + STRING + " " + graphName + "\n" + prop + "tim_id " + TIMBUCTOO_NEXT + " " + prop + "prop3 " + graphName + "\n" + prop + "prop4 " + RDF_TYPE + " " + TIM_PROP_DESC + " " + graphName + "\n" + collection2 + " " + TIM_HAS_PROPERTY + " " + prop + "prop4 " + graphName + "\n" + prop + "prop4 " + TIM_PROP_ID + " 2" + "^^" + INTEGER + " " + graphName + "\n" + prop + "prop4 " + RDFS_LABEL + " prop4" + "^^" + STRING + " " + graphName + "\n" + prop + "prop3 " + TIMBUCTOO_NEXT + " " + prop + "prop4 " + graphName + "\n" + rowData + "3 " + RDF_TYPE + " " + collection2 + " " + graphName + "\n" + collection2 + " " + TIM_HAS_ROW + " " + rowData + "3 " + graphName + "\n" + rowData + "3 " + prop + "prop3" + " value1" + "^^" + STRING + " " + graphName + "\n" + rowData + "3 " + prop + "prop4" + " val2" + "^^" + STRING + " " + graphName + "\n" + rowData + "3 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + RDF_TYPE + " " + collection2 + " " + graphName + "\n" + collection2 + " " + TIM_HAS_ROW + " " + rowData + "4 " + graphName + "\n" + rowData + "4 " + prop + "prop3" + " entVal1" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + prop + "prop4" + " entVal2" + "^^" + STRING + " " + graphName + "\n" + rowData + "4 " + prop + "tim_id" + " {UUID}" + "^^" + STRING + " " + graphName + "\n", generatedRdf.replaceAll("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}", "{UUID}"));
}
Also used : RawUploadRdfSaver(nl.knaw.huygens.timbuctoo.v5.bulkupload.RawUploadRdfSaver) ImportPropertyDescriptions(nl.knaw.huygens.timbuctoo.bulkupload.parsingstatemachine.ImportPropertyDescriptions) Matchers.containsString(org.hamcrest.Matchers.containsString) Test(org.junit.Test)

Example 10 with DataSetMetaData

Use of nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData in project timbuctoo by HuygensING.

The class FileSystemDataStorage, method loadDataSetMetaData.

@Override
public Map<String, Set<DataSetMetaData>> loadDataSetMetaData() throws IOException {
    Map<String, Set<DataSetMetaData>> metaDataSet = Maps.newHashMap();
    File[] directories = new File(dataSetMetadataLocation).listFiles(File::isDirectory);
    for (int i = 0; i < directories.length; i++) {
        String dirName = directories[i].toString();
        String currentOwnerId = dirName.substring(dirName.lastIndexOf("/") + 1, dirName.length());
        Set<DataSetMetaData> tempMetaDataSet = new HashSet<>();
        try (Stream<Path> fileStream = Files.walk(directories[i].toPath())) {
            Set<Path> paths = fileStream.filter(current -> Files.isDirectory(current)).collect(Collectors.toSet());
            for (Path path : paths) {
                File tempFile = new File(path.toString() + "/metaData.json");
                if (tempFile.exists()) {
                    JsonFileBackedData<BasicDataSetMetaData> metaDataFromFile =
                        JsonFileBackedData.getOrCreate(tempFile, null, new TypeReference<BasicDataSetMetaData>() {
                        });
                    tempMetaDataSet.add(metaDataFromFile.getData());
                }
            }
        }
        metaDataSet.put(currentOwnerId, tempMetaDataSet);
    }
    return metaDataSet;
}
Also used : Path(java.nio.file.Path) JsonProperty(com.fasterxml.jackson.annotation.JsonProperty) Files(java.nio.file.Files) Set(java.util.Set) IOException(java.io.IOException) DataSetConfiguration(nl.knaw.huygens.timbuctoo.v5.dataset.DataSetConfiguration) Maps(com.google.common.collect.Maps) Collectors(java.util.stream.Collectors) File(java.io.File) HashSet(java.util.HashSet) DataStorage(nl.knaw.huygens.timbuctoo.v5.datastorage.DataStorage) Stream(java.util.stream.Stream) DataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData) Map(java.util.Map) JsonCreator(com.fasterxml.jackson.annotation.JsonCreator) JsonIgnore(com.fasterxml.jackson.annotation.JsonIgnore) RdfIoFactory(nl.knaw.huygens.timbuctoo.v5.rdfio.RdfIoFactory) FileHelper(nl.knaw.huygens.timbuctoo.v5.filehelper.FileHelper) TypeReference(com.fasterxml.jackson.core.type.TypeReference) BasicDataSetMetaData(nl.knaw.huygens.timbuctoo.v5.dataset.dto.BasicDataSetMetaData) DataSetStorage(nl.knaw.huygens.timbuctoo.v5.datastorage.DataSetStorage) JsonFileBackedData(nl.knaw.huygens.timbuctoo.v5.jsonfilebackeddata.JsonFileBackedData)
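
A minimal sketch (not taken from the project) of consuming the map returned by loadDataSetMetaData(); dataStorage stands in for an already-configured DataStorage implementation such as the FileSystemDataStorage above, and the surrounding method is assumed to declare throws IOException:

// dataStorage is a hypothetical, already-configured DataStorage instance.
Map<String, Set<DataSetMetaData>> metaDataPerOwner = dataStorage.loadDataSetMetaData();
metaDataPerOwner.forEach((ownerId, metaDataSet) ->
    metaDataSet.forEach(metaData -> System.out.println(ownerId + " -> " + metaData.getCombinedId())));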

Aggregations

DataSetMetaData (nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSetMetaData) 20
BasicDataSetMetaData (nl.knaw.huygens.timbuctoo.v5.dataset.dto.BasicDataSetMetaData) 13
DataSet (nl.knaw.huygens.timbuctoo.v5.dataset.dto.DataSet) 12
Test (org.junit.Test) 12
Permission (nl.knaw.huygens.timbuctoo.v5.security.dto.Permission) 10
User (nl.knaw.huygens.timbuctoo.v5.security.dto.User) 10
IOException (java.io.IOException) 5
VreAuthorization (nl.knaw.huygens.timbuctoo.security.dto.VreAuthorization) 5
Map (java.util.Map) 4
Collectors (java.util.stream.Collectors) 3
Stream (java.util.stream.Stream) 3
RsLn (nl.knaw.huygens.timbuctoo.remote.rs.xml.RsLn) 3
RsMd (nl.knaw.huygens.timbuctoo.remote.rs.xml.RsMd) 3
UrlItem (nl.knaw.huygens.timbuctoo.remote.rs.xml.UrlItem) 3
Urlset (nl.knaw.huygens.timbuctoo.remote.rs.xml.Urlset) 3
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) 2
Charsets (com.google.common.base.Charsets) 2
Resources (com.google.common.io.Resources) 2
Resources.getResource (com.google.common.io.Resources.getResource) 2
GraphQLSchema (graphql.schema.GraphQLSchema) 2