Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class CreatePrivateUrlCommandTest, method testAttemptCreatePrivateUrlOnNonDraft.
@Test
public void testAttemptCreatePrivateUrlOnNonDraft() {
    dataset = new Dataset();
    List<DatasetVersion> versions = new ArrayList<>();
    DatasetVersion datasetVersion = new DatasetVersion();
    datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
    versions.add(datasetVersion);
    dataset.setVersions(versions);
    dataset.setId(latestVersionIsNotDraft);
    String expected = "Can't create Private URL because the latest version of dataset id " + latestVersionIsNotDraft + " is not a draft.";
    String actual = null;
    PrivateUrl privateUrl = null;
    try {
        privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset));
    } catch (CommandException ex) {
        actual = ex.getMessage();
    }
    assertEquals(expected, actual);
    assertNull(privateUrl);
}
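For contrast, here is a minimal sketch of the draft-version success path. It is not taken from the project: the test name, the arbitrary id, and the assertions are illustrative, and it assumes the class's testEngine (set up elsewhere in this test class, not shown in the excerpt) stubs whatever services CreatePrivateUrlCommand touches, such as a private-URL service and role assignment.

@Test
public void testCreatePrivateUrlOnDraftSketch() {
    dataset = new Dataset();
    List<DatasetVersion> versions = new ArrayList<>();
    DatasetVersion draft = new DatasetVersion();
    // the latest (and only) version is a DRAFT, so the command should succeed
    draft.setVersionState(DatasetVersion.VersionState.DRAFT);
    versions.add(draft);
    dataset.setVersions(versions);
    dataset.setId(42L); // arbitrary id, only meaningful to the stubbed services
    PrivateUrl privateUrl = null;
    try {
        privateUrl = testEngine.submit(new CreatePrivateUrlCommand(null, dataset));
    } catch (CommandException ex) {
        // no exception is expected on the draft path
    }
    assertNotNull(privateUrl);
}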
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class SubmitDatasetForReviewCommandTest, method setUp.
@Before
public void setUp() {
    dataset = new Dataset();
    HttpServletRequest aHttpServletRequest = null;
    dataverseRequest = new DataverseRequest(MocksFactory.makeAuthenticatedUser("First", "Last"), aHttpServletRequest);
    testEngine = new TestDataverseEngine(new TestCommandContext() {
        @Override
        public AuthenticationServiceBean authentication() {
            return new AuthenticationServiceBean() {
                @Override
                public AuthenticatedUser getAuthenticatedUser(String id) {
                    return MocksFactory.makeAuthenticatedUser("First", "Last");
                }
            };
        }

        @Override
        public IndexServiceBean index() {
            return new IndexServiceBean() {
                @Override
                public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
                    return null;
                }
            };
        }

        @Override
        public EntityManager em() {
            return new TestEntityManager() {
                @Override
                public <T> T merge(T entity) {
                    return entity;
                }

                @Override
                public void flush() {
                    // nothing to do here
                }
            };
        }

        @Override
        public DatasetServiceBean datasets() {
            return new DatasetServiceBean() {
                {
                    em = new NoOpTestEntityManager();
                }

                @Override
                public DatasetVersionUser getDatasetVersionUser(DatasetVersion version, User user) {
                    return null;
                }

                @Override
                public DatasetLock addDatasetLock(Long datasetId, DatasetLock.Reason reason, Long userId, String info) {
                    return null;
                }
            };
        }

        @Override
        public DataverseRoleServiceBean roles() {
            return new DataverseRoleServiceBean() {
                @Override
                public DataverseRole findBuiltinRoleByAlias(String alias) {
                    return new DataverseRole();
                }

                @Override
                public RoleAssignment save(RoleAssignment assignment) {
                    // no-op
                    return assignment;
                }
            };
        }

        @Override
        public PermissionServiceBean permissions() {
            return new PermissionServiceBean() {
                @Override
                public List<AuthenticatedUser> getUsersWithPermissionOn(Permission permission, DvObject dvo) {
                    // We only need permissions for notifications, which we are testing in InReviewWorkflowIT.
                    return Collections.emptyList();
                }
            };
        }
    });
}
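The setUp above only wires the stubbed engine; a sketch of a test that could run on top of it follows. It is illustrative rather than taken from the project: the constructor call new SubmitDatasetForReviewCommand(dataverseRequest, dataset) is assumed, and since the exact rejection message for a non-draft latest version is not shown in the excerpt, the sketch only checks that a CommandException is thrown.

@Test
public void testSubmitReleasedDatasetForReviewSketch() {
    dataset.setId(MocksFactory.nextId());
    DatasetVersion released = new DatasetVersion();
    released.setVersionState(DatasetVersion.VersionState.RELEASED);
    List<DatasetVersion> versions = new ArrayList<>();
    versions.add(released);
    dataset.setVersions(versions);
    boolean rejected = false;
    try {
        testEngine.submit(new SubmitDatasetForReviewCommand(dataverseRequest, dataset));
    } catch (CommandException ex) {
        // a released latest version should not be accepted for review
        rejected = true;
    }
    assertTrue(rejected);
}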
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class SchemaDotOrgExporterTest, method testExportDataset.
/**
 * Test of exportDataset method, of class SchemaDotOrgExporter.
 */
@Test
public void testExportDataset() throws Exception {
    System.out.println("exportDataset");
    File datasetVersionJson = new File("src/test/resources/json/dataset-finch1.json");
    String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
    JsonReader jsonReader1 = Json.createReader(new StringReader(datasetVersionAsJson));
    JsonObject json1 = jsonReader1.readObject();
    JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, null);
    DatasetVersion version = jsonParser.parseDatasetVersion(json1.getJsonObject("datasetVersion"));
    version.setVersionState(DatasetVersion.VersionState.RELEASED);
    SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
    Date publicationDate = dateFmt.parse("19551105");
    version.setReleaseTime(publicationDate);
    version.setVersionNumber(1L);
    // TODO: It might be nice to test TermsOfUseAndAccess some day
    version.setTermsOfUseAndAccess(null);
    Dataset dataset = new Dataset();
    dataset.setProtocol("doi");
    dataset.setAuthority("myAuthority");
    dataset.setIdentifier("myIdentifier");
    version.setDataset(dataset);
    Dataverse dataverse = new Dataverse();
    dataverse.setName("LibraScholar");
    dataset.setOwner(dataverse);
    System.setProperty(SITE_URL, "https://librascholar.org");
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    schemaDotOrgExporter.exportDataset(version, json1, byteArrayOutputStream);
    String jsonLd = byteArrayOutputStream.toString();
    System.out.println("schema.org JSON-LD: " + JsonUtil.prettyPrint(jsonLd));
    JsonReader jsonReader2 = Json.createReader(new StringReader(jsonLd));
    JsonObject json2 = jsonReader2.readObject();
    assertEquals("http://schema.org", json2.getString("@context"));
    assertEquals("Dataset", json2.getString("@type"));
    assertEquals("https://doi.org/myAuthority/myIdentifier", json2.getString("identifier"));
    assertEquals("Darwin's Finches", json2.getString("name"));
    assertEquals("Finch, Fiona", json2.getJsonArray("author").getJsonObject(0).getString("name"));
    assertEquals("Birds Inc.", json2.getJsonArray("author").getJsonObject(0).getString("affiliation"));
    assertEquals("1955-11-05", json2.getString("dateModified"));
    assertEquals("1", json2.getString("version"));
    assertEquals("Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", json2.getString("description"));
    assertEquals("Medicine, Health and Life Sciences", json2.getJsonArray("keywords").getString(0));
    assertEquals("https://schema.org/version/3.3", json2.getString("schemaVersion"));
    assertEquals("DataCatalog", json2.getJsonObject("includedInDataCatalog").getString("@type"));
    assertEquals("LibraScholar", json2.getJsonObject("includedInDataCatalog").getString("name"));
    assertEquals("https://librascholar.org", json2.getJsonObject("includedInDataCatalog").getString("url"));
    assertEquals("Organization", json2.getJsonObject("provider").getString("@type"));
    assertEquals("Dataverse", json2.getJsonObject("provider").getString("name"));
}
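One cleanup detail worth noting: the test sets the SITE_URL system property process-wide. A minimal sketch of a teardown (not part of the excerpt) that keeps that property from leaking into other tests:

@After
public void tearDown() {
    // undo the System.setProperty(SITE_URL, ...) call made in testExportDataset
    System.clearProperty(SITE_URL);
}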
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class IngestUtilTest, method testRecalculateDatasetVersionUNF.
@Test
public void testRecalculateDatasetVersionUNF() {
    IngestUtil.recalculateDatasetVersionUNF(null);
    DatasetVersion dsvNoFile = new DatasetVersion();
    IngestUtil.recalculateDatasetVersionUNF(dsvNoFile);
    assertEquals(null, dsvNoFile.getUNF());
    List<Dataset> datasets = new ArrayList<>();
    Dataset dataset = new Dataset();
    dataset.setProtocol("doi");
    dataset.setAuthority("fakeAuthority");
    dataset.setIdentifier("12345");
    DatasetVersion dsv1 = new DatasetVersion();
    dsv1.setDataset(dataset);
    dsv1.setId(42L);
    dsv1.setVersionState(DatasetVersion.VersionState.DRAFT);
    List<DatasetVersion> datasetVersions = new ArrayList<>();
    datasetVersions.add(dsv1);
    DataFile datafile1 = new DataFile("application/octet-stream");
    DataTable dataTable = new DataTable();
    dataTable.setUnf("unfOnDataTable");
    datafile1.setDataTable(dataTable);
    assertEquals(true, datafile1.isTabularData());
    FileMetadata fmd1 = new FileMetadata();
    fmd1.setId(1L);
    fmd1.setLabel("datafile1.txt");
    fmd1.setDataFile(datafile1);
    datafile1.getFileMetadatas().add(fmd1);
    dsv1.getFileMetadatas().add(fmd1);
    fmd1.setDatasetVersion(dsv1);
    dataset.setVersions(datasetVersions);
    datasets.add(dataset);
    assertEquals(null, dsv1.getUNF());
    IngestUtil.recalculateDatasetVersionUNF(dsv1);
    assertEquals("UNF:6:rDlgOhoEkEQQdwtLRHjmtw==", dsv1.getUNF());
}
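A further hedged case, not part of the excerpt: a version whose only file has no DataTable is non-tabular, so it contributes no file-level UNF, and the version UNF is expected to remain null, matching the empty-version assertion above. The file name is made up; the wiring follows the same pattern as the test body.

DatasetVersion dsvNonTabular = new DatasetVersion();
DataFile plainFile = new DataFile("application/octet-stream");
FileMetadata fmdPlain = new FileMetadata();
fmdPlain.setLabel("plain.txt");
fmdPlain.setDataFile(plainFile);
plainFile.getFileMetadatas().add(fmdPlain);
dsvNonTabular.getFileMetadatas().add(fmdPlain);
fmdPlain.setDatasetVersion(dsvNonTabular);
assertEquals(false, plainFile.isTabularData());
IngestUtil.recalculateDatasetVersionUNF(dsvNonTabular);
assertEquals(null, dsvNonTabular.getUNF());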
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class IngestUtilTest, method testCheckForDuplicateFileNamesWithDirectories.
/**
 * Test adding duplicate file name labels with directories, including a
 * duplicate file name label in another directory.
 */
@Test
public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
    SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
    // create dataset
    Dataset dataset = makeDataset();
    // create dataset version
    DatasetVersion datasetVersion = dataset.getEditVersion();
    datasetVersion.setCreateTime(dateFmt.parse("20001012"));
    datasetVersion.setLastUpdateTime(datasetVersion.getLastUpdateTime());
    datasetVersion.setId(MocksFactory.nextId());
    datasetVersion.setReleaseTime(dateFmt.parse("20010101"));
    datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
    datasetVersion.setMinorVersionNumber(0L);
    datasetVersion.setVersionNumber(1L);
    datasetVersion.setFileMetadatas(new ArrayList<>());
    // create datafiles
    List<DataFile> dataFileList = new ArrayList<>();
    DataFile datafile1 = new DataFile("application/octet-stream");
    datafile1.setStorageIdentifier("subdir/datafile1.txt");
    datafile1.setFilesize(200);
    datafile1.setModificationTime(new Timestamp(new Date().getTime()));
    datafile1.setCreateDate(new Timestamp(new Date().getTime()));
    datafile1.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    datafile1.setOwner(dataset);
    datafile1.setIngestDone();
    datafile1.setChecksumType(DataFile.ChecksumType.SHA1);
    datafile1.setChecksumValue("Unknown");
    // set metadata and add version
    FileMetadata fmd1 = new FileMetadata();
    fmd1.setId(1L);
    fmd1.setLabel("datafile1.txt");
    fmd1.setDirectoryLabel("subdir");
    fmd1.setDataFile(datafile1);
    datafile1.getFileMetadatas().add(fmd1);
    datasetVersion.getFileMetadatas().add(fmd1);
    fmd1.setDatasetVersion(datasetVersion);
    dataFileList.add(datafile1);
    DataFile datafile2 = new DataFile("application/octet-stream");
    datafile2.setStorageIdentifier("subdir/datafile2.txt");
    datafile2.setFilesize(200);
    datafile2.setModificationTime(new Timestamp(new Date().getTime()));
    datafile2.setCreateDate(new Timestamp(new Date().getTime()));
    datafile2.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    datafile2.setOwner(dataset);
    datafile2.setIngestDone();
    datafile2.setChecksumType(DataFile.ChecksumType.SHA1);
    datafile2.setChecksumValue("Unknown");
    // set metadata and add version
    FileMetadata fmd2 = new FileMetadata();
    fmd2.setId(2L);
    fmd2.setLabel("datafile2.txt");
    fmd2.setDirectoryLabel("subdir");
    fmd2.setDataFile(datafile2);
    datafile2.getFileMetadatas().add(fmd2);
    datasetVersion.getFileMetadatas().add(fmd2);
    fmd2.setDatasetVersion(datasetVersion);
    dataFileList.add(datafile2);
    DataFile datafile3 = new DataFile("application/octet-stream");
    datafile3.setStorageIdentifier("datafile2.txt");
    datafile3.setFilesize(200);
    datafile3.setModificationTime(new Timestamp(new Date().getTime()));
    datafile3.setCreateDate(new Timestamp(new Date().getTime()));
    datafile3.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    datafile3.setOwner(dataset);
    datafile3.setIngestDone();
    datafile3.setChecksumType(DataFile.ChecksumType.SHA1);
    datafile3.setChecksumValue("Unknown");
    // set metadata and add version
    FileMetadata fmd3 = new FileMetadata();
    fmd3.setId(3L);
    fmd3.setLabel("datafile2.txt");
    fmd3.setDataFile(datafile3);
    datafile3.getFileMetadatas().add(fmd3);
    dataFileList.add(datafile3);
    IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
    boolean file1NameAltered = false;
    boolean file2NameAltered = false;
    boolean file3NameAltered = true;
    for (DataFile df : dataFileList) {
        if (df.getFileMetadata().getLabel().equals("datafile1-1.txt")) {
            file1NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2-1.txt")) {
            file2NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2.txt")) {
            file3NameAltered = false;
        }
    }
    // check filenames are unique
    assertEquals(file1NameAltered, true);
    assertEquals(file2NameAltered, true);
    assertEquals(file3NameAltered, false);
    // add duplicate file in root
    datasetVersion.getFileMetadatas().add(fmd3);
    fmd3.setDatasetVersion(datasetVersion);
    // try to add data files with "-1" duplicates and see if it gets incremented to "-2"
    IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
    for (DataFile df : dataFileList) {
        if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
            file1NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2-2.txt")) {
            file2NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2-1.txt")) {
            file3NameAltered = true;
        }
    }
    // check filenames are unique
    assertEquals(file1NameAltered, true);
    assertEquals(file2NameAltered, true);
    assertEquals(file3NameAltered, true);
}
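For completeness, a hedged sketch of the same check without directory labels. The file names and ids are made up, makeDataset() is the helper already used above, and the expected "-1" suffix mirrors the renaming behaviour exercised by the test above rather than a documented guarantee.

DatasetVersion version = makeDataset().getEditVersion();
version.setFileMetadatas(new ArrayList<>());
// an existing root-level "readme.txt" already in the version
FileMetadata existing = new FileMetadata();
existing.setId(10L);
existing.setLabel("readme.txt");
existing.setDataFile(new DataFile("text/plain"));
version.getFileMetadatas().add(existing);
existing.setDatasetVersion(version);
// a newly added file with the same label, not yet part of the version
DataFile incoming = new DataFile("text/plain");
FileMetadata incomingMeta = new FileMetadata();
incomingMeta.setId(11L);
incomingMeta.setLabel("readme.txt");
incomingMeta.setDataFile(incoming);
incoming.getFileMetadatas().add(incomingMeta);
List<DataFile> newFiles = new ArrayList<>();
newFiles.add(incoming);
IngestUtil.checkForDuplicateFileNamesFinal(version, newFiles);
// the incoming duplicate is expected to be relabeled with a numeric suffix
assertEquals("readme-1.txt", incoming.getFileMetadata().getLabel());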