Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in the project dataverse by IQSS.
Class FileRecordJobIT, method testNewEditor.
/**
 * Batch import run as a newly created contributor: grants the EDITOR role to a
 * fresh builtin user, imports one file placed in two directories, and verifies
 * the job status, step metrics, imported storage identifiers, and the job
 * reporting APIs — all using the contributor's API token.
 */
@Test
@Ignore
public void testNewEditor() {
    try {
        // Create a contributor user via the builtin-users API and capture its API token.
        String contribUser = UUID.randomUUID().toString().substring(0, 8);
        String contribToken = given().body("{" + " \"userName\": \"" + contribUser + "\"," + " \"firstName\": \"" + contribUser + "\"," + " \"lastName\": \"" + contribUser + "\"," + " \"email\": \"" + contribUser + "@mailinator.com\"" + "}").contentType(ContentType.JSON).request().post("/api/builtin-users/secret/" + props.getProperty("builtin.user.key")).then().assertThat().statusCode(200).extract().jsonPath().getString("data.apiToken");
        // Grant the EDITOR role on the test dataverse to the new user.
        Response grantRole = UtilIT.grantRoleOnDataverse(testName, DataverseRole.EDITOR.toString(), "@" + contribUser, token);
        // grantRole.prettyPrint();
        // Create a single test file and put it in two places.
        String file1 = "testfile.txt";
        String file2 = "subdir/testfile.txt";
        File file = createTestFile(dsDir, file1, 0.25);
        if (file != null) {
            FileUtils.copyFile(file, new File(dsDir + file2));
        } else {
            System.out.println("Unable to copy file: " + dsDir + file2);
            fail();
        }
        // Mock the checksum manifest. try-with-resources guarantees the writer is
        // flushed and closed even if a write fails (the original leaked it on error).
        // The original also null-checked file1/file2, which are String literals and
        // can never be null — that dead branch is removed.
        String checksum1 = "asfdasdfasdfasdf";
        String checksum2 = "sgsdgdsgfsdgsdgf";
        try (PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"))) {
            pw.write(checksum1 + " " + file1);
            pw.write("\n");
            pw.write(checksum2 + " " + file2);
            pw.write("\n");
        }
        // Validate the job as the contributor. JUnit's assertEquals takes the
        // EXPECTED value first; the original reversed the arguments, which yields
        // misleading failure messages.
        JobExecutionEntity job = getJobWithToken(contribToken);
        assertEquals(1, job.getSteps().size());
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics = step1.getMetrics();
        assertEquals(BatchStatus.COMPLETED.name(), job.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, job.getStatus());
        assertEquals(BatchStatus.COMPLETED.name(), step1.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, step1.getStatus());
        assertEquals("import-files", step1.getName());
        assertEquals(0L, (long) metrics.get("write_skip_count"));
        assertEquals(1L, (long) metrics.get("commit_count"));
        assertEquals(0L, (long) metrics.get("process_skip_count"));
        assertEquals(0L, (long) metrics.get("read_skip_count"));
        assertEquals(2L, (long) metrics.get("write_count"));
        assertEquals(0L, (long) metrics.get("rollback_count"));
        assertEquals(0L, (long) metrics.get("filter_count"));
        assertEquals(2L, (long) metrics.get("read_count"));
        assertEquals(null, step1.getPersistentUserData());
        // Confirm the data files were imported. Use assertTrue, not the `assert`
        // keyword — JVM assertions are disabled unless -ea is passed, so the
        // original checks could silently never execute.
        updateDatasetJsonPath();
        List<String> storageIds = new ArrayList<>();
        storageIds.add(dsPath.getString("data.latestVersion.files[0].dataFile.storageIdentifier"));
        storageIds.add(dsPath.getString("data.latestVersion.files[1].dataFile.storageIdentifier"));
        assertTrue(storageIds.contains(file1));
        assertTrue(storageIds.contains(file2));
        // Exercise the reporting APIs with the contributor's token.
        given().header(API_TOKEN_HTTP_HEADER, contribToken).get(props.getProperty("job.status.api") + job.getId()).then().assertThat().body("status", equalTo("COMPLETED"));
        List<Integer> ids = given().header(API_TOKEN_HTTP_HEADER, contribToken).get(props.getProperty("job.status.api")).then().extract().jsonPath().getList("jobs.id");
        assertTrue(ids.contains((int) job.getId()));
    } catch (Exception e) {
        System.out.println("Error testNewEditor: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in the project dataverse by IQSS.
Class FileRecordJobIT, method testFileInChecksumManifestDoesntExist.
/**
 * Checksum manifest references a file that isn't present, it should return failed status and detailed
 * message in persistentUserData
 */
@Test
@Ignore
public void testFileInChecksumManifestDoesntExist() {
    try {
        // Create test files and a checksum manifest containing one record whose file doesn't exist.
        File file1 = createTestFile(dsDir, "testfile1.txt", 0.25);
        File file2 = createTestFile(dsDir, "testfile2.txt", 0.25);
        String checksum1 = "aorjsonaortargj848";
        String checksum2 = "ldgklrrshfdsnosri4948";
        if (file1 != null && file2 != null) {
            // try-with-resources flushes and closes the manifest even if a write fails
            // (the original leaked the PrintWriter on error).
            try (PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"))) {
                pw.write(checksum1 + " " + file1.getName());
                pw.write("\n");
                pw.write(checksum2 + " " + file2.getName());
                pw.write("\n");
                pw.write("asdfae34034asfaf9r3 fileThatDoesntExist.txt");
                pw.write("\n");
            }
        } else {
            fail();
        }
        // NOTE(review): the Javadoc above says the job should FAIL with a detailed
        // persistentUserData message, but the assertions below expect COMPLETED and
        // null user data — confirm which contract is current before re-enabling.
        // assertEquals arguments are ordered expected-first (the original reversed them).
        JobExecutionEntity job = getJob();
        assertEquals(1, job.getSteps().size());
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics1 = step1.getMetrics();
        // Check job status.
        assertEquals(BatchStatus.COMPLETED.name(), job.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, job.getStatus());
        // Check step 1 status and name.
        assertEquals(BatchStatus.COMPLETED.name(), step1.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, step1.getStatus());
        assertEquals("import-files", step1.getName());
        // Verify step 1 metrics: both real files written, nothing skipped or rolled back.
        assertEquals(0L, (long) metrics1.get("write_skip_count"));
        assertEquals(1L, (long) metrics1.get("commit_count"));
        assertEquals(0L, (long) metrics1.get("process_skip_count"));
        assertEquals(0L, (long) metrics1.get("read_skip_count"));
        assertEquals(2L, (long) metrics1.get("write_count"));
        assertEquals(0L, (long) metrics1.get("rollback_count"));
        assertEquals(0L, (long) metrics1.get("filter_count"));
        assertEquals(2L, (long) metrics1.get("read_count"));
        // Should be no user data (error messages).
        assertEquals(null, step1.getPersistentUserData());
        // Confirm files were imported. Use assertTrue, not the `assert` keyword,
        // which is a no-op unless the JVM runs with -ea.
        updateDatasetJsonPath();
        List<String> filenames = new ArrayList<>();
        filenames.add(dsPath.getString("data.latestVersion.files[0].dataFile.filename"));
        filenames.add(dsPath.getString("data.latestVersion.files[1].dataFile.filename"));
        assertTrue(filenames.contains("testfile1.txt"));
        assertTrue(filenames.contains("testfile2.txt"));
        // Confirm checksums were imported.
        List<String> checksums = new ArrayList<>();
        checksums.add(dsPath.getString("data.latestVersion.files[0].dataFile.checksum.value"));
        checksums.add(dsPath.getString("data.latestVersion.files[1].dataFile.checksum.value"));
        assertTrue(checksums.contains(checksum1));
        assertTrue(checksums.contains(checksum2));
    } catch (Exception e) {
        System.out.println("Error testChecksumImport: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in the project dataverse by IQSS.
Class FileRecordJobIT, method testSameFileInDifferentDirectories.
/**
 * Import the same file in different directories, in the same dataset.
 * This is not permitted via HTTP file upload since identical checksums are not allowed in the same dataset.
 * Ignores failed checksum manifest import.
 */
@Test
@Ignore
public void testSameFileInDifferentDirectories() {
    try {
        // Create a single test file and put it in two places.
        String file1 = "testfile.txt";
        String file2 = "subdir/testfile.txt";
        File file = createTestFile(dsDir, file1, 0.25);
        if (file != null) {
            FileUtils.copyFile(file, new File(dsDir + file2));
        } else {
            System.out.println("Unable to copy file: " + dsDir + file2);
            fail();
        }
        // Mock the checksum manifest. try-with-resources guarantees the writer is
        // flushed and closed even if a write fails (the original leaked it on error).
        // The original also null-checked file1/file2, which are String literals and
        // can never be null — that dead branch is removed.
        String checksum1 = "asfdasdfasdfasdf";
        String checksum2 = "sgsdgdsgfsdgsdgf";
        try (PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"))) {
            pw.write(checksum1 + " " + file1);
            pw.write("\n");
            pw.write(checksum2 + " " + file2);
            pw.write("\n");
        }
        // Validate the job. JUnit's assertEquals takes the EXPECTED value first;
        // the original reversed the arguments, producing misleading failure messages.
        JobExecutionEntity job = getJob();
        assertEquals(1, job.getSteps().size());
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics = step1.getMetrics();
        assertEquals(BatchStatus.COMPLETED.name(), job.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, job.getStatus());
        assertEquals(BatchStatus.COMPLETED.name(), step1.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, step1.getStatus());
        assertEquals("import-files", step1.getName());
        assertEquals(0L, (long) metrics.get("write_skip_count"));
        assertEquals(1L, (long) metrics.get("commit_count"));
        assertEquals(0L, (long) metrics.get("process_skip_count"));
        assertEquals(0L, (long) metrics.get("read_skip_count"));
        assertEquals(2L, (long) metrics.get("write_count"));
        assertEquals(0L, (long) metrics.get("rollback_count"));
        assertEquals(0L, (long) metrics.get("filter_count"));
        assertEquals(2L, (long) metrics.get("read_count"));
        assertEquals(null, step1.getPersistentUserData());
        // Confirm the data files were imported. Use assertTrue, not the `assert`
        // keyword, which never runs unless the JVM is started with -ea.
        updateDatasetJsonPath();
        List<String> storageIds = new ArrayList<>();
        storageIds.add(dsPath.getString("data.latestVersion.files[0].dataFile.storageIdentifier"));
        storageIds.add(dsPath.getString("data.latestVersion.files[1].dataFile.storageIdentifier"));
        assertTrue(storageIds.contains(file1));
        assertTrue(storageIds.contains(file2));
        // Exercise the reporting APIs.
        given().header(API_TOKEN_HTTP_HEADER, token).get(props.getProperty("job.status.api") + job.getId()).then().assertThat().body("status", equalTo("COMPLETED"));
        List<Integer> ids = given().header(API_TOKEN_HTTP_HEADER, token).get(props.getProperty("job.status.api")).then().extract().jsonPath().getList("jobs.id");
        assertTrue(ids.contains((int) job.getId()));
    } catch (Exception e) {
        System.out.println("Error testIdenticalFilesInDifferentDirectories: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in the project dataverse by IQSS.
Class FileRecordJobIT, method testFilesWithoutChecksumManifest.
/**
 * No checksum manifest found: the job should fail with zero work performed,
 * while the files themselves are imported with unknown checksums.
 */
@Test
@Ignore
public void testFilesWithoutChecksumManifest() {
    try {
        // Create test files and NO checksum manifest.
        createTestFile(dsDir, "testfile1.txt", 0.25);
        createTestFile(dsDir, "testfile2.txt", 0.25);
        // JUnit's assertEquals takes the EXPECTED value first; the original
        // reversed the arguments, producing misleading failure messages.
        JobExecutionEntity job = getJob();
        assertEquals(1, job.getSteps().size());
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics1 = step1.getMetrics();
        // Check job status: missing manifest fails the whole job.
        assertEquals(BatchStatus.FAILED.name(), job.getExitStatus());
        assertEquals(BatchStatus.FAILED, job.getStatus());
        // Check step 1 status and name.
        assertEquals(BatchStatus.FAILED.name(), step1.getExitStatus());
        assertEquals(BatchStatus.FAILED, step1.getStatus());
        assertEquals("import-files", step1.getName());
        // Verify step 1 metrics: nothing read, written, or committed.
        assertEquals(0L, (long) metrics1.get("write_skip_count"));
        assertEquals(0L, (long) metrics1.get("commit_count"));
        assertEquals(0L, (long) metrics1.get("process_skip_count"));
        assertEquals(0L, (long) metrics1.get("read_skip_count"));
        assertEquals(0L, (long) metrics1.get("write_count"));
        assertEquals(0L, (long) metrics1.get("rollback_count"));
        assertEquals(0L, (long) metrics1.get("filter_count"));
        assertEquals(0L, (long) metrics1.get("read_count"));
        // Should be no user data (error messages).
        assertEquals(null, step1.getPersistentUserData());
        // Confirm files were imported and checksums unknown. Use assertTrue, not
        // the `assert` keyword, which is a no-op unless the JVM runs with -ea.
        updateDatasetJsonPath();
        List<String> filenames = new ArrayList<>();
        filenames.add(dsPath.getString("data.latestVersion.files[0].dataFile.filename"));
        filenames.add(dsPath.getString("data.latestVersion.files[1].dataFile.filename"));
        assertTrue(filenames.contains("testfile1.txt"));
        assertTrue(filenames.contains("testfile2.txt"));
        assertTrue(dsPath.getString("data.latestVersion.files[0].dataFile.checksum.value").equalsIgnoreCase("unknown"));
        assertTrue(dsPath.getString("data.latestVersion.files[1].dataFile.checksum.value").equalsIgnoreCase("unknown"));
    } catch (Exception e) {
        System.out.println("Error testChecksumImportMissingManifest: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in the project dataverse by IQSS.
Class BatchJobResource, method listBatchJobsByName.
/**
 * Lists every execution of every instance of the named batch job as JSON.
 * Returns an empty JSON list on any failure (best-effort reporting endpoint).
 */
@GET
@Path("/jobs/name/{jobName}")
@Produces(MediaType.APPLICATION_JSON)
public Response listBatchJobsByName(@PathParam("jobName") String jobName) {
    try {
        final JobOperator operator = BatchRuntime.getJobOperator();
        final int instanceCount = operator.getJobInstanceCount(jobName);
        final List<JobExecutionEntity> entities = new ArrayList<>();
        // Flatten instances -> executions -> serializable entities.
        for (final JobInstance instance : operator.getJobInstances(jobName, 0, instanceCount)) {
            for (final JobExecution execution : operator.getJobExecutions(instance)) {
                entities.add(JobExecutionEntity.create(execution));
            }
        }
        return Response.ok("{ \"jobs\": \n" + mapper.writeValueAsString(entities) + "\n}").build();
    } catch (Exception e) {
        // Deliberate best-effort fallback: an unknown job name or serialization
        // error degrades to an empty list rather than an error status.
        return Response.ok(EMPTY_JSON_LIST).build();
    }
}
Aggregations