
Example 6 with JobExecutionEntity

Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in project dataverse by IQSS.

From the class BatchJobResource, method listBatchJobs:

@GET
@Path("/jobs")
@Produces(MediaType.APPLICATION_JSON)
public Response listBatchJobs() {
    try {
        final List<JobExecutionEntity> executionEntities = new ArrayList<>();
        final JobOperator jobOperator = BatchRuntime.getJobOperator();
        final Set<String> names = jobOperator.getJobNames();
        for (String name : names) {
            final int end = jobOperator.getJobInstanceCount(name);
            final List<JobInstance> jobInstances = jobOperator.getJobInstances(name, 0, end);
            for (JobInstance jobInstance : jobInstances) {
                final List<JobExecution> executions = jobOperator.getJobExecutions(jobInstance);
                for (JobExecution execution : executions) {
                    executionEntities.add(JobExecutionEntity.create(execution));
                }
            }
        }
        return Response.ok("{ \"jobs\": \n" + mapper.writeValueAsString(executionEntities) + "\n}").build();
    } catch (Exception e) {
        return Response.ok(EMPTY_JSON_LIST).build();
    }
}
Also used: JobExecution (javax.batch.runtime.JobExecution), JobInstance (javax.batch.runtime.JobInstance), JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity), ArrayList (java.util.ArrayList), JobOperator (javax.batch.operations.JobOperator), Path (javax.ws.rs.Path), Produces (javax.ws.rs.Produces), GET (javax.ws.rs.GET).
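
For context, here is a minimal JAX-RS client sketch that calls this endpoint and prints the JSON payload. It assumes a local deployment and that BatchJobResource is mounted under /api/batch; the actual host, port, and path prefix depend on the Dataverse installation, so treat the URL as hypothetical.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

public class BatchJobsClient {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // Hypothetical deployment URL; adjust host, port, and path prefix as needed.
        String json = client.target("http://localhost:8080/api/batch/jobs")
                .request(MediaType.APPLICATION_JSON)
                .get(String.class);
        System.out.println(json); // prints { "jobs": [ ... ] }
        client.close();
    }
}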

Example 7 with JobExecutionEntity

Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in project dataverse by IQSS.

From the class FileRecordJobListener, method doReport:

/**
 * Generate all the job reports and user notifications.
 */
private void doReport() {
    try {
        String jobJson;
        String jobId = Long.toString(jobContext.getInstanceId());
        JobOperator jobOperator = BatchRuntime.getJobOperator();
        if (user == null) {
            getJobLogger().log(Level.SEVERE, "Cannot find authenticated user.");
            return;
        }
        if (dataset == null) {
            getJobLogger().log(Level.SEVERE, "Cannot find dataset.");
            return;
        }
        long datasetVersionId = dataset.getLatestVersion().getId();
        JobExecution jobExecution = jobOperator.getJobExecution(jobContext.getInstanceId());
        if (jobExecution != null) {
            Date date = new Date();
            Timestamp timestamp = new Timestamp(date.getTime());
            JobExecutionEntity jobExecutionEntity = JobExecutionEntity.create(jobExecution);
            jobExecutionEntity.setExitStatus("COMPLETED");
            jobExecutionEntity.setStatus(BatchStatus.COMPLETED);
            jobExecutionEntity.setEndTime(date);
            jobJson = new ObjectMapper().writeValueAsString(jobExecutionEntity);
            String logDir = System.getProperty("com.sun.aas.instanceRoot") + SEP + "logs" + SEP + "batch-jobs" + SEP;
            // [1] save json log to file
            LoggingUtil.saveJsonLog(jobJson, logDir, jobId);
            // [2] send user notifications - to all authors
            notificationServiceBean.sendNotification(user, timestamp, notifyType, datasetVersionId);
            Map<String, AuthenticatedUser> distinctAuthors = permissionServiceBean.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
            distinctAuthors.values().forEach((value) -> {
                notificationServiceBean.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), notifyType, datasetVersionId);
            });
            // [3] send SuperUser notification
            List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
            if (superUsers != null && !superUsers.isEmpty()) {
                superUsers.forEach((au) -> {
                    notificationServiceBean.sendNotification(au, timestamp, notifyType, datasetVersionId);
                });
            }
            // [4] action log: store location of the full log to avoid truncation issues
            actionLogServiceBean.log(LoggingUtil.getActionLogRecord(user.getIdentifier(), jobExecution, logDir + "job-" + jobId + ".log", jobId));
        } else {
            getJobLogger().log(Level.SEVERE, "Job execution is null");
        }
    } catch (NoSuchJobExecutionException | JobSecurityException | JsonProcessingException e) {
        getJobLogger().log(Level.SEVERE, "Creating job json: " + e.getMessage());
    }
}
Also used: JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity), JobOperator (javax.batch.operations.JobOperator), Timestamp (java.sql.Timestamp), AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser), Date (java.util.Date), JobExecution (javax.batch.runtime.JobExecution), JobSecurityException (javax.batch.operations.JobSecurityException), NoSuchJobExecutionException (javax.batch.operations.NoSuchJobExecutionException), JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException), ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper).
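
The Jackson call above is a plain writeValueAsString on a POJO. Here is a self-contained sketch of that serialization step; JobReport is a hypothetical stand-in mirroring only a few of JobExecutionEntity's fields, not the real entity.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Date;

public class JobReportDemo {

    // Hypothetical stand-in for JobExecutionEntity; public fields are
    // picked up by Jackson's default visibility rules.
    public static class JobReport {
        public String status = "COMPLETED";
        public String exitStatus = "COMPLETED";
        public Date endTime = new Date();
    }

    public static void main(String[] args) throws Exception {
        String jobJson = new ObjectMapper().writeValueAsString(new JobReport());
        System.out.println(jobJson); // {"status":"COMPLETED","exitStatus":"COMPLETED","endTime":...}
    }
}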

Example 8 with JobExecutionEntity

Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in project dataverse by IQSS.

From the class FileRecordJobIT, method testAddingFilesInMergeMode:

/**
 * Add a file in MERGE mode (default); should only need to commit the new file.
 */
@Test
@Ignore
public void testAddingFilesInMergeMode() {
    try {
        // create a single test file and put it in two places
        String file1 = "testfile.txt";
        String file2 = "subdir/testfile.txt";
        File file = createTestFile(dsDir, file1, 0.25);
        if (file != null) {
            FileUtils.copyFile(file, new File(dsDir + file2));
        } else {
            System.out.println("Unable to copy file: " + dsDir + file2);
            fail();
        }
        // mock the checksum manifest
        String checksum1 = "asfdasdfasdfasdf";
        String checksum2 = "sgsdgdsgfsdgsdgf";
        if (file1 != null && file2 != null) {
            PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"));
            pw.write(checksum1 + " " + file1);
            pw.write("\n");
            pw.write(checksum2 + " " + file2);
            pw.write("\n");
            pw.close();
        } else {
            fail();
        }
        // validate job
        JobExecutionEntity job = getJob();
        assertEquals(job.getSteps().size(), 1);
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics = step1.getMetrics();
        assertEquals(job.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(job.getStatus(), BatchStatus.COMPLETED);
        assertEquals(step1.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(step1.getStatus(), BatchStatus.COMPLETED);
        assertEquals(step1.getName(), "import-files");
        assertEquals((long) metrics.get("write_skip_count"), 0);
        assertEquals((long) metrics.get("commit_count"), 1);
        assertEquals((long) metrics.get("process_skip_count"), 0);
        assertEquals((long) metrics.get("read_skip_count"), 0);
        assertEquals((long) metrics.get("write_count"), 2);
        assertEquals((long) metrics.get("rollback_count"), 0);
        assertEquals((long) metrics.get("filter_count"), 0);
        assertEquals((long) metrics.get("read_count"), 2);
        assertEquals(step1.getPersistentUserData(), null);
        // confirm data files were imported
        updateDatasetJsonPath();
        List<String> storageIds = new ArrayList<>();
        storageIds.add(dsPath.getString("data.latestVersion.files[0].dataFile.storageIdentifier"));
        storageIds.add(dsPath.getString("data.latestVersion.files[1].dataFile.storageIdentifier"));
        assert (storageIds.contains(file1));
        assert (storageIds.contains(file2));
        // test the reporting apis
        given().header(API_TOKEN_HTTP_HEADER, token)
                .get(props.getProperty("job.status.api") + job.getId())
                .then().assertThat()
                .body("status", equalTo("COMPLETED"));
        List<Integer> ids = given().header(API_TOKEN_HTTP_HEADER, token)
                .get(props.getProperty("job.status.api"))
                .then().extract().jsonPath()
                .getList("jobs.id");
        assertTrue(ids.contains((int) job.getId()));
        // add a new file and run the job again
        String file3 = "addedfile.txt";
        File addedFile = createTestFile(dsDir, file3, 0.25);
        // mock the checksum manifest
        String checksum3 = "asfdasdfasdfasdf";
        if (file1 != null && file2 != null && file3 != null) {
            PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"));
            pw.write(checksum1 + " " + file1);
            pw.write("\n");
            pw.write(checksum2 + " " + file2);
            pw.write("\n");
            pw.write(checksum3 + " " + file3);
            pw.write("\n");
            pw.close();
        } else {
            fail();
        }
        // validate job again
        JobExecutionEntity newJob = getJobWithMode("MERGE");
        assertEquals(newJob.getSteps().size(), 1);
        StepExecutionEntity newSteps = newJob.getSteps().get(0);
        Map<String, Long> newMetrics = newSteps.getMetrics();
        assertEquals(newJob.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(newJob.getStatus(), BatchStatus.COMPLETED);
        assertEquals(newSteps.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(newSteps.getStatus(), BatchStatus.COMPLETED);
        assertEquals(newSteps.getName(), "import-files");
        assertEquals(0, (long) newMetrics.get("write_skip_count"));
        assertEquals(1, (long) newMetrics.get("commit_count"));
        assertEquals(0, (long) newMetrics.get("process_skip_count"));
        assertEquals(0, (long) newMetrics.get("read_skip_count"));
        assertEquals(1, (long) newMetrics.get("write_count"));
        assertEquals(0, (long) newMetrics.get("rollback_count"));
        assertEquals(2, (long) newMetrics.get("filter_count"));
        assertEquals(3, (long) newMetrics.get("read_count"));
        assertEquals(newSteps.getPersistentUserData(), null);
        // confirm data files were imported
        updateDatasetJsonPath();
        List<String> newStorageIds = new ArrayList<>();
        newStorageIds.add(dsPath.getString("data.latestVersion.files[0].dataFile.storageIdentifier"));
        newStorageIds.add(dsPath.getString("data.latestVersion.files[1].dataFile.storageIdentifier"));
        newStorageIds.add(dsPath.getString("data.latestVersion.files[2].dataFile.storageIdentifier"));
        assert (newStorageIds.contains(file1));
        assert (newStorageIds.contains(file2));
        assert (newStorageIds.contains(file3));
        // test the reporting apis
        given().header(API_TOKEN_HTTP_HEADER, token)
                .get(props.getProperty("job.status.api") + newJob.getId())
                .then().assertThat()
                .body("status", equalTo("COMPLETED"));
        List<Integer> newIds = given().header(API_TOKEN_HTTP_HEADER, token)
                .get(props.getProperty("job.status.api"))
                .then().extract().jsonPath()
                .getList("jobs.id");
        assertTrue(newIds.contains((int) job.getId()));
    } catch (Exception e) {
        System.out.println("Error testIdenticalFilesInDifferentDirectories: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Also used: FileWriter (java.io.FileWriter), JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity), ArrayList (java.util.ArrayList), IOException (java.io.IOException), StepExecutionEntity (edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity), File (java.io.File), PrintWriter (java.io.PrintWriter), Ignore (org.junit.Ignore), Test (org.junit.Test).
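
The tests above mock checksums with arbitrary strings. A sketch of producing a real entry for the files.sha manifest follows; the "<checksum> <path>" line format is taken from the writes in the test, while the choice of SHA-1 is an assumption based on the .sha extension.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class ManifestEntry {

    // Hex-encoded SHA-1 of a file's contents.
    static String sha1Hex(Path p) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-1");
        StringBuilder sb = new StringBuilder();
        for (byte b : md.digest(Files.readAllBytes(p))) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    public static void main(String[] args) throws Exception {
        // Emits one "<checksum> <path>" line, matching the manifest format above.
        Path file = Paths.get("testfile.txt");
        System.out.println(sha1Hex(file) + " " + file.getFileName());
    }
}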

Example 9 with JobExecutionEntity

Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in project dataverse by IQSS.

From the class FileRecordJobIT, method testFilesWithChecksumManifest:

/**
 * The success case: all files uploaded and present in the checksum manifest.
 */
@Test
@Ignore
public void testFilesWithChecksumManifest() {
    try {
        // create test files and checksum manifest
        File file1 = createTestFile(dsDir, "testfile1.txt", 0.25);
        File file2 = createTestFile(dsDir, "testfile2.txt", 0.25);
        String checksum1 = "asfdasdfasdfasdf";
        String checksum2 = "sgsdgdsgfsdgsdgf";
        if (file1 != null && file2 != null) {
            PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"));
            pw.write(checksum1 + " " + file1.getName());
            pw.write("\n");
            pw.write(checksum2 + " " + file2.getName());
            pw.write("\n");
            pw.close();
        } else {
            fail();
        }
        JobExecutionEntity job = getJob();
        assertEquals(job.getSteps().size(), 1);
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics1 = step1.getMetrics();
        // check job status
        assertEquals(BatchStatus.COMPLETED.name(), job.getExitStatus());
        assertEquals(BatchStatus.COMPLETED, job.getStatus());
        // check step 1 status and name
        assertEquals(step1.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(step1.getStatus(), BatchStatus.COMPLETED);
        assertEquals(step1.getName(), "import-files");
        // verify step 1 metrics
        assertEquals((long) metrics1.get("write_skip_count"), 0);
        assertEquals((long) metrics1.get("commit_count"), 1);
        assertEquals((long) metrics1.get("process_skip_count"), 0);
        assertEquals((long) metrics1.get("read_skip_count"), 0);
        assertEquals((long) metrics1.get("write_count"), 2);
        assertEquals((long) metrics1.get("rollback_count"), 0);
        assertEquals((long) metrics1.get("filter_count"), 0);
        assertEquals((long) metrics1.get("read_count"), 2);
        // should be no user data (error messages)
        assertEquals(step1.getPersistentUserData(), null);
        // confirm files were imported
        updateDatasetJsonPath();
        List<String> filenames = new ArrayList<>();
        filenames.add(dsPath.getString("data.latestVersion.files[0].dataFile.filename"));
        filenames.add(dsPath.getString("data.latestVersion.files[1].dataFile.filename"));
        assert (filenames.contains("testfile1.txt"));
        assert (filenames.contains("testfile2.txt"));
        // confirm checksums were imported
        List<String> checksums = new ArrayList<>();
        checksums.add(dsPath.getString("data.latestVersion.files[0].dataFile.checksum.value"));
        checksums.add(dsPath.getString("data.latestVersion.files[1].dataFile.checksum.value"));
        assert (checksums.contains(checksum1));
        assert (checksums.contains(checksum2));
    } catch (Exception e) {
        System.out.println("Error testChecksumImport: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Also used: StepExecutionEntity (edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity), FileWriter (java.io.FileWriter), JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity), ArrayList (java.util.ArrayList), File (java.io.File), IOException (java.io.IOException), PrintWriter (java.io.PrintWriter), Ignore (org.junit.Ignore), Test (org.junit.Test).
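
The dsPath assertions rely on REST-assured's JsonPath. A minimal, self-contained sketch follows; the JSON literal is a hypothetical, trimmed-down dataset response (real responses carry many more fields).

import com.jayway.restassured.path.json.JsonPath;

public class JsonPathDemo {
    public static void main(String[] args) {
        // Hypothetical, trimmed-down dataset JSON for illustration only.
        String json = "{\"data\":{\"latestVersion\":{\"files\":[{\"dataFile\":"
                + "{\"filename\":\"testfile1.txt\",\"checksum\":{\"value\":\"asfdasdfasdfasdf\"}}}]}}}";
        JsonPath dsPath = new JsonPath(json);
        System.out.println(dsPath.getString("data.latestVersion.files[0].dataFile.filename"));
        System.out.println(dsPath.getString("data.latestVersion.files[0].dataFile.checksum.value"));
    }
}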

Example 10 with JobExecutionEntity

Use of edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity in project dataverse by IQSS.

From the class FileRecordJobIT, method testFileMissingInChecksumManifest:

/**
 * Checksum manifest is missing an uploaded file.
 */
@Test
@Ignore
public void testFileMissingInChecksumManifest() {
    try {
        // create test files and checksum manifest with just one of the files
        File file1 = createTestFile(dsDir, "testfile1.txt", 0.25);
        File file2 = createTestFile(dsDir, "testfile2.txt", 0.25);
        String checksum1 = "";
        if (file1 != null && file2 != null) {
            PrintWriter pw = new PrintWriter(new FileWriter(dsDir + "/files.sha"));
            checksum1 = "asasdlfkj880asfdasflj";
            pw.write(checksum1 + " " + file1.getName());
            pw.write("\n");
            pw.close();
        } else {
            fail();
        }
        JobExecutionEntity job = getJob();
        assertEquals(job.getSteps().size(), 1);
        StepExecutionEntity step1 = job.getSteps().get(0);
        Map<String, Long> metrics1 = step1.getMetrics();
        // check job status
        assertEquals(job.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(job.getStatus(), BatchStatus.COMPLETED);
        // check step 1 status and name
        assertEquals(step1.getExitStatus(), BatchStatus.COMPLETED.name());
        assertEquals(step1.getStatus(), BatchStatus.COMPLETED);
        assertEquals(step1.getName(), "import-files");
        // verify step 1 metrics
        assertEquals((long) metrics1.get("write_skip_count"), 0);
        assertEquals((long) metrics1.get("commit_count"), 1);
        assertEquals((long) metrics1.get("process_skip_count"), 0);
        assertEquals((long) metrics1.get("read_skip_count"), 0);
        assertEquals((long) metrics1.get("write_count"), 2);
        assertEquals((long) metrics1.get("rollback_count"), 0);
        assertEquals((long) metrics1.get("filter_count"), 0);
        assertEquals((long) metrics1.get("read_count"), 2);
        // should be no user data (error messages)
        assertEquals(step1.getPersistentUserData(), null);
        // confirm files were imported
        updateDatasetJsonPath();
        List<String> filenames = new ArrayList<>();
        filenames.add(dsPath.getString("data.latestVersion.files[0].dataFile.filename"));
        filenames.add(dsPath.getString("data.latestVersion.files[1].dataFile.filename"));
        assert (filenames.contains("testfile1.txt"));
        assert (filenames.contains("testfile2.txt"));
        // confirm one checksum was imported, one not
        List<String> checksums = new ArrayList<>();
        checksums.add(dsPath.getString("data.latestVersion.files[0].dataFile.checksum.value"));
        checksums.add(dsPath.getString("data.latestVersion.files[1].dataFile.checksum.value"));
        assert (checksums.contains(checksum1));
        assert (checksums.contains("Unknown"));
    } catch (Exception e) {
        System.out.println("Error testChecksumImport: " + e.getMessage());
        e.printStackTrace();
        fail();
    }
}
Also used: StepExecutionEntity (edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity), FileWriter (java.io.FileWriter), JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity), ArrayList (java.util.ArrayList), File (java.io.File), IOException (java.io.IOException), PrintWriter (java.io.PrintWriter), Ignore (org.junit.Ignore), Test (org.junit.Test).
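
These tests only inspect finished executions. For completeness, here is a hedged sketch of launching a JSR-352 job so that such executions exist in the first place; "FileSystemImportJob" and the "mode" parameter name are hypothetical, and Dataverse's actual job XML name and parameters may differ.

import java.util.Properties;
import javax.batch.operations.JobOperator;
import javax.batch.runtime.BatchRuntime;

public class LaunchJob {
    public static void main(String[] args) {
        JobOperator operator = BatchRuntime.getJobOperator();
        Properties params = new Properties();
        params.setProperty("mode", "MERGE"); // MERGE and REPLACE are the modes the tests exercise
        // start() runs the job XML found under META-INF/batch-jobs/<name>.xml
        long executionId = operator.start("FileSystemImportJob", params);
        System.out.println("Started execution " + executionId);
    }
}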

Aggregations

JobExecutionEntity (edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity) - 10
ArrayList (java.util.ArrayList) - 9
StepExecutionEntity (edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity) - 7
IOException (java.io.IOException) - 7
Ignore (org.junit.Ignore) - 7
Test (org.junit.Test) - 7
File (java.io.File) - 6
FileWriter (java.io.FileWriter) - 6
PrintWriter (java.io.PrintWriter) - 6
JobOperator (javax.batch.operations.JobOperator) - 3
JobExecution (javax.batch.runtime.JobExecution) - 3
JobInstance (javax.batch.runtime.JobInstance) - 2
GET (javax.ws.rs.GET) - 2
Path (javax.ws.rs.Path) - 2
Produces (javax.ws.rs.Produces) - 2
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException) - 1
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper) - 1
Response (com.jayway.restassured.response.Response) - 1
AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser) - 1
Timestamp (java.sql.Timestamp) - 1