Use of javax.batch.runtime.StepExecution in project dataverse by IQSS — class FileRecordJobListener, method afterJob.
/**
 * Job-level listener callback invoked after the batch job finishes.
 * On failure, logs an error and closes the job logger. On success, runs
 * reporting, logs any checksum-manifest entries that were never matched
 * to an imported file, and logs the first step's timing/exit information.
 *
 * @throws Exception if reporting or JobOperator access fails
 */
@Override
public void afterJob() throws Exception {
    // TODO add notifications to job failure?
    // "FAILED".equals(...) is null-safe, so no separate null check is needed.
    if ("FAILED".equals(jobContext.getExitStatus())) {
        getJobLogger().log(Level.SEVERE, "Job Failed. See Log for more information.");
        closeJobLoggerHandlers();
        return;
    }
    // run reporting and notifications
    doReport();
    // Report any unused checksums: entries still present in the transient map
    // are manifest rows for which no matching file was found.
    @SuppressWarnings("unchecked")
    Map<String, String> checksumMap = (Map<String, String>) jobContext.getTransientUserData();
    if (checksumMap != null) {
        for (String manifestPath : checksumMap.keySet()) {
            getJobLogger().log(Level.SEVERE, "File listed in checksum manifest not found: " + manifestPath);
        }
    }
    // remove dataset lock
    // Disabled now, see L.A.'s comment at beforeJob()
    // if (dataset != null && dataset.getId() != null) {
    //     datasetServiceBean.removeDatasetLock(dataset.getId(), DatasetLock.Reason.Ingest);
    // }
    // NOTE(review): lock removal above is disabled, so this log line is
    // currently misleading — kept for log-format compatibility.
    getJobLogger().log(Level.INFO, "Removing dataset lock.");
    // job step info: log timing of the first (only) step of this job instance
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    StepExecution step = jobOperator.getStepExecutions(jobContext.getInstanceId()).get(0);
    getJobLogger().log(Level.INFO, "Job start = " + step.getStartTime());
    getJobLogger().log(Level.INFO, "Job end = " + step.getEndTime());
    getJobLogger().log(Level.INFO, "Job exit status = " + step.getExitStatus());
    closeJobLoggerHandlers();
}
Use of javax.batch.runtime.StepExecution in project dataverse by IQSS — class JobExecutionEntity, method create.
/**
 * Builds an immutable snapshot entity from a runtime {@link JobExecution}:
 * identity, batch status, timestamps, job parameters, and one entity per
 * step execution.
 *
 * @param jobExecution the runtime execution to snapshot; must not be null
 * @return a populated JobExecutionEntity
 */
public static JobExecutionEntity create(final JobExecution jobExecution) {
    final JobOperator jobOperator = BatchRuntime.getJobOperator();
    final JobExecutionEntity result = new JobExecutionEntity();
    result.id = jobExecution.getExecutionId();
    result.name = jobExecution.getJobName();
    result.status = jobExecution.getBatchStatus();
    // Copy java.util.Date values defensively — Date is mutable.
    result.createTime = new Date(jobExecution.getCreateTime().getTime());
    result.lastUpdateTime = new Date(jobExecution.getLastUpdatedTime().getTime());
    // Start time is null until the job actually begins executing.
    if (jobExecution.getStartTime() != null) {
        result.startTime = new Date(jobExecution.getStartTime().getTime());
    }
    // Exit status (and end time) are only available once the job has ended.
    if (jobExecution.getExitStatus() != null) {
        result.exitStatus = jobExecution.getExitStatus();
        result.endTime = new Date(jobExecution.getEndTime().getTime());
    }
    // job parameters
    result.properties = new LinkedHashMap<>();
    final Properties props = jobOperator.getParameters(jobExecution.getExecutionId());
    if (props != null) {
        for (String name : props.stringPropertyNames()) {
            result.properties.put(name, props.getProperty(name));
        }
    }
    // steps — iterating an empty list is a no-op, so no size() guard needed
    result.steps = new ArrayList<>();
    for (StepExecution step : jobOperator.getStepExecutions(jobExecution.getExecutionId())) {
        result.steps.add(StepExecutionEntity.create(step));
    }
    return result;
}
Use of javax.batch.runtime.StepExecution in project Payara by payara — class JBatchJDBCPersistenceManager, method getMostRecentStepExecutionsForJobInstance.
/**
 * Loads the most recent step execution for each distinct step name of the
 * given job instance. The query is assumed to return rows ordered
 * most-recent-first — TODO confirm against MOST_RECENT_STEPS_FOR_JOB —
 * so only the first row seen per step name is materialized.
 *
 * @param instanceId the job instance id to query
 * @return map of step name to its most recent StepExecution (possibly empty)
 * @throws PersistenceException wrapping any SQL, I/O, or deserialization failure
 */
@Override
public Map<String, StepExecution> getMostRecentStepExecutionsForJobInstance(long instanceId) {
    Map<String, StepExecution> data = new HashMap<>();
    try (Connection conn = getConnection();
            PreparedStatement statement = conn.prepareStatement(queryStrings.get(MOST_RECENT_STEPS_FOR_JOB))) {
        statement.setLong(1, instanceId);
        try (ResultSet rs = statement.executeQuery()) {
            while (rs.next()) {
                String stepname = rs.getString("stepname");
                // Already captured the most recent execution for this step.
                if (data.containsKey(stepname)) {
                    continue;
                }
                long jobexecid = rs.getLong("jobexecid");
                long stepexecid = rs.getLong("stepexecid");
                String batchstatus = rs.getString("batchstatus");
                String exitstatus = rs.getString("exitstatus");
                long readCount = rs.getLong("readcount");
                long writeCount = rs.getLong("writecount");
                long commitCount = rs.getLong("commitcount");
                long rollbackCount = rs.getLong("rollbackcount");
                long readSkipCount = rs.getLong("readskipcount");
                long processSkipCount = rs.getLong("processskipcount");
                long filterCount = rs.getLong("filtercount");
                long writeSkipCount = rs.getLong("writeSkipCount");
                Timestamp startTS = rs.getTimestamp("startTime");
                Timestamp endTS = rs.getTimestamp("endTime");
                // Deserialize the step's persistent user data, if any, using
                // the thread-context classloader (application classes).
                Serializable persistentData = null;
                byte[] pDataBytes = rs.getBytes("persistentData");
                if (pDataBytes != null) {
                    try (ObjectInputStream objectIn = new TCCLObjectInputStream(new ByteArrayInputStream(pDataBytes))) {
                        persistentData = (Serializable) objectIn.readObject();
                    }
                }
                StepExecutionImpl stepEx = new StepExecutionImpl(jobexecid, stepexecid);
                stepEx.setBatchStatus(BatchStatus.valueOf(batchstatus));
                stepEx.setExitStatus(exitstatus);
                stepEx.setStepName(stepname);
                stepEx.setReadCount(readCount);
                stepEx.setWriteCount(writeCount);
                stepEx.setCommitCount(commitCount);
                stepEx.setRollbackCount(rollbackCount);
                stepEx.setReadSkipCount(readSkipCount);
                stepEx.setProcessSkipCount(processSkipCount);
                stepEx.setFilterCount(filterCount);
                stepEx.setWriteSkipCount(writeSkipCount);
                stepEx.setStartTime(startTS);
                stepEx.setEndTime(endTS);
                stepEx.setPersistentUserData(persistentData);
                data.put(stepname, stepEx);
            }
        }
    } catch (SQLException | IOException | ClassNotFoundException e) {
        // Preserve the original exception as the cause.
        throw new PersistenceException(e);
    }
    return data;
}
Use of javax.batch.runtime.StepExecution in project javaee7-samples by javaee-samples — class BatchChunkMapperTest, method testBatchChunkMapper.
/**
 * Starts the batch job and waits until it leaves the STARTED state, then
 * validates the chunk metrics of the partitioned step "myStep".
 *
 * The job reads and processes 20 elements (numbers 1 to 20) across two
 * partitions (1-10 and 11-20), writing only the odd ones; checkpoints occur
 * after every 3 items read per partition.
 *
 * @throws Exception if the batch could not complete successfully.
 */
@Test
public void testBatchChunkMapper() throws Exception {
    JobOperator operator = getJobOperator();
    Long execId = operator.start("myJob", new Properties());
    final JobExecution execution = BatchTestHelper.keepTestAlive(operator.getJobExecution(execId));
    // Poll until the job is no longer running.
    await().atMost(ONE_MINUTE).with().pollInterval(FIVE_HUNDRED_MILLISECONDS)
            .until(() -> execution.getBatchStatus() != STARTED);
    for (StepExecution step : operator.getStepExecutions(execId)) {
        if (!step.getStepName().equals("myStep")) {
            continue;
        }
        Map<Metric.MetricType, Long> metrics = BatchTestHelper.getMetricsMap(step.getMetrics());
        // <1> The read count should be 20 elements. Check +MyItemReader+.
        assertEquals(20L, metrics.get(READ_COUNT).longValue());
        // <2> The write count should be 10. Only half of the elements read are processed to be written.
        assertEquals(10L, metrics.get(WRITE_COUNT).longValue());
        // Number of elements by the item count value on myJob.xml, plus an additional transaction for the
        // remaining elements by each partition.
        long expectedCommits = (10L / 3 + (10 % 3 > 0 ? 1 : 0)) * 2;
        // <3> The commit count should be 8. Checkpoint is on every 3rd read, 4 commits for read elements and 2 partitions.
        assertEquals(expectedCommits, metrics.get(COMMIT_COUNT).longValue());
    }
    // <4> Make sure that all the partitions were created.
    assertEquals(2L, totalReaders);
    // <5> Job should be completed.
    assertEquals(COMPLETED, execution.getBatchStatus());
}
Use of javax.batch.runtime.StepExecution in project javaee7-samples by javaee-samples — class BatchCSVDatabaseTest, method testBatchCSVDatabase.
/**
 * In the test, we're just going to invoke the batch execution and wait for completion. To validate the test
 * expected behaviour we need to query the +javax.batch.runtime.Metric+ object available in the step execution.
 *
 * The batch process itself will read and write 7 elements of type +Person+. Commits are executed after 3 elements
 * are read.
 *
 * @throws Exception an exception if the batch could not complete successfully.
 */
@SuppressWarnings("unchecked")
@Test
public void testBatchCSVDatabase() throws Exception {
    JobOperator jobOperator = getJobOperator();
    Long executionId = jobOperator.start("myJob", new Properties());
    JobExecution jobExecution = jobOperator.getJobExecution(executionId);
    jobExecution = keepTestAlive(jobExecution);
    List<StepExecution> stepExecutions = jobOperator.getStepExecutions(executionId);
    for (StepExecution stepExecution : stepExecutions) {
        if (stepExecution.getStepName().equals("myStep")) {
            Map<Metric.MetricType, Long> metricsMap = BatchTestHelper.getMetricsMap(stepExecution.getMetrics());
            // <1> The read count should be 7 elements. Check +MyItemReader+.
            assertEquals(7L, metricsMap.get(Metric.MetricType.READ_COUNT).longValue());
            // <2> The write count should be the same 7 read elements.
            assertEquals(7L, metricsMap.get(Metric.MetricType.WRITE_COUNT).longValue());
            // <3> The commit count should be 3: checkpoint on every 3rd read gives
            // chunks of 3 + 3 + 1, i.e. three commits for the 7 read elements.
            assertEquals(3L, metricsMap.get(Metric.MetricType.COMMIT_COUNT).longValue());
        }
    }
    Query query = entityManager.createNamedQuery("Person.findAll");
    List<Person> persons = query.getResultList();
    // <4> Confirm that the elements were actually persisted into the database.
    assertEquals(7L, persons.size());
    // <5> Job should be completed. (Expected value first, per JUnit convention.)
    assertEquals(BatchStatus.COMPLETED, jobExecution.getBatchStatus());
}
Aggregations