Use of org.akaza.openclinica.web.job.TriggerService in project OpenClinica by OpenClinica.
The class UpdateJobExportServlet, method processRequest:
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    String action = fp.getString("action");
    String triggerName = fp.getString("tname");
    scheduler = getScheduler();
    ExtractUtils extractUtils = new ExtractUtils();
    Trigger updatingTrigger = scheduler.getTrigger(triggerName.trim(), XsltTriggerService.TRIGGER_GROUP_NAME);
    if (StringUtil.isBlank(action)) {
        setUpServlet(updatingTrigger);
        forwardPage(Page.UPDATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // change and update the trigger here: validate first, then update or send back
        HashMap errors = validateForm(fp, request, scheduler.getTriggerNames(XsltTriggerService.TRIGGER_GROUP_NAME), updatingTrigger.getName());
        if (!errors.isEmpty()) {
            // send back
            addPageMessage("Your modifications caused an error, please see the messages for more information.");
            setUpServlet(updatingTrigger);
            logger.error("errors : " + errors.toString());
            forwardPage(Page.UPDATE_JOB_EXPORT);
        } else {
            // change the trigger and update it in the database
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            StudyBean study = (StudyBean) studyDAO.findByPK(sm.getUserBean().getActiveStudyId());
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            CoreResources cr = new CoreResources();
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
            String[] files = epBean.getFileName();
            String exportFileName;
            int fileSize = files.length;
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            int i = 0;
            String[] temp = new String[exportFiles.length];
            // JN: The following logic handles comma-separated values, to avoid the second file being treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            while (i < exportFiles.length) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
                i++;
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            exportFileName = epBean.getExportFileName()[cnt];
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            if (epBean.getPostProcExportName() != null) {
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(xsltPath, // xml_file_path
                    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
                    LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt", TRIGGER_GROUP_JOB);
            // Updating the original trigger with the user-given inputs
            trigger.setRepeatCount(64000);
            trigger.setRepeatInterval(XsltTriggerService.getIntervalTime(period));
            trigger.setDescription(jobDesc);
            // set just the start date
            trigger.setStartTime(startDateTime);
            trigger.setName(jobName);
            trigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(xsltService.TRIGGER_GROUP_NAME);
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // need durability?
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            try {
                // delete the old job, then schedule the modified one in its place
                scheduler.deleteJob(triggerName, XsltTriggerService.TRIGGER_GROUP_NAME);
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                addPageMessage("Your job has been successfully modified.");
                forwardPage(Page.VIEW_JOB_SERVLET);
            } catch (SchedulerException se) {
                se.printStackTrace();
                // set a message here with the exception message
                setUpServlet(trigger);
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.UPDATE_JOB_EXPORT);
            }
        }
    }
}
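For context, the servlet above updates a job by deleting the existing one and scheduling a rebuilt trigger under the same name, which is the usual idiom in the Quartz 1.x API used here. A minimal, self-contained sketch of that delete-then-reschedule pattern, assuming a plain Quartz 1.x scheduler; MyJob and all other names are illustrative, not taken from OpenClinica:

import java.util.Date;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;

public class JobUpdateSketch {

    // Placeholder job; a real job would do the export work here.
    public static class MyJob implements Job {
        public void execute(JobExecutionContext context) {
        }
    }

    // Re-registers a job under the same name/group with new scheduling settings.
    public static Date updateJob(Scheduler scheduler, String name, String group, Date newStart, long intervalMs) throws SchedulerException {
        // Deleting the job also removes its associated triggers.
        scheduler.deleteJob(name, group);
        // Rebuild the trigger from the user-supplied settings.
        SimpleTrigger trigger = new SimpleTrigger(name, group, newStart, null, SimpleTrigger.REPEAT_INDEFINITELY, intervalMs);
        trigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
        JobDetail jobDetail = new JobDetail(name, group, MyJob.class);
        jobDetail.setDurability(true);
        // Returns the first fire time of the new trigger.
        return scheduler.scheduleJob(jobDetail, trigger);
    }
}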
Use of org.akaza.openclinica.web.job.TriggerService in project OpenClinica by OpenClinica.
The class DataImportService, method validateData:
/**
 * Validate the data to be imported for our data service. An import consists of
 * three steps: 1) parse the XML and extract the data, 2) validate, 3) submit the
 * data. This method performs the validation step on all levels, collecting
 * status messages in auditMsg along the way.
 *
 * @author thickerson
 * @param odmContainer the parsed ODM data to validate
 * @param dataSource
 * @param resources
 * @param studyBean
 * @param userBean
 * @param displayItemBeanWrappers populated with the wrappers produced during validation
 * @param importedCRFStatuses updated with the EventCRF statuses to be set after import
 * @return the list of validation errors; empty if validation passed
 */
public List<String> validateData(ODMContainer odmContainer, DataSource dataSource, CoreResources resources, StudyBean studyBean,
        UserAccountBean userBean, List<DisplayItemBeanWrapper> displayItemBeanWrappers, HashMap<Integer, String> importedCRFStatuses) {
    ResourceBundle respage = ResourceBundleProvider.getPageMessagesBundle();
    setRespage(respage);
    TriggerService triggerService = new TriggerService();
    StringBuffer auditMsg = new StringBuffer();
    List<String> errors = new ArrayList<String>();
    // htaycher: return back later?
    auditMsg.append(respage.getString("passed_study_check") + " ");
    auditMsg.append(respage.getString("passed_oid_metadata_check") + " ");
    // validation errors, the same as in the ImportCRFDataServlet. DRY?
    Boolean eventCRFStatusesValid = getImportCRFDataService(dataSource).eventCRFStatusesValid(odmContainer, userBean);
    List<EventCRFBean> eventCRFBeans = getImportCRFDataService(dataSource).fetchEventCRFBeans(odmContainer, userBean);
    // The following line updates a map that is used for setting the EventCRF status post import
    getImportCRFDataService(dataSource).fetchEventCRFStatuses(odmContainer, importedCRFStatuses);
    ArrayList<Integer> permittedEventCRFIds = new ArrayList<Integer>();
    // -- does the event already exist? if not, fail
    if (eventCRFBeans == null) {
        errors.add(respage.getString("the_event_crf_not_correct_status"));
        return errors;
    } else if (eventCRFBeans.isEmpty() && !eventCRFStatusesValid) {
        errors.add(respage.getString("the_event_crf_not_correct_status"));
        return errors;
    } else if (eventCRFBeans.isEmpty()) {
        errors.add(respage.getString("no_event_crfs_matching_the_xml_metadata"));
        return errors;
    }
    logger.debug("found a list of eventCRFBeans: " + eventCRFBeans.toString());
    for (EventCRFBean eventCRFBean : eventCRFBeans) {
        DataEntryStage dataEntryStage = eventCRFBean.getStage();
        Status eventCRFStatus = eventCRFBean.getStatus();
        logger.debug("Event CRF Bean: id " + eventCRFBean.getId() + ", data entry stage " + dataEntryStage.getName() + ", status " + eventCRFStatus.getName());
        if (eventCRFStatus.equals(Status.AVAILABLE) || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY)
                || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY_COMPLETE)
                || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY_COMPLETE)
                || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY)) {
            permittedEventCRFIds.add(new Integer(eventCRFBean.getId()));
        } else {
            errors.add(respage.getString("your_listed_crf_in_the_file") + " " + eventCRFBean.getEventName());
        }
    }
    if (eventCRFBeans.size() >= permittedEventCRFIds.size()) {
        auditMsg.append(respage.getString("passed_event_crf_status_check") + " ");
    } else {
        auditMsg.append(respage.getString("the_event_crf_not_correct_status") + " ");
    }
    HashMap<String, String> totalValidationErrors = new HashMap<String, String>();
    HashMap<String, String> hardValidationErrors = new HashMap<String, String>();
    try {
        List<DisplayItemBeanWrapper> tempDisplayItemBeanWrappers = new ArrayList<DisplayItemBeanWrapper>();
        // htaycher: this should be rewritten with a validator so as not to use the request to store data
        MockHttpServletRequest request = new MockHttpServletRequest();
        request.addPreferredLocale(getLocale());
        tempDisplayItemBeanWrappers = getImportCRFDataService(dataSource).lookupValidationErrors(request, odmContainer, userBean, totalValidationErrors, hardValidationErrors, permittedEventCRFIds);
        displayItemBeanWrappers.addAll(tempDisplayItemBeanWrappers);
        logger.debug("size of total validation errors: " + (totalValidationErrors.size() + hardValidationErrors.size()));
        ArrayList<SubjectDataBean> subjectData = odmContainer.getCrfDataPostImportContainer().getSubjectData();
        if (!hardValidationErrors.isEmpty()) {
            // check here where to get the group repeat key
            errors.add(triggerService.generateHardValidationErrorMessage(subjectData, hardValidationErrors, "1"));
        }
        if (!totalValidationErrors.isEmpty()) {
            errors.add(triggerService.generateHardValidationErrorMessage(subjectData, totalValidationErrors, "1"));
        }
    } catch (NullPointerException npe1) {
        // what if you have 2 event crfs but the third is a fake?
        npe1.printStackTrace();
        errors.add(respage.getString("an_error_was_thrown_while_validation_errors"));
        logger.debug("=== threw the null pointer, import === " + npe1.getMessage());
    } catch (OpenClinicaException oce1) {
        errors.add(oce1.getOpenClinicaMessage());
        logger.debug("=== threw the openclinica message, import === " + oce1.getOpenClinicaMessage());
    }
    auditMsg.append(respage.getString("passing_crf_edit_checks") + " ");
    return errors;
}
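A hypothetical caller of validateData, showing how the returned error list drives the fail/continue decision described in the Javadoc; dataImportService, odmContainer, dataSource, resources, study, and user are assumed to be wired up elsewhere, and the variable names are illustrative:

List<DisplayItemBeanWrapper> wrappers = new ArrayList<DisplayItemBeanWrapper>();
HashMap<Integer, String> importedCRFStatuses = new HashMap<Integer, String>();
List<String> errors = dataImportService.validateData(odmContainer, dataSource, resources, study, user, wrappers, importedCRFStatuses);
if (errors.isEmpty()) {
    // validation passed; proceed to step 3, data submission
} else {
    for (String error : errors) {
        logger.warn("import validation failed: " + error);
    }
}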
Use of org.akaza.openclinica.web.job.TriggerService in project OpenClinica by OpenClinica.
The class CreateJobImportServlet, method processRequest:
@Override
protected void processRequest() throws Exception {
    // TODO multi-stage servlet to generate import jobs
    // validate the form, create the job, and return to the view jobs servlet
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    scheduler = getScheduler();
    String action = fp.getString("action");
    if (StringUtil.isBlank(action)) {
        // set up the list of data sets, selected by the active study
        setUpServlet();
        forwardPage(Page.CREATE_JOB_IMPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // collect form information
        HashMap errors = triggerService.validateImportJobForm(fp, request, scheduler.getTriggerNames(IMPORT_TRIGGER));
        if (!errors.isEmpty()) {
            // set errors to the request
            request.setAttribute("formMessages", errors);
            logger.debug("has validation errors in the first section: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_IMPORT);
        } else {
            logger.info("found no validation errors, continuing");
            int studyId = fp.getInt(STUDY_ID);
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            StudyBean studyBean = (StudyBean) studyDAO.findByPK(studyId);
            SimpleTrigger trigger = triggerService.generateImportTrigger(fp, sm.getUserBean(), studyBean, LocaleResolver.getLocale(request).getLanguage());
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(IMPORT_TRIGGER);
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.web.job.ImportStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // need durability?
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            // hand it to the scheduler
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                logger.debug("== found job date: " + dateStart.toString());
                // set a success message here
                addPageMessage("You have successfully created a new job: " + trigger.getName() + " which is now set to run at the time you specified.");
                forwardPage(Page.VIEW_IMPORT_JOB_SERVLET);
            } catch (SchedulerException se) {
                se.printStackTrace();
                // set a message here with the exception message
                setUpServlet();
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.CREATE_JOB_IMPORT);
            }
        }
    } else {
        // should we even get here? forward back to the admin page
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
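The JobDataMap copied from the trigger onto the JobDetailBean above is how the form inputs reach the job when it fires. A sketch of the consuming side, assuming the Quartz 1.x StatefulJob API that ImportStatefulJob implements; the keys shown are illustrative, not the actual OpenClinica key names:

import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.StatefulJob;

public class ImportJobSketch implements StatefulJob {
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // The merged map combines the JobDetail's and the trigger's JobDataMaps.
        String email = context.getMergedJobDataMap().getString("contactEmail"); // illustrative key
        int studyId = context.getMergedJobDataMap().getInt("studyId"); // illustrative key
        // ... run the import for studyId and notify email ...
    }
}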
Use of org.akaza.openclinica.web.job.TriggerService in project OpenClinica by OpenClinica.
The class CreateJobExportServlet, method processRequest:
@Override
protected void processRequest() throws Exception {
    // TODO multi-stage servlet which will create export jobs
    // will accept, create, and return to the ViewJob servlet
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    scheduler = getScheduler();
    String action = fp.getString("action");
    ExtractUtils extractUtils = new ExtractUtils();
    if (StringUtil.isBlank(action)) {
        // set up the list of data sets, selected by the active study
        setUpServlet();
        forwardPage(Page.CREATE_JOB_EXPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        // collect form information
        HashMap errors = validateForm(fp, request, scheduler.getTriggerNames(XsltTriggerService.TRIGGER_GROUP_NAME), "");
        if (!errors.isEmpty()) {
            // set errors to the request
            request.setAttribute("formMessages", errors);
            logger.info("has validation errors in the first section");
            logger.info("errors found: " + errors.toString());
            setUpServlet();
            forwardPage(Page.CREATE_JOB_EXPORT);
        } else {
            logger.info("found no validation errors, continuing");
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
            UserAccountBean userBean = (UserAccountBean) request.getSession().getAttribute("userBean");
            CoreResources cr = new CoreResources();
            int datasetId = fp.getInt(DATASET_ID);
            String period = fp.getString(PERIOD);
            String email = fp.getString(EMAIL);
            String jobName = fp.getString(JOB_NAME);
            String jobDesc = fp.getString(JOB_DESC);
            Date startDateTime = fp.getDateTime(DATE_START_JOB);
            Integer exportFormatId = fp.getInt(FORMAT_ID);
            ExtractPropertyBean epBean = cr.findExtractPropertyBeanById(exportFormatId, "" + datasetId);
            DatasetBean dsBean = (DatasetBean) datasetDao.findByPK(new Integer(datasetId).intValue());
            // set the job in motion
            String[] files = epBean.getFileName();
            String exportFileName;
            int fileSize = files.length;
            int cnt = 0;
            dsBean.setName(dsBean.getName().replaceAll(" ", "_"));
            String[] exportFiles = epBean.getExportFileName();
            String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
            SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
            int i = 0;
            String[] temp = new String[exportFiles.length];
            // JN: The following logic handles comma-separated values, to avoid the second file being treated as an old file and deleted.
            String datasetFilePath = SQLInitServlet.getField("filePath") + "datasets";
            while (i < exportFiles.length) {
                temp[i] = extractUtils.resolveVars(exportFiles[i], dsBean, sdfDir, datasetFilePath);
                i++;
            }
            epBean.setDoNotDelFiles(temp);
            epBean.setExportFileName(temp);
            XsltTriggerService xsltService = new XsltTriggerService();
            String generalFileDir = SQLInitServlet.getField("filePath");
            generalFileDir = generalFileDir + "datasets" + File.separator + dsBean.getId() + File.separator + sdfDir.format(new java.util.Date());
            exportFileName = epBean.getExportFileName()[cnt];
            // need to set the dataset path here, tbh
            // next, can already run jobs, translations, and then add a message to be notified later
            // JN: all the properties need to have the variables...
            String xsltPath = SQLInitServlet.getField("filePath") + "xslt" + File.separator + files[cnt];
            String endFilePath = epBean.getFileLocation();
            endFilePath = extractUtils.getEndFilePath(endFilePath, dsBean, sdfDir, datasetFilePath);
            if (epBean.getPostProcExportName() != null) {
                String preProcExportPathName = extractUtils.resolveVars(epBean.getPostProcExportName(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcExportName(preProcExportPathName);
            }
            if (epBean.getPostProcLocation() != null) {
                String prePocLoc = extractUtils.getEndFilePath(epBean.getPostProcLocation(), dsBean, sdfDir, datasetFilePath);
                epBean.setPostProcLocation(prePocLoc);
            }
            extractUtils.setAllProps(epBean, dsBean, sdfDir, datasetFilePath);
            SimpleTrigger trigger = xsltService.generateXsltTrigger(xsltPath, // xml_file_path
                    generalFileDir, endFilePath + File.separator, exportFileName, dsBean.getId(), epBean, userBean,
                    LocaleResolver.getLocale(request).getLanguage(), cnt, SQLInitServlet.getField("filePath") + "xslt",
                    xsltService.getTriggerGroupNameForExportJobs());
            // Populate the new trigger with the user-given inputs
            trigger.setRepeatCount(64000);
            trigger.setRepeatInterval(XsltTriggerService.getIntervalTime(period));
            trigger.setDescription(jobDesc);
            // set just the start date
            trigger.setStartTime(startDateTime);
            trigger.setName(jobName);
            trigger.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT);
            trigger.getJobDataMap().put(XsltTriggerService.EMAIL, email);
            trigger.getJobDataMap().put(XsltTriggerService.PERIOD, period);
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT, epBean.getFiledescription());
            trigger.getJobDataMap().put(XsltTriggerService.EXPORT_FORMAT_ID, exportFormatId);
            trigger.getJobDataMap().put(XsltTriggerService.JOB_NAME, jobName);
            trigger.getJobDataMap().put("job_type", "exportJob");
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(xsltService.getTriggerGroupNameForExportJobs());
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.job.XsltStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // need durability?
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            // hand it to the scheduler
            try {
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                logger.info("== found job date: " + dateStart.toString());
            } catch (SchedulerException se) {
                se.printStackTrace();
                setUpServlet();
                addPageMessage("Error creating Job.");
                forwardPage(Page.VIEW_JOB_SERVLET);
                return;
            }
            setUpServlet();
            addPageMessage("You have successfully created a new job: " + jobName + " which is now set to run at the time you specified.");
            forwardPage(Page.VIEW_JOB_SERVLET);
        }
    } else {
        // should we even get here? forward back to the admin page
        forwardPage(Page.ADMIN_SYSTEM);
    }
}
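Both export servlets build a per-run output directory from the yyyy/MM/dd/HHmmssSSS pattern above, so repeated runs never overwrite each other. A small self-contained illustration; the base path stands in for SQLInitServlet.getField("filePath") and the dataset id is made up:

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;

public class ExportDirSketch {
    public static void main(String[] args) {
        String pattern = "yyyy" + File.separator + "MM" + File.separator + "dd" + File.separator + "HHmmssSSS" + File.separator;
        SimpleDateFormat sdfDir = new SimpleDateFormat(pattern);
        int datasetId = 42; // illustrative
        String generalFileDir = "/usr/local/oc/filePath/" + "datasets" + File.separator + datasetId + File.separator + sdfDir.format(new Date());
        // e.g. /usr/local/oc/filePath/datasets/42/2016/03/01/134501123/
        System.out.println(generalFileDir);
    }
}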
Use of org.akaza.openclinica.web.job.TriggerService in project OpenClinica by OpenClinica.
The class UpdateJobImportServlet, method processRequest:
@Override
protected void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    TriggerService triggerService = new TriggerService();
    String action = fp.getString("action");
    String triggerName = fp.getString("tname");
    scheduler = getScheduler();
    logger.debug("found trigger name " + triggerName);
    Trigger trigger = scheduler.getTrigger(triggerName, TRIGGER_IMPORT_GROUP);
    if (StringUtil.isBlank(action)) {
        setUpServlet(trigger);
        forwardPage(Page.UPDATE_JOB_IMPORT);
    } else if ("confirmall".equalsIgnoreCase(action)) {
        HashMap errors = triggerService.validateImportJobForm(fp, request, scheduler.getTriggerNames("DEFAULT"), trigger.getName());
        if (!errors.isEmpty()) {
            // send back
            addPageMessage("Your modifications caused an error, please see the messages for more information.");
            setUpServlet(trigger);
            forwardPage(Page.UPDATE_JOB_IMPORT);
        } else {
            StudyDAO studyDAO = new StudyDAO(sm.getDataSource());
            int studyId = fp.getInt(CreateJobImportServlet.STUDY_ID);
            StudyBean study = (StudyBean) studyDAO.findByPK(studyId);
            // in the place of a user's current study, tbh
            // keep the original start date when regenerating the trigger
            Date startDate = trigger.getStartTime();
            trigger = triggerService.generateImportTrigger(fp, sm.getUserBean(), study, startDate, LocaleResolver.getLocale(request).getLanguage());
            JobDetailBean jobDetailBean = new JobDetailBean();
            jobDetailBean.setGroup(TRIGGER_IMPORT_GROUP);
            jobDetailBean.setName(trigger.getName());
            jobDetailBean.setJobClass(org.akaza.openclinica.web.job.ImportStatefulJob.class);
            jobDetailBean.setJobDataMap(trigger.getJobDataMap());
            // need durability?
            jobDetailBean.setDurability(true);
            jobDetailBean.setVolatility(false);
            try {
                // delete the old job and schedule the regenerated one in its place
                scheduler.deleteJob(triggerName, TRIGGER_IMPORT_GROUP);
                Date dateStart = scheduler.scheduleJob(jobDetailBean, trigger);
                addPageMessage("Your job has been successfully modified.");
                forwardPage(Page.VIEW_IMPORT_JOB_SERVLET);
            } catch (SchedulerException se) {
                se.printStackTrace();
                // set a message here with the exception message
                setUpServlet(trigger);
                addPageMessage("There was an unspecified error with your creation, please contact an administrator.");
                forwardPage(Page.UPDATE_JOB_IMPORT);
            }
        }
    }
}
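Both update servlets also hand scheduler.getTriggerNames(...) to the form validator so a renamed job cannot collide with an existing trigger, while keeping the current name legal. A minimal sketch of that uniqueness check against the Quartz 1.x API; the helper class and its name are illustrative:

import org.quartz.Scheduler;
import org.quartz.SchedulerException;

public class TriggerNameCheck {
    // True when the proposed name keeps the current name or is not yet taken in the group.
    public static boolean isNameAvailable(Scheduler scheduler, String group, String proposed, String current) throws SchedulerException {
        if (proposed.equals(current)) {
            return true;
        }
        for (String existing : scheduler.getTriggerNames(group)) {
            if (existing.equals(proposed)) {
                return false;
            }
        }
        return true;
    }
}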