Use of org.exolab.castor.mapping.Mapping in the OpenClinica project (by OpenClinica):
class ImportCRFDataServlet, method processRequest.
@Override
public void processRequest() throws Exception {
resetPanel();
panel.setStudyInfoShown(false);
panel.setOrderedData(true);
FormProcessor fp = new FormProcessor(request);
// checks which module the requests are from
String module = fp.getString(MODULE);
// keep the module in the session
session.setAttribute(MODULE, module);
String action = request.getParameter("action");
CRFVersionBean version = (CRFVersionBean) session.getAttribute("version");
File xsdFile = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-3-0.xsd");
File xsdFile2 = new File(SpringServletAccess.getPropertiesDir(context) + "ODM1-2-1.xsd");
if (StringUtil.isBlank(action)) {
logger.info("action is blank");
request.setAttribute("version", version);
forwardPage(Page.IMPORT_CRF_DATA);
}
if ("confirm".equalsIgnoreCase(action)) {
String dir = SQLInitServlet.getField("filePath");
if (!new File(dir).exists()) {
logger.info("The filePath in datainfo.properties is invalid " + dir);
addPageMessage(respage.getString("filepath_you_defined_not_seem_valid"));
forwardPage(Page.IMPORT_CRF_DATA);
}
// All the uploaded files will be saved in filePath/crf/original/
String theDir = dir + "crf" + File.separator + "original" + File.separator;
if (!new File(theDir).isDirectory()) {
new File(theDir).mkdirs();
logger.info("Made the directory " + theDir);
}
// MultipartRequest multi = new MultipartRequest(request, theDir, 50 * 1024 * 1024);
File f = null;
try {
f = uploadFile(theDir, version);
} catch (Exception e) {
logger.warn("*** Found exception during file upload***");
e.printStackTrace();
}
if (f == null) {
forwardPage(Page.IMPORT_CRF_DATA);
}
// TODO
// validation steps
// 1. valid xml - validated by file uploader below
// LocalConfiguration config = LocalConfiguration.getInstance();
// config.getProperties().setProperty(
// "org.exolab.castor.parser.namespaces",
// "true");
// config
// .getProperties()
// .setProperty("org.exolab.castor.sax.features",
// "http://xml.org/sax/features/validation,
// http://apache.org/xml/features/validation/schema,
// http://apache.org/xml/features/validation/schema-full-checking");
// // above sets to validate against namespace
Mapping myMap = new Mapping();
// @pgawade 18-April-2011 Fix for issue 8394
String ODM_MAPPING_DIRPath = CoreResources.ODM_MAPPING_DIR;
myMap.loadMapping(ODM_MAPPING_DIRPath + File.separator + "cd_odm_mapping.xml");
Unmarshaller um1 = new Unmarshaller(myMap);
// um1.addNamespaceToPackageMapping("http://www.openclinica.org/ns/odm_ext_v130/v3.1", "OpenClinica");
// um1.addNamespaceToPackageMapping("http://www.cdisc.org/ns/odm/v1.3"
// ,
// "ODMContainer");
boolean fail = false;
ODMContainer odmContainer = new ODMContainer();
session.removeAttribute("odmContainer");
try {
// schemaValidator.validateAgainstSchema(f, xsdFile);
// utf-8 compliance, tbh 06/2009
InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
odmContainer = (ODMContainer) um1.unmarshal(isr);
logger.debug("Found crf data container for study oid: " + odmContainer.getCrfDataPostImportContainer().getStudyOID());
logger.debug("found length of subject list: " + odmContainer.getCrfDataPostImportContainer().getSubjectData().size());
// 2. validates against ODM 1.3
// check it all below, throw an exception and route to a
// different
// page if not working
// TODO this block of code needs the xerces serializer in order
// to
// work
// StringWriter myWriter = new StringWriter();
// Marshaller m1 = new Marshaller(myWriter);
//
// m1.setProperty("org.exolab.castor.parser.namespaces",
// "true");
// m1
// .setProperty("org.exolab.castor.sax.features",
// "http://xml.org/sax/features/validation,
// http://apache.org/xml/features/validation/schema,
// http://apache.org/xml/features/validation/schema-full-checking
// ");
//
// m1.setMapping(myMap);
// m1.setNamespaceMapping("",
// "http://www.cdisc.org/ns/odm/v1.3");
// m1.setSchemaLocation("http://www.cdisc.org/ns/odm/v1.3
// ODM1-3.xsd");
// m1.marshal(odmContainer);
// if you havent thrown it, you wont throw it here
addPageMessage(respage.getString("passed_xml_validation"));
} catch (Exception me1) {
me1.printStackTrace();
// expanding it to all exceptions, but hoping to catch Marshal
// Exception or SAX Exceptions
logger.info("found exception with xml transform");
//
logger.info("trying 1.2.1");
try {
schemaValidator.validateAgainstSchema(f, xsdFile2);
// for backwards compatibility, we also try to validate vs
// 1.2.1 ODM 06/2008
InputStreamReader isr = new InputStreamReader(new FileInputStream(f), "UTF-8");
odmContainer = (ODMContainer) um1.unmarshal(isr);
} catch (Exception me2) {
// not sure if we want to report me2
MessageFormat mf = new MessageFormat("");
mf.applyPattern(respage.getString("your_xml_is_not_well_formed"));
Object[] arguments = { me1.getMessage() };
addPageMessage(mf.format(arguments));
//
// addPageMessage("Your XML is not well-formed, and does not
// comply with the ODM 1.3 Schema. Please check it, and try
// again. It returned the message: "
// + me1.getMessage());
// me1.printStackTrace();
forwardPage(Page.IMPORT_CRF_DATA);
// you can't really wait to forward because then you throw
// NPEs
// in the next few parts of the code
}
}
// TODO need to output further here
// 2.a. is the study the same one that the user is in right now?
// 3. validates against study metadata
// 3.a. is that study subject in that study?
// 3.b. is that study event def in that study?
// 3.c. is that site in that study?
// 3.d. is that crf version in that study event def?
// 3.e. are those item groups in that crf version?
// 3.f. are those items in that item group?
List<String> errors = getImportCRFDataService().validateStudyMetadata(odmContainer, ub.getActiveStudyId());
if (errors != null) {
// add to session
// forward to another page
logger.info(errors.toString());
for (String error : errors) {
addPageMessage(error);
}
if (errors.size() > 0) {
// fail = true;
forwardPage(Page.IMPORT_CRF_DATA);
} else {
addPageMessage(respage.getString("passed_study_check"));
addPageMessage(respage.getString("passed_oid_metadata_check"));
}
}
logger.debug("passed error check");
// TODO ADD many validation steps before we get to the
// session-setting below
// 4. is the event in the correct status to accept data import?
// -- scheduled, data entry started, completed
// (and the event should already be created)
// (and the event should be independent, ie not affected by other
// events)
Boolean eventCRFStatusesValid = getImportCRFDataService().eventCRFStatusesValid(odmContainer, ub);
ImportCRFInfoContainer importCrfInfo = new ImportCRFInfoContainer(odmContainer, sm.getDataSource());
// The eventCRFBeans list omits EventCRFs that don't match UpsertOn rules. If EventCRF did not exist and
// doesn't match upsert, it won't be created.
List<EventCRFBean> eventCRFBeans = getImportCRFDataService().fetchEventCRFBeans(odmContainer, ub);
List<DisplayItemBeanWrapper> displayItemBeanWrappers = new ArrayList<DisplayItemBeanWrapper>();
HashMap<String, String> totalValidationErrors = new HashMap<String, String>();
HashMap<String, String> hardValidationErrors = new HashMap<String, String>();
// The following map is used for setting the EventCRF status post import.
HashMap<Integer, String> importedCRFStatuses = getImportCRFDataService().fetchEventCRFStatuses(odmContainer);
// method in the ImportCRFDataService is modified for this fix.
if (eventCRFBeans == null) {
fail = true;
addPageMessage(respage.getString("no_event_status_matching"));
} else {
ArrayList<Integer> permittedEventCRFIds = new ArrayList<Integer>();
logger.info("found a list of eventCRFBeans: " + eventCRFBeans.toString());
// List<DisplayItemBeanWrapper> displayItemBeanWrappers = new ArrayList<DisplayItemBeanWrapper>();
// HashMap<String, String> totalValidationErrors = new
// HashMap<String, String>();
// HashMap<String, String> hardValidationErrors = new
// HashMap<String, String>();
logger.debug("found event crfs " + eventCRFBeans.size());
// -- does the event already exist? if not, fail
if (!eventCRFBeans.isEmpty()) {
for (EventCRFBean eventCRFBean : eventCRFBeans) {
DataEntryStage dataEntryStage = eventCRFBean.getStage();
Status eventCRFStatus = eventCRFBean.getStatus();
logger.info("Event CRF Bean: id " + eventCRFBean.getId() + ", data entry stage " + dataEntryStage.getName() + ", status " + eventCRFStatus.getName());
if (eventCRFStatus.equals(Status.AVAILABLE) || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY) || dataEntryStage.equals(DataEntryStage.INITIAL_DATA_ENTRY_COMPLETE) || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY_COMPLETE) || dataEntryStage.equals(DataEntryStage.DOUBLE_DATA_ENTRY)) {
// actually want the negative
// was status == available and the stage questions, but
// when you are at 'data entry complete' your status is
// set to 'unavailable'.
// >> tbh 09/2008
// HOWEVER, when one event crf is removed and the rest
// are good, what happens???
// need to create a list and inform that one is blocked
// and the rest are not...
//
permittedEventCRFIds.add(new Integer(eventCRFBean.getId()));
} else {
// fail = true;
// addPageMessage(respage.getString(
// "the_event_crf_not_correct_status"));
// forwardPage(Page.IMPORT_CRF_DATA);
}
}
// did we exclude all the event CRFs? if not, pass, else fail
if (eventCRFBeans.size() >= permittedEventCRFIds.size()) {
addPageMessage(respage.getString("passed_event_crf_status_check"));
} else {
fail = true;
addPageMessage(respage.getString("the_event_crf_not_correct_status"));
}
try {
List<DisplayItemBeanWrapper> tempDisplayItemBeanWrappers = new ArrayList<DisplayItemBeanWrapper>();
tempDisplayItemBeanWrappers = getImportCRFDataService().lookupValidationErrors(request, odmContainer, ub, totalValidationErrors, hardValidationErrors, permittedEventCRFIds);
logger.debug("generated display item bean wrappers " + tempDisplayItemBeanWrappers.size());
logger.debug("size of total validation errors: " + totalValidationErrors.size());
displayItemBeanWrappers.addAll(tempDisplayItemBeanWrappers);
} catch (NullPointerException npe1) {
// what if you have 2 event crfs but the third is a fake?
fail = true;
logger.debug("threw a NPE after calling lookup validation errors");
System.out.println(ExceptionUtils.getStackTrace(npe1));
addPageMessage(respage.getString("an_error_was_thrown_while_validation_errors"));
// npe1.printStackTrace();
} catch (OpenClinicaException oce1) {
fail = true;
logger.debug("threw an OCE after calling lookup validation errors " + oce1.getOpenClinicaMessage());
addPageMessage(oce1.getOpenClinicaMessage());
}
} else if (!eventCRFStatusesValid) {
fail = true;
addPageMessage(respage.getString("the_event_crf_not_correct_status"));
} else {
fail = true;
addPageMessage(respage.getString("no_event_crfs_matching_the_xml_metadata"));
}
// for (HashMap<String, String> crfData : importedData) {
// DisplayItemBeanWrapper displayItemBeanWrapper =
// testing(request,
// crfData);
// displayItemBeanWrappers.add(displayItemBeanWrapper);
// errors = displayItemBeanWrapper.getValidationErrors();
//
// }
}
if (fail) {
logger.debug("failed here - forwarding...");
forwardPage(Page.IMPORT_CRF_DATA);
} else {
addPageMessage(respage.getString("passing_crf_edit_checks"));
session.setAttribute("odmContainer", odmContainer);
session.setAttribute("importedData", displayItemBeanWrappers);
session.setAttribute("validationErrors", totalValidationErrors);
session.setAttribute("hardValidationErrors", hardValidationErrors);
session.setAttribute("importedCRFStatuses", importedCRFStatuses);
session.setAttribute("importCrfInfo", importCrfInfo);
// above are updated 'statically' by the method that originally
// generated the wrappers; soon the only thing we will use
// wrappers for is the 'overwrite' flag
logger.debug("+++ content of total validation errors: " + totalValidationErrors.toString());
SummaryStatsBean ssBean = getImportCRFDataService().generateSummaryStatsBean(odmContainer, displayItemBeanWrappers, importCrfInfo);
session.setAttribute("summaryStats", ssBean);
// will have to set hard edit checks here as well
session.setAttribute("subjectData", odmContainer.getCrfDataPostImportContainer().getSubjectData());
forwardPage(Page.VERIFY_IMPORT_SERVLET);
}
// }
}
}
Use of org.exolab.castor.mapping.Mapping in the OpenClinica project (by OpenClinica):
class ImportRuleServlet, method handleLoadCastor.
/**
 * Loads a rules XML file and unmarshals it into a {@link RulesPostImportContainer}
 * using the Castor mapping "mapping.xml" resolved through CoreResources.
 *
 * @param xmlFile the rules XML file to import
 * @return the populated and initialized RulesPostImportContainer
 * @throws OpenClinicaSystemException if the file cannot be read, the mapping
 *         cannot be loaded, or the XML fails unmarshalling/validation
 */
private RulesPostImportContainer handleLoadCastor(File xmlFile) {
    try {
        // create an XMLContext instance and register the Castor mapping
        XMLContext xmlContext = new XMLContext();
        Mapping mapping = xmlContext.createMapping();
        // mapping.loadMapping(SpringServletAccess.getPropertiesDir(context) + "mapping.xml");
        mapping.loadMapping(getCoreResources().getURL("mapping.xml"));
        xmlContext.addMapping(mapping);
        // configure an Unmarshaller bound to the target container class
        Unmarshaller unmarshaller = xmlContext.createUnmarshaller();
        unmarshaller.setWhitespacePreserve(false);
        unmarshaller.setClass(RulesPostImportContainer.class);
        // try-with-resources closes the reader even on failure; the original
        // version leaked the FileReader
        RulesPostImportContainer ruleImport;
        try (FileReader reader = new FileReader(xmlFile)) {
            ruleImport = (RulesPostImportContainer) unmarshaller.unmarshal(reader);
        }
        ruleImport.initializeRuleDef();
        logRuleImport(ruleImport);
        return ruleImport;
    } catch (IOException | MarshalException | ValidationException | MappingException ex) {
        // FileNotFoundException is an IOException, so the original separate
        // catch is subsumed. Pass ex itself as the cause (the original passed
        // ex.getCause(), which is frequently null and loses the stack trace).
        throw new OpenClinicaSystemException(ex.getMessage(), ex);
    }
}
Use of org.exolab.castor.mapping.Mapping in the OpenClinica project (by OpenClinica):
class OpenRosaServices, method getFormList.
/**
* @api {get} /rest2/openrosa/:studyOID/formList Get Form List
* @apiName getFormList
* @apiPermission admin
* @apiVersion 3.8.0
* @apiParam {String} studyOID Study Oid.
* @apiGroup Form
* @apiDescription Retrieves a listing of the available OpenClinica forms.
* @apiParamExample {json} Request-Example:
* {
* "studyOid": "S_SAMPLTE",
* }
* @apiSuccessExample {xml} Success-Response:
* HTTP/1.1 200 OK
* {
* <xforms xmlns="http://openrosa.org/xforms/xformsList">
* <xform>
* <formID>F_FIRSTFORM_1</formID>
* <name>First Form</name>
* <majorMinorVersion>1</majorMinorVersion>
* <version>1</version>
* <hash>8678370cd92814d4e3216d58d821403f</hash>
* <downloadUrl>http://oc1.openclinica.com/OpenClinica-web/rest2/openrosa/S_SAMPLTE/formXml?
* formId=F_FIRSTFORM_1</downloadUrl>
* </xform>
* <xform>
* <formID>F_SECONDFORM_1</formID>
* <name>Second Form</name>
* <majorMinorVersion>1</majorMinorVersion>
* <version>1</version>
* <hash>7ee60d1c6516b730bbe9bdbd7cad942f</hash>
* <downloadUrl>http://oc1.openclinica.com/OpenClinica-web/rest2/openrosa/S_SAMPLTE/formXml?
* formId=F_SECONDFORM_1</downloadUrl>
* </xform>
* </xforms>
*/
@GET
@Path("/{studyOID}/formList")
@Produces(MediaType.TEXT_XML)
// Returns the OpenRosa form list for a study as XML (see the @api block above).
// With no formID query parameter, lists every form layout joined to its CRF;
// with a formID, delegates to getForm for that single form. The XML is
// produced by marshalling an XFormList through the Castor mapping
// "openRosaFormListMapping.xml".
public String getFormList(@Context HttpServletRequest request, @Context HttpServletResponse response, @PathParam("studyOID") String studyOID, @QueryParam("formID") String uniqueId, @RequestHeader("Authorization") String authorization, @Context ServletContext context) throws Exception {
// Guard: only proceed when the study permits preview access.
if (!mayProceedPreview(studyOID))
return null;
XFormList formList = null;
try {
if (StringUtils.isEmpty(uniqueId)) {
List<CrfBean> crfs = crfDao.findAll();
List<FormLayout> formLayouts = formLayoutDao.findAll();
formList = new XFormList();
// NOTE(review): this is an O(crfs x formLayouts) in-memory join on
// crfId; fine for small form counts, but a DAO-side join would scale
// better — confirm expected data volume.
for (CrfBean crf : crfs) {
for (FormLayout formLayout : formLayouts) {
if (formLayout.getCrf().getCrfId() == crf.getCrfId()) {
XForm form = new XForm(crf, formLayout);
// TODO: Need to generate hash based on contents of
// XForm. Will be done in a later story.
// TODO: For now all XForms get a date based hash to
// trick Enketo into always downloading
// TODO: them.
// Derive the public URL base from the configured sysURL.
String urlBase = getCoreResources().getDataInfo().getProperty("sysURL").split("/MainMenu")[0];
form.setDownloadURL(urlBase + "/rest2/openrosa/" + studyOID + "/formXml?formId=" + formLayout.getOcOid());
// Only advertise a manifest when the layout has media attachments.
List<FormLayoutMedia> mediaList = formLayoutMediaDao.findByFormLayoutIdForNoteTypeMedia(formLayout.getFormLayoutId());
if (mediaList != null && mediaList.size() > 0) {
form.setManifestURL(urlBase + "/rest2/openrosa/" + studyOID + "/manifest?formId=" + formLayout.getOcOid());
}
formList.add(form);
}
}
}
} else {
formList = getForm(request, response, studyOID, uniqueId, authorization, context);
}
// Create the XML formList using a Castor mapping file.
XMLContext xmlContext = new XMLContext();
Mapping mapping = xmlContext.createMapping();
mapping.loadMapping(getCoreResources().getURL("openRosaFormListMapping.xml"));
xmlContext.addMapping(mapping);
Marshaller marshaller = xmlContext.createMarshaller();
StringWriter writer = new StringWriter();
marshaller.setWriter(writer);
marshaller.marshal(formList);
// Set response headers
// Date header in RFC-style GMT format, as expected by OpenRosa clients.
Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
Date currentDate = new Date();
cal.setTime(currentDate);
SimpleDateFormat format = new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss zz");
format.setCalendar(cal);
response.setHeader("Content-Type", "text/xml; charset=UTF-8");
response.setHeader("Date", format.format(currentDate));
response.setHeader("X-OpenRosa-Version", "1.0");
return writer.toString();
} catch (Exception e) {
LOGGER.error(e.getMessage());
LOGGER.error(ExceptionUtils.getStackTrace(e));
// NOTE(review): errors are returned as ad-hoc <Error> XML containing the
// raw exception message — consider whether this leaks internal detail.
return "<Error>" + e.getMessage() + "</Error>";
}
}
Use of org.exolab.castor.mapping.Mapping in the Apache Camel project:
class AbstractCastorDataFormat, method createXMLContext.
/**
 * Builds the Castor {@code XMLContext} used for (un)marshalling.
 * Registers the configured mapping file (if any), then any configured
 * packages and individually named classes.
 *
 * @param resolver used to locate the mapping resource and resolve class names
 * @param contextClassLoader optional class loader for the Mapping; may be null
 * @return a fully configured XMLContext
 * @throws Exception if the mapping cannot be loaded or a class cannot be resolved
 */
protected XMLContext createXMLContext(ClassResolver resolver, ClassLoader contextClassLoader) throws Exception {
    XMLContext context = new XMLContext();
    // Register the mapping file when one is configured, honoring the
    // supplied context class loader if present.
    if (ObjectHelper.isNotEmpty(getMappingFile())) {
        Mapping mapping = contextClassLoader != null ? new Mapping(contextClassLoader) : new Mapping();
        mapping.loadMapping(resolver.loadResourceAsURL(getMappingFile()));
        context.addMapping(mapping);
    }
    // Register whole packages of generated descriptors.
    if (getPackages() != null) {
        context.addPackages(getPackages());
    }
    // Resolve and register each individually configured class.
    if (getClassNames() != null) {
        for (String className : getClassNames()) {
            context.addClass(resolver.resolveClass(className));
        }
    }
    return context;
}
Use of org.exolab.castor.mapping.Mapping in the Spring Framework project:
class CastorMarshaller, method createXMLContext.
/**
 * Create the Castor {@code XMLContext}. Subclasses can override this to create a custom context.
 * <p>The default implementation registers mapping files when defined, then any
 * target classes and packages, and finally applies configured Castor properties.
 * @return the created resolver
 * @throws MappingException when the mapping file cannot be loaded
 * @throws IOException in case of I/O errors
 * @see XMLContext#addMapping(org.exolab.castor.mapping.Mapping)
 * @see XMLContext#addClass(Class)
 */
protected XMLContext createXMLContext(Resource[] mappingLocations, Class<?>[] targetClasses, String[] targetPackages) throws MappingException, ResolverException, IOException {
    XMLContext xmlContext = new XMLContext();
    // Load every configured mapping location into a single Mapping instance.
    if (!ObjectUtils.isEmpty(mappingLocations)) {
        Mapping combinedMapping = new Mapping();
        for (Resource location : mappingLocations) {
            combinedMapping.loadMapping(SaxResourceUtils.createInputSource(location));
        }
        xmlContext.addMapping(combinedMapping);
    }
    // Register explicitly configured classes and packages, if any.
    if (!ObjectUtils.isEmpty(targetClasses)) {
        xmlContext.addClasses(targetClasses);
    }
    if (!ObjectUtils.isEmpty(targetPackages)) {
        xmlContext.addPackages(targetPackages);
    }
    // Apply any user-supplied Castor properties to the context.
    if (this.castorProperties != null) {
        this.castorProperties.forEach(xmlContext::setProperty);
    }
    return xmlContext;
}
Aggregations