Usage of org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean in the OpenClinica project:
class GenerateExtractFileService, method createFile.
/**
 * Writes the extract content to a text file in {@code dir}, zips it alongside as
 * {@code name + ".zip"}, and (optionally) records the zip as an
 * {@link ArchivedDatasetFileBean} in the database.
 *
 * @param name        base file name; spaces are replaced with underscores (#4915)
 * @param dir         target directory; created (including parents) if missing
 * @param content     text content to write into the file
 * @param datasetBean dataset the archived file belongs to
 * @param time        run time to record, in the unit expected by setRunTime (cast to int)
 * @param efb         export format of the generated file
 * @param saveToDB    when true, persist an ArchivedDatasetFileBean row for the zip
 * @param userBean    owner recorded on the persisted bean
 * @return the database id of the created ArchivedDatasetFileBean, or 0 if nothing
 *         was persisted or an error occurred (all exceptions are caught and logged)
 */
public int createFile(String name, String dir, String content, DatasetBean datasetBean, long time, ExportFormatBean efb, boolean saveToDB, UserAccountBean userBean) {
    ArchivedDatasetFileBean fbFinal = new ArchivedDatasetFileBean();
    // tbh 04/2010 #4915: replace all spaces in the name with underscores
    name = name.replaceAll(" ", "_");
    fbFinal.setId(0);
    try {
        File complete = new File(dir);
        if (!complete.isDirectory()) {
            complete.mkdirs();
        }
        File newFile = new File(complete, name);
        newFile.setLastModified(System.currentTimeMillis());
        // try-with-resources guarantees the writer is closed even if write() throws
        // (the original leaked the BufferedWriter on failure)
        try (BufferedWriter w = new BufferedWriter(new FileWriter(newFile))) {
            w.write(content);
        }
        logger.info("finished writing the text file...");
        // Zip the text file next to it. Both streams are closed via
        // try-with-resources; the original never closed the ZipOutputStream
        // (only finish()), leaking the underlying FileOutputStream.
        try (FileInputStream is = new FileInputStream(newFile);
             ZipOutputStream z = new ZipOutputStream(new FileOutputStream(new File(complete, name + ".zip")))) {
            logger.info("created zip output stream...");
            z.putNextEntry(new java.util.zip.ZipEntry(name));
            int bytesRead;
            byte[] buff = new byte[512];
            while ((bytesRead = is.read(buff)) != -1) {
                z.write(buff, 0, bytesRead);
            }
            logger.info("writing buffer...");
            z.closeEntry();
            z.finish();
        }
        logger.info("finished zipping up file...");
        // set up the zip to go into the database
        if (saveToDB) {
            ArchivedDatasetFileBean fb = new ArchivedDatasetFileBean();
            fb.setName(name + ".zip");
            // current location of the file on the system
            fb.setFileReference(dir + name + ".zip");
            // NOTE(review): size of the uncompressed text file, not the zip —
            // preserved from the original; confirm whether compressed size was intended
            fb.setFileSize((int) newFile.length());
            fb.setRunTime((int) time);
            fb.setDatasetId(datasetBean.getId());
            fb.setExportFormatBean(efb);
            fb.setExportFormatId(efb.getId());
            fb.setOwner(userBean);
            fb.setOwnerId(userBean.getId());
            fb.setDateCreated(new Date(System.currentTimeMillis()));
            // duplicate checks were removed deliberately so multiple files can be
            // created per dataset (tbh 6-7); every call persists a new row
            ArchivedDatasetFileDAO asdfDAO = new ArchivedDatasetFileDAO(ds);
            fbFinal = (ArchivedDatasetFileBean) asdfDAO.create(fb);
        }
        // created in database!
    } catch (Exception e) {
        // best-effort: callers treat a 0 id as "no file created"
        logger.error("-- exception thrown at createFile: " + e.getMessage());
        e.printStackTrace();
    }
    return fbFinal.getId();
}
Usage of org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean in the OpenClinica project:
class ShowFileServlet, method processRequest.
/**
 * Displays a single archived dataset file: loads the dataset and file beans by
 * the ids in the request, builds a one-row display table, and forwards to the
 * metadata-file JSP.
 */
@Override
public void processRequest() throws Exception {
    FormProcessor fp = new FormProcessor(request);
    int fileId = fp.getInt("fileId");
    int datasetId = fp.getInt("datasetId");
    DatasetDAO datasetDao = new DatasetDAO(sm.getDataSource());
    DatasetBean dataset = (DatasetBean) datasetDao.findByPK(datasetId);
    ArchivedDatasetFileDAO fileDao = new ArchivedDatasetFileDAO(sm.getDataSource());
    ArchivedDatasetFileBean fileBean = (ArchivedDatasetFileBean) fileDao.findByPK(fileId);
    // Single-element list: this view shows exactly one archived file.
    ArrayList beans = new ArrayList();
    beans.add(fileBean);
    ArrayList rows = ArchivedDatasetFileRow.generateRowsFromBeans(beans);
    EntityBeanTable table = fp.getEntityBeanTable();
    String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    // No column is clickable in this read-only view.
    for (int col = 0; col < columns.length; col++) {
        table.hideColumnLink(col);
    }
    request.setAttribute("dataset", dataset);
    request.setAttribute("file", fileBean);
    table.setRows(rows);
    table.computeDisplay();
    request.setAttribute("table", table);
    Page finalTarget = Page.EXPORT_DATA_CUSTOM;
    finalTarget.setFileName("/WEB-INF/jsp/extract/generateMetadataFile.jsp");
    forwardPage(finalTarget);
}
Usage of org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean in the OpenClinica project:
class ArchivedDatasetFileRow, method compareColumn.
/**
 * Compares this row's archived-file bean with another row's bean on the given
 * column. Supports case-insensitive file-name and creation-date ordering; any
 * other column — or a row of a different type — compares as equal (0).
 */
@Override
protected int compareColumn(Object row, int sortingColumn) {
    // Rows of a different concrete type are not comparable.
    if (!row.getClass().equals(ArchivedDatasetFileRow.class)) {
        return 0;
    }
    ArchivedDatasetFileBean left = (ArchivedDatasetFileBean) bean;
    ArchivedDatasetFileBean right = (ArchivedDatasetFileBean) ((ArchivedDatasetFileRow) row).bean;
    if (sortingColumn == COL_FILENAME) {
        return left.getName().toLowerCase().compareTo(right.getName().toLowerCase());
    }
    if (sortingColumn == COL_FILECREATEDDATE) {
        return left.getDateCreated().compareTo(right.getDateCreated());
    }
    return 0;
}
Usage of org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean in the OpenClinica project:
class ExportDatasetServlet, method generateFileBean.
/**
 * Builds an (unpersisted) ArchivedDatasetFileBean describing an on-disk dataset
 * file: name, size, export format, web path, and a creation date taken from the
 * file's last-modified timestamp.
 *
 * @param datasetFile  existing dataset file on disk
 * @param relativePath web path recorded on the bean
 * @param formatId     export format id recorded on the bean
 * @return a populated bean; never persisted here and never null
 */
public ArchivedDatasetFileBean generateFileBean(File datasetFile, String relativePath, int formatId) {
    ArchivedDatasetFileBean adfb = new ArchivedDatasetFileBean();
    adfb.setName(datasetFile.getName());
    if (datasetFile.canRead()) {
        logger.info("File can be read");
    } else {
        logger.info("File CANNOT be read");
    }
    logger.info("Found file length: " + datasetFile.length());
    logger.info("Last Modified: " + datasetFile.lastModified());
    // direct narrowing cast instead of the deprecated new Long(..).intValue();
    // NOTE(review): files >= 2 GiB would still overflow the int field
    adfb.setFileSize((int) datasetFile.length());
    adfb.setExportFormatId(formatId);
    adfb.setWebPath(relativePath);
    adfb.setDateCreated(new java.util.Date(datasetFile.lastModified()));
    return adfb;
}
Usage of org.akaza.openclinica.bean.extract.ArchivedDatasetFileBean in the OpenClinica project:
class ExportDatasetServlet, method loadList.
/**
 * Populates the request with the dataset's list of archived export files and a
 * display table, then fills the side info panel with dataset metadata.
 * Only archived entries whose file reference still exists on disk are listed.
 *
 * @param asdfdao   DAO used to look up archived files for the dataset
 * @param datasetId id used for the DAO lookup
 * @param fp        form processor supplying the display table
 * @param eb        extract bean supplying study name/identifier for the panel
 */
public void loadList(DatasetBean db, ArchivedDatasetFileDAO asdfdao, int datasetId, FormProcessor fp, ExtractBean eb) {
    logger.info("action is blank");
    request.setAttribute("dataset", db);
    logger.info("just set dataset to request");
    request.setAttribute("extractProperties", CoreResources.getExtractProperties());
    // find out if there are any files here:
    File currentDir = new File(DATASET_DIR + db.getId() + File.separator);
    // JN: directory creation removed on purpose — it created directories for no
    // reason. TODO: check why it was ever added.
    ArrayList fileListRaw = asdfdao.findByDatasetId(datasetId);
    fileList = new ArrayList();
    Iterator fileIterator = fileListRaw.iterator();
    while (fileIterator.hasNext()) {
        ArchivedDatasetFileBean asdfBean = (ArchivedDatasetFileBean) fileIterator.next();
        // the web path now points straight at the stored file reference (tbh, 4-18)
        asdfBean.setWebPath(asdfBean.getFileReference());
        // skip database entries whose file has vanished from disk
        if (new File(asdfBean.getFileReference()).isFile()) {
            fileList.add(asdfBean);
        } else {
            logger.warn(asdfBean.getFileReference() + " is NOT a file!");
        }
    }
    logger.warn("file list length: " + fileList.size());
    request.setAttribute("filelist", fileList);
    ArrayList filterRows = ArchivedDatasetFileRow.generateRowsFromBeans(fileList);
    EntityBeanTable table = fp.getEntityBeanTable();
    // default sort: created date, descending
    table.setSortingIfNotExplicitlySet(3, false);
    String[] columns = { resword.getString("file_name"), resword.getString("run_time"), resword.getString("file_size"), resword.getString("created_date"), resword.getString("created_by"), resword.getString("action") };
    table.setColumns(new ArrayList(Arrays.asList(columns)));
    // no clickable column headers in this table
    for (int col = 0; col < columns.length; col++) {
        table.hideColumnLink(col);
    }
    table.setQuery("ExportDataset?datasetId=" + db.getId(), new HashMap());
    session.setAttribute("newDataset", db);
    table.setRows(filterRows);
    table.computeDisplay();
    request.setAttribute("table", table);
    // for the side info bar
    resetPanel();
    panel.setStudyInfoShown(false);
    setToPanel(resword.getString("study_name"), eb.getStudy().getName());
    setToPanel(resword.getString("protocol_ID"), eb.getStudy().getIdentifier());
    setToPanel(resword.getString("dataset_name"), db.getName());
    setToPanel(resword.getString("created_date"), local_df.format(db.getCreatedDate()));
    setToPanel(resword.getString("dataset_owner"), db.getOwner().getName());
    // explicit null check instead of the old catch(NullPointerException):
    // date_last_run is null until the dataset has been run at least once
    java.util.Date lastRun = db.getDateLastRun();
    if (lastRun != null) {
        setToPanel(resword.getString("date_last_run"), local_df.format(lastRun));
    } else {
        logger.info("dataset " + db.getId() + " has no last-run date yet");
    }
    logger.warn("just set file list to request, sending to page");
}
End of aggregated usage examples.