Use of edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand in project dataverse by IQSS.
The class DownloadInstanceWriter, method writeTo:
@Override
public void writeTo(DownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream outstream) throws IOException, WebApplicationException {
    if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) {
        DataAccessRequest daReq = new DataAccessRequest();
        DataFile dataFile = di.getDownloadInfo().getDataFile();
        StorageIO<DataFile> storageIO = DataAccess.getStorageIO(dataFile, daReq);
        if (storageIO != null) {
            storageIO.open();
            if (di.getConversionParam() != null) {
                if (di.getConversionParam().equals("imageThumb") && !dataFile.isHarvested()) {
                    if ("".equals(di.getConversionParamValue())) {
                        storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE);
                    } else {
                        try {
                            int size = new Integer(di.getConversionParamValue());
                            if (size > 0) {
                                storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, size);
                            }
                        } catch (java.lang.NumberFormatException ex) {
                            storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE);
                        }
                        // and, since we now have tabular data files that can
                        // have thumbnail previews... obviously, we don't want to
                        // add the variable header to the image stream!
                        storageIO.setNoVarHeader(Boolean.TRUE);
                        storageIO.setVarHeader(null);
                    }
                } else if (dataFile.isTabularData()) {
                    logger.fine("request for tabular data download;");
                    if (di.getConversionParam().equals("noVarHeader")) {
                        logger.fine("tabular data with no var header requested");
                        storageIO.setNoVarHeader(Boolean.TRUE);
                        storageIO.setVarHeader(null);
                    } else if (di.getConversionParam().equals("format")) {
                        if ("original".equals(di.getConversionParamValue())) {
                            logger.fine("stored original of an ingested file requested");
                            storageIO = StoredOriginalFile.retreive(storageIO);
                        } else {
                            // Other format conversions:
                            logger.fine("format conversion on a tabular file requested (" + di.getConversionParamValue() + ")");
                            String requestedMimeType = di.getServiceFormatType(di.getConversionParam(), di.getConversionParamValue());
                            if (requestedMimeType == null) {
                                // default mime type, in case real type is unknown;
                                // (this shouldn't happen in real life - but just in case):
                                requestedMimeType = "application/octet-stream";
                            }
                            storageIO = DataConverter.performFormatConversion(dataFile, storageIO, di.getConversionParamValue(), requestedMimeType);
                        }
                    } else if (di.getConversionParam().equals("subset")) {
                        logger.fine("processing subset request.");
                        if (di.getExtraArguments() != null && di.getExtraArguments().size() > 0) {
                            logger.fine("processing extra arguments list of length " + di.getExtraArguments().size());
                            List<Integer> variablePositionIndex = new ArrayList<>();
                            String subsetVariableHeader = null;
                            for (int i = 0; i < di.getExtraArguments().size(); i++) {
                                DataVariable variable = (DataVariable) di.getExtraArguments().get(i);
                                if (variable != null) {
                                    if (variable.getDataTable().getDataFile().getId().equals(dataFile.getId())) {
                                        logger.fine("adding variable id " + variable.getId() + " to the list.");
                                        variablePositionIndex.add(variable.getFileOrder());
                                        if (subsetVariableHeader == null) {
                                            subsetVariableHeader = variable.getName();
                                        } else {
                                            subsetVariableHeader = subsetVariableHeader.concat("\t");
                                            subsetVariableHeader = subsetVariableHeader.concat(variable.getName());
                                        }
                                    } else {
                                        logger.warning("variable does not belong to this data file.");
                                    }
                                }
                            }
                            if (variablePositionIndex.size() > 0) {
                                try {
                                    File tempSubsetFile = File.createTempFile("tempSubsetFile", ".tmp");
                                    TabularSubsetGenerator tabularSubsetGenerator = new TabularSubsetGenerator();
                                    tabularSubsetGenerator.subsetFile(storageIO.getInputStream(), tempSubsetFile.getAbsolutePath(), variablePositionIndex, dataFile.getDataTable().getCaseQuantity(), "\t");
                                    if (tempSubsetFile.exists()) {
                                        FileInputStream subsetStream = new FileInputStream(tempSubsetFile);
                                        long subsetSize = tempSubsetFile.length();
                                        InputStreamIO subsetStreamIO = new InputStreamIO(subsetStream, subsetSize);
                                        logger.fine("successfully created subset output stream.");
                                        subsetVariableHeader = subsetVariableHeader.concat("\n");
                                        subsetStreamIO.setVarHeader(subsetVariableHeader);
                                        String tabularFileName = storageIO.getFileName();
                                        if (tabularFileName != null && tabularFileName.endsWith(".tab")) {
                                            tabularFileName = tabularFileName.replaceAll("\\.tab$", "-subset.tab");
                                        } else if (tabularFileName != null && !"".equals(tabularFileName)) {
                                            tabularFileName = tabularFileName.concat("-subset.tab");
                                        } else {
                                            tabularFileName = "subset.tab";
                                        }
                                        subsetStreamIO.setFileName(tabularFileName);
                                        subsetStreamIO.setMimeType(storageIO.getMimeType());
                                        storageIO = subsetStreamIO;
                                    } else {
                                        storageIO = null;
                                    }
                                } catch (IOException ioex) {
                                    storageIO = null;
                                }
                            }
                        } else {
                            logger.fine("empty list of extra arguments.");
                        }
                    }
                }
                if (storageIO == null) {
                    throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE);
                }
            } else {
                if (storageIO instanceof S3AccessIO && !(dataFile.isTabularData()) && isRedirectToS3()) {
                    // [attempt to] redirect:
                    String redirect_url_str = ((S3AccessIO) storageIO).generateTemporaryS3Url();
                    // better exception handling here?
                    logger.info("Data Access API: direct S3 url: " + redirect_url_str);
                    URI redirect_uri;
                    try {
                        redirect_uri = new URI(redirect_url_str);
                    } catch (URISyntaxException ex) {
                        logger.info("Data Access API: failed to create S3 redirect url (" + redirect_url_str + ")");
                        redirect_uri = null;
                    }
                    if (redirect_uri != null) {
                        // definitely close the (still open) S3 input stream,
                        // since we are not going to use it. The S3 documentation
                        // emphasizes that it is very important not to leave these
                        // lying around un-closed, since they are going to fill
                        // up the S3 connection pool!
                        storageIO.getInputStream().close();
                        // increment the download count, if necessary:
                        if (di.getGbr() != null) {
                            try {
                                logger.fine("writing guestbook response, for an S3 download redirect.");
                                Command<?> cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner());
                                di.getCommand().submit(cmd);
                            } catch (CommandException e) {
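                                // (nothing to do here: if recording the guestbook response fails,
                                // the S3 redirect below still proceeds)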
                            }
                        }
                        // finally, issue the redirect:
                        Response response = Response.seeOther(redirect_uri).build();
                        logger.info("Issuing redirect to the file location on S3.");
                        throw new RedirectionException(response);
                    }
                }
            }
            InputStream instream = storageIO.getInputStream();
            if (instream != null) {
                // headers:
                String fileName = storageIO.getFileName();
                String mimeType = storageIO.getMimeType();
                // Provide both the "Content-disposition" and "Content-Type" headers,
                // to satisfy the widest selection of browsers out there.
                httpHeaders.add("Content-disposition", "attachment; filename=\"" + fileName + "\"");
                httpHeaders.add("Content-Type", mimeType + "; name=\"" + fileName + "\"");
                long contentSize;
                boolean useChunkedTransfer = false;
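                // note: useChunkedTransfer is never set to true in this method (the lines
                // that would enable it are commented out below), so the chunked-transfer
                // framing blocks further down are effectively inactive.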
                // if ((contentSize = getFileSize(di, storageIO.getVarHeader())) > 0) {
                if ((contentSize = getContentSize(storageIO)) > 0) {
                    logger.fine("Content size (retrieved from the AccessObject): " + contentSize);
                    httpHeaders.add("Content-Length", contentSize);
                } else {
                    // httpHeaders.add("Transfer-encoding", "chunked");
                    // useChunkedTransfer = true;
                }
                // (the httpHeaders map must be modified *before* writing any
                // data in the output stream!)
                int bufsize;
                byte[] bffr = new byte[4 * 8192];
                byte[] chunkClose = "\r\n".getBytes();
                if (storageIO.getVarHeader() != null) {
                    if (storageIO.getVarHeader().getBytes().length > 0) {
                        if (useChunkedTransfer) {
                            String chunkSizeLine = String.format("%x\r\n", storageIO.getVarHeader().getBytes().length);
                            outstream.write(chunkSizeLine.getBytes());
                        }
                        outstream.write(storageIO.getVarHeader().getBytes());
                        if (useChunkedTransfer) {
                            outstream.write(chunkClose);
                        }
                    }
                }
                while ((bufsize = instream.read(bffr)) != -1) {
                    if (useChunkedTransfer) {
                        String chunkSizeLine = String.format("%x\r\n", bufsize);
                        outstream.write(chunkSizeLine.getBytes());
                    }
                    outstream.write(bffr, 0, bufsize);
                    if (useChunkedTransfer) {
                        outstream.write(chunkClose);
                    }
                }
                if (useChunkedTransfer) {
                    String chunkClosing = "0\r\n\r\n";
                    outstream.write(chunkClosing.getBytes());
                }
                logger.fine("di conversion param: " + di.getConversionParam() + ", value: " + di.getConversionParamValue());
                if (di.getGbr() != null && !(isThumbnailDownload(di) || isPreprocessedMetadataDownload(di))) {
                    try {
                        logger.fine("writing guestbook response.");
                        Command<?> cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner());
                        di.getCommand().submit(cmd);
                    } catch (CommandException e) {
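                        // (nothing to do here: a failure to record the guestbook response
                        // should not abort a download that has already been streamed)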
                    }
                } else {
                    logger.fine("not writing guestbook response");
                }
                instream.close();
                outstream.close();
                return;
            }
        }
    }
    throw new WebApplicationException(Response.Status.NOT_FOUND);
}
Use of edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand in project dataverse by IQSS.
The class FileDownloadServiceBean, method writeGuestbookResponseRecord:
public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse) {
    try {
        CreateGuestbookResponseCommand cmd = new CreateGuestbookResponseCommand(dvRequestService.getDataverseRequest(), guestbookResponse, guestbookResponse.getDataset());
        commandEngine.submit(cmd);
    } catch (CommandException e) {
        // if an error occurs here, the download won't happen, so there is no need for a response record...
    }
}
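Both call sites follow the same pattern: the CreateGuestbookResponseCommand is built from the current DataverseRequest, the GuestbookResponse being recorded, and the Dataset that owns the downloaded file (getDataFile().getOwner() in the writer, getDataset() in the service bean), then submitted for execution (di.getCommand().submit(...) in the writer, commandEngine.submit(...) in the service bean), with any CommandException caught rather than propagated.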