Use of javax.ws.rs.ServiceUnavailableException in the IQSS/dataverse project:
the PidUtil class, queryDoi method.
/**
 * Queries DataCite for the registration state of a DOI.
 *
 * @param persistentId the user-supplied persistent identifier; must be a DOI.
 * @param baseUrl      base URL of the DataCite REST API.
 * @param username     DataCite account name for HTTP Basic auth.
 * @param password     DataCite account password for HTTP Basic auth.
 * @return a builder with the DOI "id" and "state" as reported by DataCite.
 *
 * @throws BadRequestException if user didn't supply a DOI.
 *
 * @throws NotFoundException if DOI not found in DataCite.
 *
 * @throws ServiceUnavailableException if non 200 or non 404 response from
 * DataCite.
 *
 * @throws InternalServerErrorException on local misconfiguration such as
 * DataCite hostname not in DNS.
 */
public static JsonObjectBuilder queryDoi(String persistentId, String baseUrl, String username, String password) {
    try {
        // This throws an exception if this is not a DOI, which is the only
        // user-supplied param - treat this as a BadRequest in the catch statement.
        String doi = acceptOnlyDoi(persistentId);
        // Other errors are all internal misconfiguration (any problems creating the URL), the
        // DOI doesn't exist (404 from DataCite), or problem at DataCite (other non-200 responses).
        int status = 0;
        HttpURLConnection connection = null;
        try {
            URL url = new URL(baseUrl + "/dois/" + doi);
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            String userpass = username + ":" + password;
            // Encode the credentials explicitly as UTF-8 rather than relying on the
            // platform default charset, and use encodeToString instead of the
            // encode-then-wrap-in-String round trip.
            String basicAuth = "Basic " + Base64.getEncoder().encodeToString(userpass.getBytes(java.nio.charset.StandardCharsets.UTF_8));
            connection.setRequestProperty("Authorization", basicAuth);
            status = connection.getResponseCode();
        } catch (IOException ex) {
            // Hostname not in DNS, for example.
            throw new InternalServerErrorException(BundleUtil.getStringFromBundle("pids.datacite.errors.noResponseCode", Arrays.asList(baseUrl)));
        }
        if (status == 404) {
            // Could check to see if Dataverse expects the DOI to be registered - that would result
            // in a 404 from Dataverse before having to contact DataCite, and DataCite could still return a 404
            throw new NotFoundException("404 (NOT FOUND) from DataCite for DOI " + persistentId);
        }
        if (status != 200) {
            /* We could just send back whatever status code DataCite sends, but we've seen
             * DataCite sometimes respond with 403 when the credentials were OK, and their
             * 500 error doesn't mean a problem with Dataverse, so wrapping any of them in
             * a 503 error, to indicate this is a temporary error, might be the better option. In any case, we need to log the
             * issue to be able to debug it.
             */
            logger.severe("Received " + status + " error from DataCite for DOI: " + persistentId);
            InputStream errorStream = connection.getErrorStream();
            if (errorStream != null) {
                // Read from the errorStream variable obtained above (previously
                // connection.getErrorStream() was redundantly called a second time here).
                JsonObject out = Json.createReader(errorStream).readObject();
                logger.severe("DataCite error response: " + out.toString());
            } else {
                logger.severe("No error stream from DataCite");
            }
            throw new ServiceUnavailableException();
        }
        JsonObject out;
        try {
            out = Json.createReader(connection.getInputStream()).readObject();
        } catch (IOException ex) {
            return Json.createObjectBuilder().add("response", ex.getLocalizedMessage());
        }
        JsonObject data = out.getJsonObject("data");
        String id = data.getString("id");
        JsonObject attributes = data.getJsonObject("attributes");
        String state = attributes.getString("state");
        return Json.createObjectBuilder().add("id", id).add("state", state);
    } catch (IllegalArgumentException ex) {
        // acceptOnlyDoi() signals a malformed / non-DOI identifier this way.
        throw new BadRequestException(ex.getLocalizedMessage());
    }
}
Use of javax.ws.rs.ServiceUnavailableException in the IQSS/dataverse project:
the Access class, dvCardImage method.
/**
 * Returns a PNG thumbnail (48px) of the dataverse's custom logo for its "card",
 * or null when the dataverse has no usable theme logo.
 *
 * @param dataverseId database id of the dataverse.
 * @return an open InputStream over the cached thumbnail, or null if unavailable.
 */
@Path("dvCardImage/{dataverseId}")
@GET
@Produces({ "image/png" })
public InputStream dvCardImage(@PathParam("dataverseId") Long dataverseId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/
{
    logger.fine("entering dvCardImage");
    Dataverse dataverse = dataverseService.find(dataverseId);
    if (dataverse == null) {
        // BUG FIX: the previous message was copy-pasted from DatasetVersion preview
        // code; this lookup is for a Dataverse object.
        logger.warning("dvCardImage: could not locate a Dataverse object for id " + dataverseId + "!");
        return null;
    }
    if (dataverse.getDataverseTheme() != null && dataverse.getDataverseTheme().getLogo() != null && !dataverse.getDataverseTheme().getLogo().equals("")) {
        File dataverseLogoFile = getLogo(dataverse);
        if (dataverseLogoFile != null) {
            logger.fine("dvCardImage: logo file found");
            InputStream in = null;
            try {
                if (dataverseLogoFile.exists()) {
                    // Generate (or reuse) a 48px thumbnail of the logo on disk.
                    String logoThumbNailPath = ImageThumbConverter.generateImageThumbnailFromFile(dataverseLogoFile.getAbsolutePath(), 48);
                    if (logoThumbNailPath != null) {
                        in = new FileInputStream(logoThumbNailPath);
                    }
                }
            } catch (Exception ex) {
                // Best-effort: fall through and return null, but leave a trace for
                // debugging instead of swallowing the failure completely silently.
                logger.fine("dvCardImage: failed to open thumbnail for dataverse logo: " + ex.getLocalizedMessage());
                in = null;
            }
            if (in != null) {
                logger.fine("dvCardImage: successfully obtained thumbnail for dataverse logo.");
                return in;
            }
        }
    }
    /*
    StorageIO thumbnailDataAccess = null;
    if (!dataverse.isHarvested()) {
    for (Dataset dataset : datasetService.findPublishedByOwnerId(dataverseId)) {
    logger.info("dvCardImage: checking dataset "+dataset.getGlobalId());
    if (dataset != null) {
    DatasetVersion releasedVersion = dataset.getReleasedVersion();
    logger.info("dvCardImage: obtained released version "+releasedVersion.getTitle());
    thumbnailDataAccess = getThumbnailForDatasetVersion(releasedVersion);
    if (thumbnailDataAccess != null) {
    logger.info("dvCardImage: obtained thumbnail for the version.");
    break;
    }
    }
    }
    }
    if (thumbnailDataAccess != null && thumbnailDataAccess.getInputStream() != null) {
    return thumbnailDataAccess.getInputStream();
    }
    */
    return null;
}
Use of javax.ws.rs.ServiceUnavailableException in the IQSS/dataverse project:
the Access class, datafileBundle method.
// @EJB
// TODO:
// versions? -- L.A. 4.0 beta 10
/**
 * Streams a "bundle" download for a single datafile: the file plus its citations
 * (EndNote, RIS, BibTeX) and, when it can be generated, the DDI XML.
 *
 * @param fileId         id (or persistent id string) of the datafile.
 * @param fileMetadataId optional FileMetadata version to cite; defaults to the
 *                       file's current metadata when null.
 * @param gbrecs         when true, suppresses writing a guestbook record.
 * @param apiToken       API token; falls back to the request header when absent.
 * @return the populated BundleDownloadInstance.
 */
@Path("datafile/bundle/{fileId}")
@GET
@Produces({ "application/zip" })
public BundleDownloadInstance datafileBundle(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/
{
    DataFile df = findDataFileOrDieWrapper(fileId);
    if (apiToken == null || apiToken.equals("")) {
        apiToken = headers.getHeaderString(API_KEY_HEADER);
    }
    // This will throw a ForbiddenException if access isn't authorized:
    checkAuthorization(df, apiToken);
    if (!gbrecs && df.isReleased()) {
        // Write Guestbook record if not done previously and file is released
        User apiTokenUser = findAPITokenUser(apiToken);
        GuestbookResponse gbr = guestbookResponseService.initAPIGuestbookResponse(df.getOwner(), df, session, apiTokenUser);
        guestbookResponseService.save(gbr);
        MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, df);
        mdcLogService.logEntry(entry);
    }
    DownloadInfo dInfo = new DownloadInfo(df);
    BundleDownloadInstance downloadInstance = new BundleDownloadInstance(dInfo);
    // Cite either the current file metadata or a specific version, if requested.
    FileMetadata fileMetadata = (fileMetadataId == null) ? df.getFileMetadata() : dataFileService.findFileMetadata(fileMetadataId);
    downloadInstance.setFileCitationEndNote(new DataCitation(fileMetadata).toEndNoteString());
    downloadInstance.setFileCitationRIS(new DataCitation(fileMetadata).toRISString());
    downloadInstance.setFileCitationBibtex(new DataCitation(fileMetadata).toBibtexString());
    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    try {
        ddiExportService.exportDataFile(df.getId(), outStream, null, null, fileMetadataId);
        downloadInstance.setFileDDIXML(outStream.toString());
    } catch (Exception ex) {
        // if we can't generate the DDI, it's ok; we'll just generate the bundle
        // without it - but leave a trace instead of swallowing the failure silently.
        logger.fine("datafileBundle: DDI export failed for datafile " + df.getId() + ": " + ex.getLocalizedMessage());
    }
    return downloadInstance;
}
Use of javax.ws.rs.ServiceUnavailableException in the IQSS/dataverse project:
the DownloadInstanceWriter class, writeTo method.
/**
 * JAX-RS MessageBodyWriter entry point for DownloadInstance: locates the physical
 * file via StorageIO, optionally redirects the client to remote (S3) storage,
 * applies any requested conversion (thumbnail, tabular format conversion,
 * noVarHeader, column subset) or auxiliary-file selection, then streams the bytes
 * (honoring a single Range header) and records guestbook / MakeDataCount entries
 * where appropriate.
 *
 * Control flow uses thrown JAX-RS responses: NotFoundException when the file (or
 * the requested derived object) cannot be served, ServiceUnavailableException when
 * an S3 redirect cannot be generated, RedirectionException (303) for an S3 URL.
 */
@Override
public void writeTo(DownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream outstream) throws IOException, WebApplicationException {
if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) {
DataAccessRequest daReq = new DataAccessRequest();
DataFile dataFile = di.getDownloadInfo().getDataFile();
StorageIO<DataFile> storageIO = DataAccess.getStorageIO(dataFile, daReq);
if (storageIO != null) {
try {
storageIO.open();
} catch (IOException ioex) {
// throw new WebApplicationException(Response.Status.SERVICE_UNAVAILABLE);
// A failure to locate/open the physical file is reported to the client as a 404.
logger.log(Level.INFO, "Datafile {0}: Failed to locate and/or open physical file. Error message: {1}", new Object[] { dataFile.getId(), ioex.getLocalizedMessage() });
throw new NotFoundException("Datafile " + dataFile.getId() + ": Failed to locate and/or open physical file.");
}
// Check whether the download request can be satisfied
// by a redirect to remote storage (only supported on S3, as of 5.4):
if (storageIO instanceof S3AccessIO && ((S3AccessIO) storageIO).downloadRedirectEnabled()) {
// Even if the above is true, there are a few cases where a
// redirect is not applicable.
// For example, for a tabular file, we can redirect a request
// for a saved original; but CANNOT if it is a column subsetting
// request (must be streamed in real time locally); or a format
// conversion that hasn't been cached and saved on S3 yet.
boolean redirectSupported = true;
String auxiliaryTag = null;
String auxiliaryType = null;
String auxiliaryFileName = null;
if ("imageThumb".equals(di.getConversionParam())) {
// Can redirect - but only if already generated and cached.
int requestedSize = 0;
if (!"".equals(di.getConversionParamValue())) {
try {
requestedSize = new Integer(di.getConversionParamValue());
} catch (java.lang.NumberFormatException ex) {
// it's ok, the default size will be used.
}
}
auxiliaryTag = ImageThumbConverter.THUMBNAIL_SUFFIX + (requestedSize > 0 ? requestedSize : ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE);
if (isAuxiliaryObjectCached(storageIO, auxiliaryTag)) {
auxiliaryType = ImageThumbConverter.THUMBNAIL_MIME_TYPE;
String fileName = storageIO.getFileName();
if (fileName != null) {
// Swap the original extension for the thumbnail extension:
auxiliaryFileName = fileName.replaceAll("\\.[^\\.]*$", ImageThumbConverter.THUMBNAIL_FILE_EXTENSION);
}
} else {
redirectSupported = false;
}
} else if (di.getAuxiliaryFile() != null) {
// We should support redirects to auxiliary files too.
auxiliaryTag = di.getAuxiliaryFile().getFormatTag();
String auxVersion = di.getAuxiliaryFile().getFormatVersion();
if (auxVersion != null) {
auxiliaryTag = auxiliaryTag + "_" + auxVersion;
}
if (isAuxiliaryObjectCached(storageIO, auxiliaryTag)) {
String fileExtension = getFileExtension(di.getAuxiliaryFile());
auxiliaryFileName = storageIO.getFileName() + "." + auxiliaryTag + fileExtension;
auxiliaryType = di.getAuxiliaryFile().getContentType();
} else {
redirectSupported = false;
}
} else if (dataFile.isTabularData()) {
if (di.getConversionParam() != null) {
if (di.getConversionParam().equals("format")) {
if ("original".equals(di.getConversionParamValue())) {
auxiliaryTag = StoredOriginalFile.SAVED_ORIGINAL_FILENAME_EXTENSION;
auxiliaryType = dataFile.getOriginalFileFormat();
auxiliaryFileName = dataFile.getOriginalFileName();
} else {
// format conversions - can redirect, but only if
// it has been cached already.
auxiliaryTag = di.getConversionParamValue();
if (isAuxiliaryObjectCached(storageIO, auxiliaryTag)) {
auxiliaryType = di.getServiceFormatType(di.getConversionParam(), auxiliaryTag);
auxiliaryFileName = FileUtil.replaceExtension(storageIO.getFileName(), auxiliaryTag);
} else {
redirectSupported = false;
}
}
} else if (!di.getConversionParam().equals("noVarHeader")) {
// This is a subset request - can't do.
redirectSupported = false;
}
} else {
// Tabular file with no conversion param - served locally (no redirect).
redirectSupported = false;
}
}
if (redirectSupported) {
// Close the main InputStream we are not going to use, so as not to tie
// up the S3 connection pool!
try {
storageIO.getInputStream().close();
} catch (IOException ioex) {
// ignore - nothing to do if the stream is already closed/unavailable.
}
// [attempt to] redirect:
String redirect_url_str;
try {
redirect_url_str = ((S3AccessIO) storageIO).generateTemporaryS3Url(auxiliaryTag, auxiliaryType, auxiliaryFileName);
} catch (IOException ioex) {
redirect_url_str = null;
}
if (redirect_url_str == null) {
// Could not generate a temporary S3 url - report 503.
throw new ServiceUnavailableException();
}
logger.fine("Data Access API: direct S3 url: " + redirect_url_str);
URI redirect_uri;
try {
redirect_uri = new URI(redirect_url_str);
} catch (URISyntaxException ex) {
logger.info("Data Access API: failed to create S3 redirect url (" + redirect_url_str + ")");
redirect_uri = null;
}
if (redirect_uri != null) {
// increment the download count, if necessary:
if (di.getGbr() != null && !(isThumbnailDownload(di) || isPreprocessedMetadataDownload(di))) {
try {
logger.fine("writing guestbook response, for an S3 download redirect.");
Command<?> cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner());
di.getCommand().submit(cmd);
MakeDataCountEntry entry = new MakeDataCountEntry(di.getRequestUriInfo(), di.getRequestHttpHeaders(), di.getDataverseRequestService(), di.getGbr().getDataFile());
mdcLogService.logEntry(entry);
} catch (CommandException e) {
// best-effort: a failed guestbook write should not block the redirect.
}
}
// finally, issue the redirect:
Response response = Response.seeOther(redirect_uri).build();
logger.fine("Issuing redirect to the file location on S3.");
throw new RedirectionException(response);
}
// Redirect was supported but the url could not be turned into a URI - 503.
throw new ServiceUnavailableException();
}
}
// No redirect: apply conversion parameters and stream the bytes locally.
if (di.getConversionParam() != null) {
if (di.getConversionParam().equals("imageThumb") && !dataFile.isHarvested()) {
if ("".equals(di.getConversionParamValue())) {
storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE);
} else {
try {
int size = new Integer(di.getConversionParamValue());
if (size > 0) {
storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, size);
}
} catch (java.lang.NumberFormatException ex) {
storageIO = ImageThumbConverter.getImageThumbnailAsInputStream(storageIO, ImageThumbConverter.DEFAULT_THUMBNAIL_SIZE);
}
// and, since we now have tabular data files that can
// have thumbnail previews... obviously, we don't want to
// add the variable header to the image stream!
storageIO.setNoVarHeader(Boolean.TRUE);
storageIO.setVarHeader(null);
}
} else if (dataFile.isTabularData()) {
logger.fine("request for tabular data download;");
if (di.getConversionParam().equals("noVarHeader")) {
logger.fine("tabular data with no var header requested");
storageIO.setNoVarHeader(Boolean.TRUE);
storageIO.setVarHeader(null);
} else if (di.getConversionParam().equals("format")) {
if ("original".equals(di.getConversionParamValue())) {
logger.fine("stored original of an ingested file requested");
storageIO = StoredOriginalFile.retreive(storageIO);
} else {
// Other format conversions:
logger.fine("format conversion on a tabular file requested (" + di.getConversionParamValue() + ")");
String requestedMimeType = di.getServiceFormatType(di.getConversionParam(), di.getConversionParamValue());
if (requestedMimeType == null) {
// default mime type, in case real type is unknown;
// (this shouldn't happen in real life - but just in case):
requestedMimeType = "application/octet-stream";
}
storageIO = DataConverter.performFormatConversion(dataFile, storageIO, di.getConversionParamValue(), requestedMimeType);
}
} else if (di.getConversionParam().equals("subset")) {
logger.fine("processing subset request.");
// instead.
if (di.getExtraArguments() != null && di.getExtraArguments().size() > 0) {
logger.fine("processing extra arguments list of length " + di.getExtraArguments().size());
// Collect the file-order positions of the requested variables, building
// the tab-separated header line of their names as we go.
List<Integer> variablePositionIndex = new ArrayList<>();
String subsetVariableHeader = null;
for (int i = 0; i < di.getExtraArguments().size(); i++) {
DataVariable variable = (DataVariable) di.getExtraArguments().get(i);
if (variable != null) {
if (variable.getDataTable().getDataFile().getId().equals(dataFile.getId())) {
logger.fine("adding variable id " + variable.getId() + " to the list.");
variablePositionIndex.add(variable.getFileOrder());
if (subsetVariableHeader == null) {
subsetVariableHeader = variable.getName();
} else {
subsetVariableHeader = subsetVariableHeader.concat("\t");
subsetVariableHeader = subsetVariableHeader.concat(variable.getName());
}
} else {
logger.warning("variable does not belong to this data file.");
}
}
}
if (variablePositionIndex.size() > 0) {
try {
File tempSubsetFile = File.createTempFile("tempSubsetFile", ".tmp");
TabularSubsetGenerator tabularSubsetGenerator = new TabularSubsetGenerator();
tabularSubsetGenerator.subsetFile(storageIO.getInputStream(), tempSubsetFile.getAbsolutePath(), variablePositionIndex, dataFile.getDataTable().getCaseQuantity(), "\t");
if (tempSubsetFile.exists()) {
FileInputStream subsetStream = new FileInputStream(tempSubsetFile);
long subsetSize = tempSubsetFile.length();
// Wrap the generated subset in an InputStreamIO, replacing the original storage object:
InputStreamIO subsetStreamIO = new InputStreamIO(subsetStream, subsetSize);
logger.fine("successfully created subset output stream.");
subsetVariableHeader = subsetVariableHeader.concat("\n");
subsetStreamIO.setVarHeader(subsetVariableHeader);
String tabularFileName = storageIO.getFileName();
if (tabularFileName != null && tabularFileName.endsWith(".tab")) {
tabularFileName = tabularFileName.replaceAll("\\.tab$", "-subset.tab");
} else if (tabularFileName != null && !"".equals(tabularFileName)) {
tabularFileName = tabularFileName.concat("-subset.tab");
} else {
tabularFileName = "subset.tab";
}
subsetStreamIO.setFileName(tabularFileName);
subsetStreamIO.setMimeType(storageIO.getMimeType());
storageIO = subsetStreamIO;
} else {
storageIO = null;
}
} catch (IOException ioex) {
storageIO = null;
}
}
} else {
logger.fine("empty list of extra arguments.");
}
}
}
if (storageIO == null) {
// (similarly to what the Access API returns when a thumbnail is requested on a text file, etc.)
throw new NotFoundException("datafile access error: requested optional service (image scaling, format conversion, etc.) could not be performed on this datafile.");
}
} else if (di.getAuxiliaryFile() != null) {
// Make sure to close the InputStream for the main datafile:
try {
storageIO.getInputStream().close();
} catch (IOException ioex) {
// ignore - nothing to do if the stream is already closed/unavailable.
}
String auxTag = di.getAuxiliaryFile().getFormatTag();
String auxVersion = di.getAuxiliaryFile().getFormatVersion();
if (auxVersion != null) {
auxTag = auxTag + "_" + auxVersion;
}
long auxFileSize = di.getAuxiliaryFile().getFileSize();
InputStreamIO auxStreamIO = new InputStreamIO(storageIO.getAuxFileAsInputStream(auxTag), auxFileSize);
String fileExtension = getFileExtension(di.getAuxiliaryFile());
auxStreamIO.setFileName(storageIO.getFileName() + "." + auxTag + fileExtension);
auxStreamIO.setMimeType(di.getAuxiliaryFile().getContentType());
storageIO = auxStreamIO;
}
// Stream the (possibly converted/substituted) content to the client:
try (InputStream instream = storageIO.getInputStream()) {
if (instream != null) {
// headers:
String fileName = storageIO.getFileName();
String mimeType = storageIO.getMimeType();
// Provide both the "Content-disposition" and "Content-Type" headers,
// to satisfy the widest selection of browsers out there.
// Encode the filename as UTF-8, then deal with spaces. "encode" changes
// a space to + so we change it back to a space (%20).
String finalFileName = URLEncoder.encode(fileName, "UTF-8").replaceAll("\\+", "%20");
httpHeaders.add("Content-disposition", "attachment; filename=\"" + finalFileName + "\"");
httpHeaders.add("Content-Type", mimeType + "; name=\"" + finalFileName + "\"");
long contentSize;
// User may have requested a rangeHeader of bytes.
// Ranges are only supported when the size of the content
// stream is known (i.e., it's not a dynamically generated
// stream.
List<Range> ranges = new ArrayList<>();
String rangeHeader = null;
HttpHeaders headers = di.getRequestHttpHeaders();
if (headers != null) {
rangeHeader = headers.getHeaderString("Range");
}
long offset = 0;
long leftToRead = -1L;
if ((contentSize = getContentSize(storageIO)) > 0) {
try {
ranges = getRanges(rangeHeader, contentSize);
} catch (Exception ex) {
logger.fine("Exception caught processing Range header: " + ex.getLocalizedMessage());
throw new ClientErrorException("Error due to Range header: " + ex.getLocalizedMessage(), Response.Status.REQUESTED_RANGE_NOT_SATISFIABLE);
}
if (ranges.isEmpty()) {
logger.fine("Content size (retrieved from the AccessObject): " + contentSize);
httpHeaders.add("Content-Length", contentSize);
} else {
// For now we only support a single rangeHeader.
long rangeContentSize = ranges.get(0).getLength();
logger.fine("Content size (Range header in use): " + rangeContentSize);
httpHeaders.add("Content-Length", rangeContentSize);
offset = ranges.get(0).getStart();
leftToRead = rangeContentSize;
}
} else {
// We do NOT want to support rangeHeader requests on such streams:
if (rangeHeader != null) {
throw new NotFoundException("Range headers are not supported on dynamically-generated content, such as tabular subsetting.");
}
}
// (the httpHeaders map must be modified *before* writing any
// data in the output stream!)
int bufsize;
byte[] bffr = new byte[4 * 8192];
// If this is a tabular file with a generated variable header line, that header
// is logically the start of the combined output stream; write it (or the
// requested part of it) first. Only set for
// subsettable files:
if (storageIO.getVarHeader() != null) {
logger.fine("storageIO.getVarHeader().getBytes().length: " + storageIO.getVarHeader().getBytes().length);
if (storageIO.getVarHeader().getBytes().length > 0) {
// will be written.
if (ranges.isEmpty()) {
logger.fine("writing the entire variable header");
outstream.write(storageIO.getVarHeader().getBytes());
} else {
// Range requested. Since the output stream of a
// tabular file is made up of the varHeader and the body of
// the physical file, we should assume that the requested
// rangeHeader may span any portion of the combined stream.
// Thus we may or may not have to write the header, or a
// portion thereof.
int headerLength = storageIO.getVarHeader().getBytes().length;
if (offset >= headerLength) {
// We can skip the entire header.
// All we need to do is adjust the byte offset
// in the physical file; the number of bytes
// left to write stays unchanged, since we haven't
// written anything.
logger.fine("Skipping the variable header completely.");
offset -= headerLength;
} else {
// some bytes left to write from the main physical file.
if (offset + leftToRead <= headerLength) {
// This is a more straightforward case - we just need to
// write a portion of the header, and then we are done!
logger.fine("Writing this many bytes of the variable header line: " + leftToRead);
outstream.write(Arrays.copyOfRange(storageIO.getVarHeader().getBytes(), (int) offset, (int) offset + (int) leftToRead));
// set "left to read" to zero, indicating that we are done:
leftToRead = 0;
} else {
// write the requested portion of the header:
logger.fine("Writing this many bytes of the variable header line: " + (headerLength - offset));
outstream.write(Arrays.copyOfRange(storageIO.getVarHeader().getBytes(), (int) offset, headerLength));
// and adjust the file offset and remaining number of bytes accordingly:
leftToRead -= (headerLength - offset);
offset = 0;
}
}
}
}
}
// Dynamic streams, etc. Normal operation. No leftToRead.
if (ranges.isEmpty()) {
logger.fine("Normal, non-range request of file id " + dataFile.getId());
while ((bufsize = instream.read(bffr)) != -1) {
outstream.write(bffr, 0, bufsize);
}
} else if (leftToRead > 0) {
// This is a rangeHeader request, and we still have bytes to read
// (for a tabular file, we may have already written enough
// bytes from the variable header!)
storageIO.setOffset(offset);
// Thinking about it, we could just do instream.skip(offset)
// here... But I would like to have this offset functionality
// in StorageIO, for any future cases where we may not
// be able to do that on the stream directly (?) -- L.A.
logger.fine("Range request of file id " + dataFile.getId());
// For now we only support a single rangeHeader.
while ((bufsize = instream.read(bffr)) != -1) {
if ((leftToRead -= bufsize) > 0) {
// Just do a normal write. Potentially lots to go. Don't break.
outstream.write(bffr, 0, bufsize);
} else {
// Get those last bytes or bytes equal to bufsize. Last one. Then break.
// (leftToRead is <= 0 here, so "leftToRead + bufsize" is exactly the
// number of bytes remaining in the requested range.)
outstream.write(bffr, 0, (int) leftToRead + bufsize);
break;
}
}
}
logger.fine("di conversion param: " + di.getConversionParam() + ", value: " + di.getConversionParamValue());
// Thumbnail and preprocessed-metadata requests are not full downloads,
// so these should not produce guestbook entries:
if (di.getGbr() != null && !(isThumbnailDownload(di) || isPreprocessedMetadataDownload(di))) {
try {
logger.fine("writing guestbook response.");
Command<?> cmd = new CreateGuestbookResponseCommand(di.getDataverseRequestService().getDataverseRequest(), di.getGbr(), di.getGbr().getDataFile().getOwner());
di.getCommand().submit(cmd);
MakeDataCountEntry entry = new MakeDataCountEntry(di.getRequestUriInfo(), di.getRequestHttpHeaders(), di.getDataverseRequestService(), di.getGbr().getDataFile());
mdcLogService.logEntry(entry);
} catch (CommandException e) {
// best-effort: a failed guestbook write should not fail the download.
}
} else {
logger.fine("not writing guestbook response");
}
outstream.close();
return;
}
}
}
}
// Nothing could be streamed for this request - report 404.
throw new NotFoundException();
}
Use of javax.ws.rs.ServiceUnavailableException in the IQSS/dataverse project:
the Meta class, datafile method.
// Because this API is deprecated, we prefer to continue letting it operate on fileId rather adding support for persistent identifiers.
/**
 * Returns the DDI XML metadata fragment for the given datafile, served as an
 * attachment named after the file label (".tab" swapped for "-ddi.xml").
 *
 * @param fileId         database id of the datafile.
 * @param fileMetadataId optional specific FileMetadata version to export.
 * @param exclude        DDI fields to exclude from the export.
 * @param include        DDI fields to include in the export.
 * @return the DDI XML as a String.
 * @throws NotFoundException if no datafile exists for the id.
 * @throws ServiceUnavailableException if the DDI export fails.
 */
@Deprecated
@Path("datafile/{fileId}")
@GET
@Produces({ "text/xml" })
public String datafile(@PathParam("fileId") Long fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/
{
    DataFile dataFile = datafileService.find(fileId);
    if (dataFile == null) {
        throw new NotFoundException();
    }
    // BUG FIX: a stray "dataverse_files.zip" Content-disposition header used to be
    // set here first (copy-pasted from the zip download code) and then immediately
    // overwritten - and it also leaked onto 404 responses. Only the XML headers
    // are set now, and only once we know the file exists.
    String fileName = dataFile.getFileMetadata().getLabel().replaceAll("\\.tab$", "-ddi.xml");
    response.setHeader("Content-disposition", "attachment; filename=\"" + fileName + "\"");
    response.setHeader("Content-Type", "application/xml; name=\"" + fileName + "\"");
    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    try {
        ddiExportService.exportDataFile(fileId, outStream, exclude, include, fileMetadataId);
        return outStream.toString();
    } catch (Exception e) {
        // We return Service Unavailable.
        throw new ServiceUnavailableException();
    }
}
Aggregations