use of org.apache.commons.fileupload.FileItemIterator in project endpoints-java by cloudendpoints.
the class RestServletRequestParamReader method read.
@Override
public Object[] read() throws ServiceException {
  // TODO: Take charset from content-type as encoding
  try {
    EndpointMethod method = getMethod();
    if (method.getParameterClasses().length == 0) {
      return new Object[0];
    }
    HttpServletRequest servletRequest = endpointsContext.getRequest();
    JsonNode node;
    // If the request is multipart, each part represents a named parameter instead.
    if (ServletFileUpload.isMultipartContent(servletRequest)) {
      try {
        ServletFileUpload upload = new ServletFileUpload();
        FileItemIterator iter = upload.getItemIterator(servletRequest);
        ObjectNode obj = (ObjectNode) objectReader.createObjectNode();
        while (iter.hasNext()) {
          FileItemStream item = iter.next();
          if (item.isFormField()) {
            obj.put(item.getFieldName(), IoUtil.readStream(item.openStream()));
          } else {
            throw new BadRequestException("unable to parse multipart form field");
          }
        }
        node = obj;
      } catch (FileUploadException e) {
        throw new BadRequestException("unable to parse multipart request", e);
      }
    } else {
      String requestBody = IoUtil.readRequestBody(servletRequest);
      logger.atFine().log("requestBody=%s", requestBody);
      // Unlike the Lily protocol, which essentially always requires a JSON body to exist (due to
      // path and query parameters being injected into the body), bodies are optional here, so we
      // create an empty body and inject named parameters to make deserialization work.
      node = Strings.isEmptyOrWhitespace(requestBody)
          ? objectReader.createObjectNode()
          : objectReader.readTree(requestBody);
    }
    if (!node.isObject()) {
      throw new BadRequestException("expected a JSON object body");
    }
    ObjectNode body = (ObjectNode) node;
    Map<String, Class<?>> parameterMap = getParameterMap(method);
    // The order of precedence is resource field > query parameter > path parameter.
    for (Enumeration<?> e = servletRequest.getParameterNames(); e.hasMoreElements(); ) {
      String parameterName = (String) e.nextElement();
      if (!body.has(parameterName)) {
        Class<?> parameterClass = parameterMap.get(parameterName);
        ApiParameterConfig parameterConfig = parameterConfigMap.get(parameterName);
        if (parameterClass != null && parameterConfig.isRepeated()) {
          ArrayNode values = body.putArray(parameterName);
          for (String value : servletRequest.getParameterValues(parameterName)) {
            values.add(value);
          }
        } else {
          body.put(parameterName, servletRequest.getParameterValues(parameterName)[0]);
        }
      }
    }
    for (Entry<String, String> entry : rawPathParameters.entrySet()) {
      String parameterName = entry.getKey();
      Class<?> parameterClass = parameterMap.get(parameterName);
      if (parameterClass != null && !body.has(parameterName)) {
        if (parameterConfigMap.get(parameterName).isRepeated()) {
          ArrayNode values = body.putArray(parameterName);
          for (String value : COMPOSITE_PATH_SPLITTER.split(entry.getValue())) {
            values.add(value);
          }
        } else {
          body.put(parameterName, entry.getValue());
        }
      }
    }
    for (Entry<String, ApiParameterConfig> entry : parameterConfigMap.entrySet()) {
      if (!body.has(entry.getKey()) && entry.getValue().getDefaultValue() != null) {
        body.put(entry.getKey(), entry.getValue().getDefaultValue());
      }
    }
    return deserializeParams(body);
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | IOException e) {
    logger.atInfo().withCause(e).log("Unable to read request parameter(s)");
    throw new BadRequestException(e);
  }
}
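Each of the usages collected on this page follows the same streaming pattern: build a ServletFileUpload without an item factory, obtain a FileItemIterator, and read every FileItemStream exactly once. A minimal, self-contained sketch of that pattern (the class and method names are illustrative, not taken from endpoints-java):

import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;

public final class MultipartFormFields {

  /** Reads every simple form field of a multipart request into a map, streaming part by part. */
  public static Map<String, String> collectFormFields(HttpServletRequest request)
      throws FileUploadException, IOException {
    Map<String, String> fields = new LinkedHashMap<>();
    if (!ServletFileUpload.isMultipartContent(request)) {
      return fields; // nothing to do for non-multipart requests
    }
    // No DiskFileItemFactory: parts are streamed, never buffered to disk.
    ServletFileUpload upload = new ServletFileUpload();
    FileItemIterator iter = upload.getItemIterator(request);
    while (iter.hasNext()) {
      FileItemStream item = iter.next();
      try (InputStream stream = item.openStream()) {
        if (item.isFormField()) {
          fields.put(item.getFieldName(), Streams.asString(stream, "UTF-8"));
        }
        // A file part's stream would need to be consumed here, before the iterator advances.
      }
    }
    return fields;
  }
}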
use of org.apache.commons.fileupload.FileItemIterator in project jena by apache.
the class Upload method fileUploadWorker.
/**
 * Process an HTTP upload of RDF files (triples or quads)
 * Stream straight into a graph or dataset -- unlike SPARQL_Upload the destination
 * is known at the start of the multipart file body
 */
public static UploadDetails fileUploadWorker(HttpAction action, StreamRDF dest) {
  String base = ActionLib.wholeRequestURL(action.request);
  ServletFileUpload upload = new ServletFileUpload();
  // log.info(format("[%d] Upload: Field=%s ignored", action.id, fieldName)) ;
  // Overall counting.
  StreamRDFCounting countingDest = StreamRDFLib.count(dest);
  try {
    FileItemIterator iter = upload.getItemIterator(action.request);
    while (iter.hasNext()) {
      FileItemStream fileStream = iter.next();
      if (fileStream.isFormField()) {
        // Ignore?
        String fieldName = fileStream.getFieldName();
        InputStream stream = fileStream.openStream();
        String value = Streams.asString(stream, "UTF-8");
        ServletOps.errorBadRequest(format("Only files accepted in multipart file upload (got %s=%s)", fieldName, value));
      }
      // Ignore the field name.
      // String fieldName = fileStream.getFieldName();
      InputStream stream = fileStream.openStream();
      // Process the input stream
      String contentTypeHeader = fileStream.getContentType();
      ContentType ct = ContentType.create(contentTypeHeader);
      Lang lang = null;
      if (!matchContentType(ctTextPlain, ct))
        lang = RDFLanguages.contentTypeToLang(ct.getContentType());
      if (lang == null) {
        String name = fileStream.getName();
        if (name == null || name.equals(""))
          ServletOps.errorBadRequest("No name for content - can't determine RDF syntax");
        lang = RDFLanguages.filenameToLang(name);
        if (name.endsWith(".gz"))
          stream = new GZIPInputStream(stream);
      }
      if (lang == null)
        // Desperate.
        lang = RDFLanguages.RDFXML;
      String printfilename = fileStream.getName();
      if (printfilename == null || printfilename.equals(""))
        printfilename = "<none>";
      // Before
      // action.log.info(format("[%d] Filename: %s, Content-Type=%s, Charset=%s => %s",
      //   action.id, printfilename, ct.getContentType(), ct.getCharset(), lang.getName())) ;
      // count just this step
      StreamRDFCounting countingDest2 = StreamRDFLib.count(countingDest);
      try {
        ActionSPARQL.parse(action, countingDest2, stream, lang, base);
        UploadDetails details1 = new UploadDetails(countingDest2.count(), countingDest2.countTriples(), countingDest2.countQuads());
        action.log.info(format("[%d] Filename: %s, Content-Type=%s, Charset=%s => %s : %s", action.id, printfilename, ct.getContentType(), ct.getCharset(), lang.getName(), details1.detailsStr()));
      } catch (RiotParseException ex) {
        action.log.info(format("[%d] Filename: %s, Content-Type=%s, Charset=%s => %s : %s", action.id, printfilename, ct.getContentType(), ct.getCharset(), lang.getName(), ex.getMessage()));
        throw ex;
      }
    }
  } catch (ActionErrorException ex) {
    throw ex;
  } catch (Exception ex) {
    ServletOps.errorOccurred(ex.getMessage());
  }
  // Overall results.
  UploadDetails details = new UploadDetails(countingDest.count(), countingDest.countTriples(), countingDest.countQuads());
  return details;
}
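The syntax-detection chain in the worker above (content type first, then filename, then RDF/XML as a last resort, with transparent handling of .gz uploads) can be read in isolation. A rough sketch using the same Jena APIs; the LangGuess helper is an illustration for this page, not a Fuseki class, and it omits the text/plain special case of the original:

import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import org.apache.jena.atlas.web.ContentType;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFLanguages;

final class LangGuess {
  final Lang lang;
  final InputStream stream;

  LangGuess(Lang lang, InputStream stream) {
    this.lang = lang;
    this.stream = stream;
  }

  /** Guess the RDF syntax of an uploaded part from its Content-Type, then from its filename. */
  static LangGuess guess(String contentTypeHeader, String filename, InputStream stream)
      throws IOException {
    Lang lang = null;
    if (contentTypeHeader != null) {
      ContentType ct = ContentType.create(contentTypeHeader);
      lang = RDFLanguages.contentTypeToLang(ct.getContentType());
    }
    if (lang == null && filename != null && !filename.isEmpty()) {
      lang = RDFLanguages.filenameToLang(filename);
      if (filename.endsWith(".gz")) {
        stream = new GZIPInputStream(stream); // decompress transparently, as the worker does
      }
    }
    if (lang == null) {
      lang = RDFLanguages.RDFXML; // last-resort default, mirroring the worker above
    }
    return new LangGuess(lang, stream);
  }
}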
use of org.apache.commons.fileupload.FileItemIterator in project HongsCORE by ihongs.
the class BinaryUploader method save.
public static final State save(HttpServletRequest request, Map<String, Object> conf) {
  FileItemStream fileStream = null;
  boolean isAjaxUpload = request.getHeader("X_Requested_With") != null;
  if (!ServletFileUpload.isMultipartContent(request)) {
    return new BaseState(false, AppInfo.NOT_MULTIPART_CONTENT);
  }
  ServletFileUpload upload = new ServletFileUpload(new DiskFileItemFactory());
  if (isAjaxUpload) {
    upload.setHeaderEncoding("UTF-8");
  }
  try {
    FileItemIterator iterator = upload.getItemIterator(request);
    while (iterator.hasNext()) {
      fileStream = iterator.next();
      if (!fileStream.isFormField())
        break;
      fileStream = null;
    }
    if (fileStream == null) {
      return new BaseState(false, AppInfo.NOTFOUND_UPLOAD_DATA);
    }
    String savePath = (String) conf.get("savePath");
    String originFileName = fileStream.getName();
    String suffix = FileType.getSuffixByFilename(originFileName);
    originFileName = originFileName.substring(0, originFileName.length() - suffix.length());
    savePath = savePath + suffix;
    long maxSize = ((Long) conf.get("maxSize")).longValue();
    if (!validType(suffix, (String[]) conf.get("allowFiles"))) {
      return new BaseState(false, AppInfo.NOT_ALLOW_FILE_TYPE);
    }
    savePath = PathFormat.parse(savePath, originFileName);
    // modified by Ternence
    String rootPath = ConfigManager.getRootPath(request, conf);
    String physicalPath = rootPath + savePath;
    InputStream is = fileStream.openStream();
    State storageState = StorageManager.saveFileByInputStream(is, physicalPath, maxSize);
    is.close();
    if (storageState.isSuccess()) {
      storageState.putInfo("url", PathFormat.format(savePath));
      storageState.putInfo("type", suffix);
      storageState.putInfo("original", originFileName + suffix);
    }
    return storageState;
  } catch (FileUploadException e) {
    return new BaseState(false, AppInfo.PARSE_REQUEST_ERROR);
  } catch (IOException e) {
    // fall through to the generic I/O error below
  }
  return new BaseState(false, AppInfo.IO_ERROR);
}
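The uploader above skips ordinary form fields and keeps only the first file part. A stripped-down sketch of that selection step using just commons-fileupload and java.io; the saveFirstFilePart helper is illustrative and leaves out the project's suffix and size checks:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;

public final class FirstFilePart {

  /** Streams the first non-form-field part of a multipart request to the given file. */
  public static boolean saveFirstFilePart(HttpServletRequest request, File target)
      throws FileUploadException, IOException {
    if (!ServletFileUpload.isMultipartContent(request)) {
      return false;
    }
    FileItemIterator iterator = new ServletFileUpload().getItemIterator(request);
    while (iterator.hasNext()) {
      FileItemStream item = iterator.next();
      if (item.isFormField()) {
        continue; // ignore ordinary fields, exactly like the uploader above
      }
      try (InputStream in = item.openStream();
           OutputStream out = new FileOutputStream(target)) {
        Streams.copy(in, out, false); // false: try-with-resources closes the output stream
      }
      return true; // only the first file part is kept
    }
    return false;
  }
}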
use of org.apache.commons.fileupload.FileItemIterator in project opencast by opencast.
the class IngestRestService method addMediaPackageElement.
protected Response addMediaPackageElement(HttpServletRequest request, MediaPackageElement.Type type) {
  MediaPackageElementFlavor flavor = null;
  InputStream in = null;
  try {
    String fileName = null;
    MediaPackage mp = null;
    Long startTime = null;
    String[] tags = null;
    /* Only accept multipart/form-data */
    if (!ServletFileUpload.isMultipartContent(request)) {
      logger.trace("request isn't multipart-form-data");
      return Response.serverError().status(Status.BAD_REQUEST).build();
    }
    boolean isDone = false;
    for (FileItemIterator iter = new ServletFileUpload().getItemIterator(request); iter.hasNext(); ) {
      FileItemStream item = iter.next();
      String fieldName = item.getFieldName();
      if (item.isFormField()) {
        if ("flavor".equals(fieldName)) {
          String flavorString = Streams.asString(item.openStream(), "UTF-8");
          logger.trace("flavor: {}", flavorString);
          if (flavorString != null) {
            flavor = MediaPackageElementFlavor.parseFlavor(flavorString);
          }
        } else if ("tags".equals(fieldName)) {
          String tagsString = Streams.asString(item.openStream(), "UTF-8");
          logger.trace("tags: {}", tagsString);
          tags = tagsString.split(",");
        } else if ("mediaPackage".equals(fieldName)) {
          try {
            String mediaPackageString = Streams.asString(item.openStream(), "UTF-8");
            logger.trace("mediaPackage: {}", mediaPackageString);
            mp = factory.newMediaPackageBuilder().loadFromXml(mediaPackageString);
          } catch (MediaPackageException e) {
            logger.debug("Unable to parse the 'mediaPackage' parameter: {}", ExceptionUtils.getMessage(e));
            return Response.serverError().status(Status.BAD_REQUEST).build();
          }
        } else if ("startTime".equals(fieldName) && "/addPartialTrack".equals(request.getPathInfo())) {
          String startTimeString = Streams.asString(item.openStream(), "UTF-8");
          logger.trace("startTime: {}", startTimeString);
          try {
            startTime = Long.parseLong(startTimeString);
          } catch (Exception e) {
            logger.debug("Unable to parse the 'startTime' parameter: {}", ExceptionUtils.getMessage(e));
            return Response.serverError().status(Status.BAD_REQUEST).build();
          }
        }
      } else {
        if (flavor == null) {
          /* A flavor has to be specified in the request prior to the video file */
          logger.debug("A flavor has to be specified in the request prior to the content BODY");
          return Response.serverError().status(Status.BAD_REQUEST).build();
        }
        fileName = item.getName();
        in = item.openStream();
        isDone = true;
      }
      if (isDone) {
        break;
      }
    }
    /*
     * Check if we actually got a valid request including a message body and a valid mediapackage to attach the
     * element to
     */
    if (in == null || mp == null || MediaPackageSupport.sanityCheck(mp).isSome()) {
      return Response.serverError().status(Status.BAD_REQUEST).build();
    }
    switch (type) {
      case Attachment:
        mp = ingestService.addAttachment(in, fileName, flavor, tags, mp);
        break;
      case Catalog:
        mp = ingestService.addCatalog(in, fileName, flavor, tags, mp);
        break;
      case Track:
        if (startTime == null) {
          mp = ingestService.addTrack(in, fileName, flavor, tags, mp);
        } else {
          mp = ingestService.addPartialTrack(in, fileName, flavor, startTime, mp);
        }
        break;
      default:
        throw new IllegalStateException("Type must be one of track, catalog, or attachment");
    }
    return Response.ok(MediaPackageParser.getAsXml(mp)).build();
  } catch (Exception e) {
    logger.warn(e.getMessage(), e);
    return Response.serverError().status(Status.INTERNAL_SERVER_ERROR).build();
  } finally {
    IOUtils.closeQuietly(in);
  }
}
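Because the loop above stops at the first file part, clients must send the metadata fields (flavor, tags, mediaPackage, startTime) before the file itself. A hedged client-side sketch using Apache HttpClient's MultipartEntityBuilder, which writes parts in the order they are added; the endpoint URL, field values, and file name here are placeholders for illustration, not Opencast defaults:

import java.io.File;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public final class AddTrackClient {

  public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
      HttpPost post = new HttpPost("http://localhost:8080/ingest/addTrack"); // placeholder URL
      post.setEntity(MultipartEntityBuilder.create()
          // Metadata parts first: the server reads parts in order and stops at the file.
          .addTextBody("flavor", "presentation/source")
          .addTextBody("mediaPackage", "<mediapackage/>") // placeholder media package XML
          // The file part must come last.
          .addBinaryBody("track", new File("recording.mp4"),
              ContentType.create("video/mp4"), "recording.mp4")
          .build());
      HttpResponse response = client.execute(post);
      System.out.println(response.getStatusLine());
    }
  }
}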
use of org.apache.commons.fileupload.FileItemIterator in project opencast by opencast.
the class IngestRestService method ingestZippedMediaPackage.
private Response ingestZippedMediaPackage(HttpServletRequest request, String wdID, String wiID) {
  if (isIngestLimitEnabled()) {
    setIngestLimit(getIngestLimit() - 1);
    logger.debug("An ingest has started so remaining ingest limit is " + getIngestLimit());
  }
  InputStream in = null;
  Date started = new Date();
  logger.info("Received new request from {} to ingest a zipped mediapackage", request.getRemoteHost());
  try {
    String workflowDefinitionId = wdID;
    String workflowIdAsString = wiID;
    Long workflowInstanceIdAsLong = null;
    Map<String, String> workflowConfig = new HashMap<>();
    if (ServletFileUpload.isMultipartContent(request)) {
      boolean isDone = false;
      for (FileItemIterator iter = new ServletFileUpload().getItemIterator(request); iter.hasNext(); ) {
        FileItemStream item = iter.next();
        if (item.isFormField()) {
          String fieldName = item.getFieldName();
          String value = Streams.asString(item.openStream(), "UTF-8");
          logger.trace("{}: {}", fieldName, value);
          if (WORKFLOW_INSTANCE_ID_PARAM.equals(fieldName)) {
            workflowIdAsString = value;
            continue;
          } else if (WORKFLOW_DEFINITION_ID_PARAM.equals(fieldName)) {
            workflowDefinitionId = value;
            continue;
          } else {
            logger.debug("Processing form field: " + fieldName);
            workflowConfig.put(fieldName, value);
          }
        } else {
          logger.debug("Processing file item");
          // once the body gets read iter.hasNext must not be invoked or the stream can not be read
          // MH-9579
          in = item.openStream();
          isDone = true;
        }
        if (isDone)
          break;
      }
    } else {
      logger.debug("Processing file item");
      in = request.getInputStream();
    }
    // Adding ingest start time to workflow configuration
    DateFormat formatter = new SimpleDateFormat(IngestService.UTC_DATE_FORMAT);
    workflowConfig.put(IngestService.START_DATE_KEY, formatter.format(started));
    /* Legacy support: Try to convert the workflowId to integer */
    if (!StringUtils.isBlank(workflowIdAsString)) {
      try {
        workflowInstanceIdAsLong = Long.parseLong(workflowIdAsString);
      } catch (NumberFormatException e) {
        // The workflowId is not a long value and might be the media package identifier
        workflowConfig.put(IngestServiceImpl.LEGACY_MEDIAPACKAGE_ID_KEY, workflowIdAsString);
      }
    }
    if (StringUtils.isBlank(workflowDefinitionId)) {
      workflowDefinitionId = defaultWorkflowDefinitionId;
    }
    WorkflowInstance workflow;
    if (workflowInstanceIdAsLong != null) {
      workflow = ingestService.addZippedMediaPackage(in, workflowDefinitionId, workflowConfig, workflowInstanceIdAsLong);
    } else {
      workflow = ingestService.addZippedMediaPackage(in, workflowDefinitionId, workflowConfig);
    }
    return Response.ok(WorkflowParser.toXml(workflow)).build();
  } catch (NotFoundException e) {
    logger.info(e.getMessage());
    return Response.status(Status.NOT_FOUND).build();
  } catch (MediaPackageException e) {
    logger.warn(e.getMessage());
    return Response.serverError().status(Status.BAD_REQUEST).build();
  } catch (Exception e) {
    logger.warn(e.getMessage(), e);
    return Response.serverError().status(Status.INTERNAL_SERVER_ERROR).build();
  } finally {
    IOUtils.closeQuietly(in);
    if (isIngestLimitEnabled()) {
      setIngestLimit(getIngestLimit() + 1);
      logger.debug("An ingest has finished so increased ingest limit to " + getIngestLimit());
    }
  }
}
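The MH-9579 comment in the loop above points at the central constraint of FileItemIterator: a part's stream is only valid until the iterator is advanced, so the body must be consumed, or the loop left, before hasNext() is called again. A hedged sketch of that pattern; the BodyHandler callback is illustrative and stands in for Opencast's ingest service:

import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;

public final class OnePassMultipart {

  /** Callback that must fully consume the stream before returning. */
  public interface BodyHandler {
    void handle(InputStream body, Map<String, String> fields) throws IOException;
  }

  /**
   * Collects form fields until the first file part appears, then hands that part's stream
   * to the handler. The iterator is never advanced while the body stream is still in use.
   */
  public static void process(HttpServletRequest request, BodyHandler handler)
      throws FileUploadException, IOException {
    Map<String, String> fields = new HashMap<>();
    FileItemIterator iter = new ServletFileUpload().getItemIterator(request);
    while (iter.hasNext()) {
      FileItemStream item = iter.next();
      if (item.isFormField()) {
        fields.put(item.getFieldName(), Streams.asString(item.openStream(), "UTF-8"));
      } else {
        // Do NOT call iter.hasNext() after this point: advancing the iterator
        // invalidates the current part's stream (the MH-9579 pitfall noted above).
        handler.handle(item.openStream(), fields);
        return;
      }
    }
  }
}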