Use of org.apache.commons.fileupload.FileItemStream in the Apache Jena project.
From the class DataUploader, method fileUploadMultipart.
/**
 * Process an HTTP upload of RDF files (triples or quads) with content type
 * "multipart/form-data" or "multipart/mixed".
 * <p>
 * Form data (content-disposition: form-data; name="...") is rejected.
 * <p>
 * Data is streamed straight into the destination graph or dataset.
 * <p>
 * This function assumes it is inside a transaction.
 *
 * @param action the HTTP action wrapping the request; also used for logging
 * @param dest   destination stream the parsed triples/quads are pushed into
 * @return parse counts for all uploaded files combined, or never (via a
 *         thrown {@code ActionErrorException}) on a bad request
 */
private static UploadDetails fileUploadMultipart(HttpAction action, StreamRDF dest) {
// Base URI for resolving relative IRIs in the uploaded content.
String base = ActionLib.wholeRequestURL(action.getRequest());
ServletFileUpload upload = new ServletFileUpload();
// Counting wrapper around the destination: accumulates totals across all files.
StreamRDFCounting countingDest = StreamRDFLib.count(dest);
try {
FileItemIterator iter = upload.getItemIterator(action.getRequest());
while (iter.hasNext()) {
FileItemStream fileStream = iter.next();
if (fileStream.isFormField()) {
// Form field - this code only supports multipart file upload.
String fieldName = fileStream.getFieldName();
InputStream stream = fileStream.openStream();
String value = Streams.asString(stream, "UTF-8");
// This code is currently used to put multiple files into a single destination.
// Additional field/values do not make sense.
ServletOps.errorBadRequest(format("Only files accepted in multipart file upload (got %s=%s)", fieldName, value));
// errorBadRequest does not return.
return null;
}
InputStream input = fileStream.openStream();
// Content-Type:
// Syntax determination: Content-Type first, then the filename extension.
String contentTypeHeader = fileStream.getContentType();
ContentType ct = ContentType.create(contentTypeHeader);
Lang lang = null;
// text/plain is deliberately not mapped to a syntax; fall through to the filename.
if (!matchContentType(ctTextPlain, ct))
lang = RDFLanguages.contentTypeToLang(ct.getContentTypeStr());
if (lang == null) {
// Not a recognized Content-Type. Look at file extension.
String name = fileStream.getName();
if (name == null || name.equals(""))
ServletOps.errorBadRequest("No name for content - can't determine RDF syntax");
lang = RDFLanguages.pathnameToLang(name);
// Transparently decompress gzip-compressed uploads.
if (name.endsWith(".gz"))
input = new GZIPInputStream(input);
}
if (lang == null)
// Desperate.
lang = RDFLanguages.RDFXML;
// Filename used only for log messages.
String printfilename = fileStream.getName();
if (printfilename == null || printfilename.equals(""))
printfilename = "<none>";
// count just this step
// Second counter, layered over the overall one, so each file is logged with its own counts.
StreamRDFCounting countingDest2 = StreamRDFLib.count(countingDest);
try {
ActionLib.parse(action, countingDest2, input, lang, base);
UploadDetails details1 = new UploadDetails(countingDest2.count(), countingDest2.countTriples(), countingDest2.countQuads());
action.log.info(format("[%d] Filename: %s, Content-Type=%s, Charset=%s => %s : %s", action.id, printfilename, ct.getContentTypeStr(), ct.getCharset(), lang.getName(), details1.detailsStr()));
} catch (RiotParseException ex) {
action.log.info(format("[%d] Filename: %s, Content-Type=%s, Charset=%s => %s : %s", action.id, printfilename, ct.getContentTypeStr(), ct.getCharset(), lang.getName(), ex.getMessage()));
// Drain the rest of the request body before rethrowing.
ActionLib.consumeBody(action);
throw ex;
}
}
} catch (ActionErrorException ex) {
// Already an HTTP-error signal - propagate unchanged.
throw ex;
} catch (Exception ex) {
ServletOps.errorOccurred(ex.getMessage());
}
// Overall results.
UploadDetails details = new UploadDetails(countingDest.count(), countingDest.countTriples(), countingDest.countQuads());
return details;
}
Use of org.apache.commons.fileupload.FileItemStream in the Apache Stanbol project.
From the class ContentItemReader, method createContentItem.
/**
 * Creates a ContentItem
 * @param id the ID or <code>null</code> if not known
 * @param metadata the metadata or <code>null</code> if not parsed. NOTE that
 * if <code>id == null</code> also <code>metadata == null</code> and
 * <code>id != null</code> also <code>metadata != null</code>.
 * @param content the {@link FileItemStream} of the MIME part representing
 * the content. If {@link FileItemStream#getContentType()} is compatible with
 * "multipart/*" than this will further parse for multiple parsed content
 * version. In any other case the contents of the parsed {@link FileItemStream}
 * will be directly add as content for the {@link ContentItem} created by
 * this method.
 * @param parsedContentParts used to add the IDs of parsed contentParts
 * @return the created content item
 * @throws IOException on any error while accessing the contents of the parsed
 * {@link FileItemStream}
 * @throws FileUploadException if the parsed contents are not correctly
 * encoded Multipart MIME, or if a "multipart/*" content did not contain
 * any content part at all
 */
private ContentItem createContentItem(IRI id, Graph metadata, FileItemStream content, Set<String> parsedContentParts) throws IOException, FileUploadException {
    MediaType partContentType = MediaType.valueOf(content.getContentType());
    ContentItem contentItem = null;
    ContentItemFactory ciFactory = getContentItemFactory();
    if (MULTIPART.isCompatible(partContentType)) {
        log.debug(" - multiple (alternate) ContentParts");
        // multiple contentParts are parsed
        FileItemIterator contentPartIterator = fu.getItemIterator(new MessageBodyReaderContext(content.openStream(), partContentType));
        while (contentPartIterator.hasNext()) {
            FileItemStream fis = contentPartIterator.next();
            if (contentItem == null) {
                // The first nested part becomes the ContentItem's main content.
                log.debug(" - create ContentItem {} for content (type:{})", id, fis.getContentType());
                contentItem = ciFactory.createContentItem(id, new StreamSource(fis.openStream(), fis.getContentType()), metadata);
            } else {
                // Every further nested part is added as an additional Blob content part.
                log.debug(" - create Blob for content (type:{})", fis.getContentType());
                Blob blob = ciFactory.createBlob(new StreamSource(fis.openStream(), fis.getContentType()));
                IRI contentPartId = null;
                if (fis.getFieldName() != null && !fis.getFieldName().isEmpty()) {
                    contentPartId = new IRI(fis.getFieldName());
                } else {
                    // generating a random ID might break metadata
                    // TODO maybe we should throw an exception instead
                    contentPartId = new IRI("urn:contentpart:" + randomUUID());
                }
                log.debug(" ... add Blob {} to ContentItem {} with content (type:{})", new Object[] { contentPartId, id, fis.getContentType() });
                contentItem.addPart(contentPartId, blob);
                parsedContentParts.add(contentPartId.getUnicodeString());
            }
        }
    } else {
        log.debug(" - create ContentItem {} for content (type:{})", id, content.getContentType());
        contentItem = ciFactory.createContentItem(id, new StreamSource(content.openStream(), content.getContentType()), metadata);
    }
    if (contentItem == null) {
        // A "multipart/*" part with zero nested parts would otherwise cause a
        // NullPointerException on getPartUri(0) below; report it as a malformed
        // multipart payload instead.
        throw new FileUploadException("The parsed multipart content did not contain any content part");
    }
    // add the URI of the main content to the parsed contentParts
    parsedContentParts.add(contentItem.getPartUri(0).getUnicodeString());
    return contentItem;
}
Use of org.apache.commons.fileupload.FileItemStream in the getting-started-java project by GoogleCloudPlatform.
From the class CreateBookServlet, method doPost.
@Override
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    assert ServletFileUpload.isMultipartContent(req);
    CloudStorageHelper storageHelper = (CloudStorageHelper) getServletContext().getAttribute("storageHelper");
    // Text form fields collected from the multipart body, keyed by field name.
    Map<String, String> formFields = new HashMap<String, String>();
    // Set only when a non-empty file part was uploaded to Cloud Storage.
    String uploadedImageUrl = null;
    try {
        for (FileItemIterator parts = new ServletFileUpload().getItemIterator(req); parts.hasNext(); ) {
            FileItemStream part = parts.next();
            if (part.isFormField()) {
                formFields.put(part.getFieldName(), Streams.asString(part.openStream()));
            } else if (!Strings.isNullOrEmpty(part.getName())) {
                uploadedImageUrl = storageHelper.uploadFile(part, System.getenv("BOOKSHELF_BUCKET"));
            }
        }
    } catch (FileUploadException e) {
        throw new IOException(e);
    }
    // Attribute the new book to the signed-in user, if there is one.
    String createdBy = "";
    String createdById = "";
    HttpSession session = req.getSession();
    if (session.getAttribute("userEmail") != null) {
        // Does the user have a logged in session?
        createdBy = (String) session.getAttribute("userEmail");
        createdById = (String) session.getAttribute("userId");
    }
    // An uploaded file wins over an imageUrl typed into the form.
    String imageUrl = (null == uploadedImageUrl) ? formFields.get("imageUrl") : uploadedImageUrl;
    Book book = new Book.Builder()
        .author(formFields.get("author"))
        .description(formFields.get("description"))
        .publishedDate(formFields.get("publishedDate"))
        .title(formFields.get("title"))
        .imageUrl(imageUrl)
        .createdBy(createdBy)
        .createdById(createdById)
        .build();
    BookDao dao = (BookDao) this.getServletContext().getAttribute("dao");
    String id = dao.createBook(book);
    logger.log(Level.INFO, "Created book {0}", book);
    resp.sendRedirect("/read?id=" + id);
}
Use of org.apache.commons.fileupload.FileItemStream in the getting-started-java project by GoogleCloudPlatform.
From the class UpdateBookServlet, method doPost.
@Override
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    BookDao dao = (BookDao) this.getServletContext().getAttribute("dao");
    assert ServletFileUpload.isMultipartContent(req);
    CloudStorageHelper storageHelper = (CloudStorageHelper) getServletContext().getAttribute("storageHelper");
    // Text form fields collected from the multipart body, keyed by field name.
    Map<String, String> formFields = new HashMap<String, String>();
    // Set only when a non-empty file part was uploaded to Cloud Storage.
    String uploadedImageUrl = null;
    try {
        for (FileItemIterator parts = new ServletFileUpload().getItemIterator(req); parts.hasNext(); ) {
            FileItemStream part = parts.next();
            if (part.isFormField()) {
                formFields.put(part.getFieldName(), Streams.asString(part.openStream()));
            } else if (!Strings.isNullOrEmpty(part.getName())) {
                uploadedImageUrl = storageHelper.uploadFile(part, getServletContext().getInitParameter("bookshelf.bucket"));
            }
        }
    } catch (FileUploadException e) {
        throw new IOException(e);
    }
    try {
        Long bookId = Long.decode(formFields.get("id"));
        // Preserve the original creator; only the editable fields are replaced.
        Book oldBook = dao.readBook(bookId);
        // An uploaded file wins over an imageUrl typed into the form.
        String imageUrl = (null == uploadedImageUrl) ? formFields.get("imageUrl") : uploadedImageUrl;
        Book book = new Book.Builder()
            .author(formFields.get("author"))
            .description(formFields.get("description"))
            .publishedDate(formFields.get("publishedDate"))
            .title(formFields.get("title"))
            .imageUrl(imageUrl)
            .id(bookId)
            .createdBy(oldBook.getCreatedBy())
            .createdById(oldBook.getCreatedById())
            .build();
        dao.updateBook(book);
        resp.sendRedirect("/read?id=" + formFields.get("id"));
    } catch (Exception e) {
        throw new ServletException("Error updating book", e);
    }
}
Use of org.apache.commons.fileupload.FileItemStream in the BIMserver project by opensourceBIM.
From the class BulkUploadServlet, method service.
/**
 * Handles a bulk upload: a multipart request carrying a "token", a project
 * "poid", an optional "comment", and a zip file whose .ifc/.ifcxml/.ifczip
 * entries are each checked in under the project identified by "poid".
 * NOTE(review): this relies on the "poid" form field appearing BEFORE the
 * file part in the multipart body - a file seen while poid is still -1 is
 * reported as "No poid".
 */
@Override
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Reject cross-origin requests from hosts not on the allow-list.
if (request.getHeader("Origin") != null && !getBimServer().getServerSettingsCache().isHostAllowed(request.getHeader("Origin"))) {
response.setStatus(403);
return;
}
response.setHeader("Access-Control-Allow-Origin", request.getHeader("Origin"));
response.setHeader("Access-Control-Allow-Headers", "Content-Type");
// Session token; may be overridden by a "token" form field below.
String token = (String) request.getSession().getAttribute("token");
ObjectNode result = OBJECT_MAPPER.createObjectNode();
response.setContentType("text/json");
try {
boolean isMultipart = ServletFileUpload.isMultipartContent(request);
long poid = -1;
String comment = null;
if (isMultipart) {
ServletFileUpload upload = new ServletFileUpload();
FileItemIterator iter = upload.getItemIterator(request);
InputStream in = null;
String name = "";
while (iter.hasNext()) {
FileItemStream item = iter.next();
if (item.isFormField()) {
// Known form fields: token, poid, comment. Others are ignored.
if ("token".equals(item.getFieldName())) {
token = Streams.asString(item.openStream());
} else if ("poid".equals(item.getFieldName())) {
poid = Long.parseLong(Streams.asString(item.openStream()));
} else if ("comment".equals(item.getFieldName())) {
comment = Streams.asString(item.openStream());
}
} else {
// The file part: treated as a zip archive of IFC models.
name = item.getName();
in = item.openStream();
if (poid != -1) {
ServiceInterface service = getBimServer().getServiceFactory().get(token, AccessMethod.INTERNAL).get(ServiceInterface.class);
SProject mainProject = service.getProjectByPoid(poid);
ZipInputStream zipInputStream = new ZipInputStream(in);
ZipEntry nextEntry = zipInputStream.getNextEntry();
while (nextEntry != null) {
String fullfilename = nextEntry.getName();
if (fullfilename.toLowerCase().endsWith(".ifc") || fullfilename.toLowerCase().endsWith("ifcxml") || fullfilename.toLowerCase().endsWith(".ifczip")) {
// Peek at the head of the entry to detect the IFC schema version,
// while keeping the stream readable for the actual checkin.
BufferedInputStream bufferedInputStream = new BufferedInputStream(zipInputStream);
byte[] initialBytes = ByteUtils.extractHead(bufferedInputStream, 4096);
// FakeClosingInputStream keeps the checkin from closing the shared zip stream.
InputStreamDataSource inputStreamDataSource = new InputStreamDataSource(new FakeClosingInputStream(bufferedInputStream));
inputStreamDataSource.setName(name);
DataHandler ifcFile = new DataHandler(inputStreamDataSource);
// Only entries inside a subdirectory are checked in; the path is
// mirrored as a chain of (sub)projects under the main project.
if (fullfilename.contains("/")) {
String path = fullfilename.substring(0, fullfilename.lastIndexOf("/"));
String filename = fullfilename.substring(fullfilename.lastIndexOf("/") + 1);
String extension = filename.substring(filename.lastIndexOf(".") + 1);
try {
String schema = service.determineIfcVersion(initialBytes, fullfilename.toLowerCase().endsWith(".ifczip"));
SProject project = getOrCreatePath(service, mainProject, mainProject, path, schema);
SDeserializerPluginConfiguration deserializer = service.getSuggestedDeserializerForExtension(extension, project.getOid());
service.checkinSync(project.getOid(), comment, deserializer.getOid(), -1L, filename, ifcFile, false);
} catch (Exception e) {
// A failed entry is logged and skipped; the rest of the zip is still processed.
LOGGER.error(e.getMessage() + " (" + fullfilename + ")");
}
}
} else {
if (!nextEntry.isDirectory()) {
LOGGER.info("Unknown fileextenstion " + fullfilename);
}
}
nextEntry = zipInputStream.getNextEntry();
}
// DataHandler ifcFile = new DataHandler(inputStreamDataSource);
//
// if (token != null) {
// if (topicId == -1) {
// long newTopicId = service.checkin(poid, comment, deserializerOid, -1L, name, ifcFile, merge, sync);
// result.put("topicId", newTopicId);
// } else {
// ServiceInterface service = getBimServer().getServiceFactory().get(token, AccessMethod.INTERNAL).get(ServiceInterface.class);
// long newTopicId = service.checkinInitiated(topicId, poid, comment, deserializerOid, -1L, name, ifcFile, merge, true);
// result.put("topicId", newTopicId);
// }
// }
} else {
result.put("exception", "No poid");
}
}
}
}
} catch (Exception e) {
// NOTE(review): on this path the method returns without writing a response
// body, so the client gets an empty "text/json" reply.
LOGGER.error("", e);
// sendException(response, e);
return;
}
response.getWriter().write(result.toString());
}
Aggregations