Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in project pinot by linkedin.
The class PinotSchemaRestletResource, method getUploadContents.
private File getUploadContents() throws Exception {
    File dataFile = null;
    // 1/ Create a factory for disk-based file items
    Representation rep;
    final DiskFileItemFactory factory = new DiskFileItemFactory();
    // 2/ Create a new file upload handler based on the Restlet
    // FileUpload extension that will parse Restlet requests and
    // generate FileItems.
    final RestletFileUpload upload = new RestletFileUpload(factory);
    final List<FileItem> items;
    // 3/ The request is parsed by the handler, which generates a
    // list of FileItems.
    items = upload.parseRequest(getRequest());
    final Iterator<FileItem> it = items.iterator();
    while (it.hasNext() && dataFile == null) {
        final FileItem fi = it.next();
        if (fi.getFieldName() != null) {
            dataFile = new File(tempDir, fi.getFieldName() + "-" + System.currentTimeMillis());
            fi.write(dataFile);
        }
    }
    return dataFile;
}
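The Pinot snippet relies on DiskFileItemFactory's defaults, which keep items up to about 10 KB in memory and spill larger ones to the system temp directory. As a minimal sketch that is not part of the Pinot code, the factory could be tuned before handing it to the Restlet upload handler; the threshold value and spill directory below are illustrative assumptions.

import java.io.File;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.restlet.ext.fileupload.RestletFileUpload;

public class TunedFactoryExample {

    // Illustrative values, not taken from the Pinot code.
    static RestletFileUpload newUploadHandler(File spillDir) {
        DiskFileItemFactory factory = new DiskFileItemFactory();
        factory.setSizeThreshold(1024 * 1024); // keep items up to 1 MB in memory
        factory.setRepository(spillDir);       // write larger items to spillDir
        return new RestletFileUpload(factory);
    }
}

Raising the threshold trades heap usage for fewer temporary files on disk, which can matter when many small schema files are uploaded concurrently.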
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in project OpenRefine by OpenRefine.
The class ImportingUtilities, method retrieveContentFromPostRequest.
public static void retrieveContentFromPostRequest(HttpServletRequest request, Properties parameters, File rawDataDir, JSONObject retrievalRecord, final Progress progress) throws Exception {
    JSONArray fileRecords = new JSONArray();
    JSONUtilities.safePut(retrievalRecord, "files", fileRecords);
    int clipboardCount = 0;
    int uploadCount = 0;
    int downloadCount = 0;
    int archiveCount = 0;
    // This tracks the total progress, which involves uploading data from the client
    // as well as downloading data from URLs.
    final SavingUpdate update = new SavingUpdate() {

        @Override
        public void savedMore() {
            progress.setProgress(null, calculateProgressPercent(totalExpectedSize, totalRetrievedSize));
        }

        @Override
        public boolean isCanceled() {
            return progress.isCanceled();
        }
    };
    DiskFileItemFactory fileItemFactory = new DiskFileItemFactory();
    ServletFileUpload upload = new ServletFileUpload(fileItemFactory);
    upload.setProgressListener(new ProgressListener() {

        boolean setContentLength = false;
        long lastBytesRead = 0;

        @Override
        public void update(long bytesRead, long contentLength, int itemCount) {
            if (!setContentLength) {
                // Only try to set the content length if we really know it.
                if (contentLength >= 0) {
                    update.totalExpectedSize += contentLength;
                    setContentLength = true;
                }
            }
            if (setContentLength) {
                update.totalRetrievedSize += (bytesRead - lastBytesRead);
                lastBytesRead = bytesRead;
                update.savedMore();
            }
        }
    });
    @SuppressWarnings("unchecked")
    List<FileItem> tempFiles = (List<FileItem>) upload.parseRequest(request);
    progress.setProgress("Uploading data ...", -1);
    parts: for (FileItem fileItem : tempFiles) {
        if (progress.isCanceled()) {
            break;
        }
        InputStream stream = fileItem.getInputStream();
        String name = fileItem.getFieldName().toLowerCase();
        if (fileItem.isFormField()) {
            if (name.equals("clipboard")) {
                String encoding = request.getCharacterEncoding();
                if (encoding == null) {
                    encoding = "UTF-8";
                }
                File file = allocateFile(rawDataDir, "clipboard.txt");
                JSONObject fileRecord = new JSONObject();
                JSONUtilities.safePut(fileRecord, "origin", "clipboard");
                JSONUtilities.safePut(fileRecord, "declaredEncoding", encoding);
                JSONUtilities.safePut(fileRecord, "declaredMimeType", (String) null);
                JSONUtilities.safePut(fileRecord, "format", "text");
                JSONUtilities.safePut(fileRecord, "fileName", "(clipboard)");
                JSONUtilities.safePut(fileRecord, "location", getRelativePath(file, rawDataDir));
                progress.setProgress("Uploading pasted clipboard text", calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));
                JSONUtilities.safePut(fileRecord, "size", saveStreamToFile(stream, file, null));
                JSONUtilities.append(fileRecords, fileRecord);
                clipboardCount++;
            } else if (name.equals("download")) {
                String urlString = Streams.asString(stream);
                URL url = new URL(urlString);
                JSONObject fileRecord = new JSONObject();
                JSONUtilities.safePut(fileRecord, "origin", "download");
                JSONUtilities.safePut(fileRecord, "url", urlString);
                for (UrlRewriter rewriter : ImportingManager.urlRewriters) {
                    Result result = rewriter.rewrite(urlString);
                    if (result != null) {
                        urlString = result.rewrittenUrl;
                        url = new URL(urlString);
                        JSONUtilities.safePut(fileRecord, "url", urlString);
                        JSONUtilities.safePut(fileRecord, "format", result.format);
                        if (!result.download) {
                            downloadCount++;
                            JSONUtilities.append(fileRecords, fileRecord);
                            continue parts;
                        }
                    }
                }
                if ("http".equals(url.getProtocol()) || "https".equals(url.getProtocol())) {
                    DefaultHttpClient client = new DefaultHttpClient();
                    DecompressingHttpClient httpclient = new DecompressingHttpClient(client);
                    HttpGet httpGet = new HttpGet(url.toURI());
                    httpGet.setHeader("User-Agent", RefineServlet.getUserAgent());
                    if ("https".equals(url.getProtocol())) {
                        // HTTPS only - no sending the password in the clear over HTTP
                        String userinfo = url.getUserInfo();
                        if (userinfo != null) {
                            int s = userinfo.indexOf(':');
                            if (s > 0) {
                                String user = userinfo.substring(0, s);
                                String pw = userinfo.substring(s + 1, userinfo.length());
                                client.getCredentialsProvider().setCredentials(new AuthScope(url.getHost(), 443), new UsernamePasswordCredentials(user, pw));
                            }
                        }
                    }
                    HttpResponse response = httpclient.execute(httpGet);
                    try {
                        response.getStatusLine();
                        HttpEntity entity = response.getEntity();
                        if (entity == null) {
                            throw new Exception("No content found in " + url.toString());
                        }
                        InputStream stream2 = entity.getContent();
                        String encoding = null;
                        if (entity.getContentEncoding() != null) {
                            encoding = entity.getContentEncoding().getValue();
                        }
                        JSONUtilities.safePut(fileRecord, "declaredEncoding", encoding);
                        String contentType = null;
                        if (entity.getContentType() != null) {
                            contentType = entity.getContentType().getValue();
                        }
                        JSONUtilities.safePut(fileRecord, "declaredMimeType", contentType);
                        if (saveStream(stream2, url, rawDataDir, progress, update, fileRecord, fileRecords, entity.getContentLength())) {
                            archiveCount++;
                        }
                        downloadCount++;
                        EntityUtils.consume(entity);
                    } finally {
                        httpGet.releaseConnection();
                    }
                } else {
                    // Fallback handling for non-HTTP connections (only FTP?)
                    URLConnection urlConnection = url.openConnection();
                    urlConnection.setConnectTimeout(5000);
                    urlConnection.connect();
                    InputStream stream2 = urlConnection.getInputStream();
                    JSONUtilities.safePut(fileRecord, "declaredEncoding", urlConnection.getContentEncoding());
                    JSONUtilities.safePut(fileRecord, "declaredMimeType", urlConnection.getContentType());
                    try {
                        if (saveStream(stream2, url, rawDataDir, progress, update, fileRecord, fileRecords, urlConnection.getContentLength())) {
                            archiveCount++;
                        }
                        downloadCount++;
                    } finally {
                        stream2.close();
                    }
                }
            } else {
                String value = Streams.asString(stream);
                parameters.put(name, value);
                // TODO: We really want to store this on the request so it's available for everyone
                // request.getParameterMap().put(name, value);
            }
        } else {
            // This part is file content.
            String fileName = fileItem.getName();
            if (fileName.length() > 0) {
                long fileSize = fileItem.getSize();
                File file = allocateFile(rawDataDir, fileName);
                JSONObject fileRecord = new JSONObject();
                JSONUtilities.safePut(fileRecord, "origin", "upload");
                JSONUtilities.safePut(fileRecord, "declaredEncoding", request.getCharacterEncoding());
                JSONUtilities.safePut(fileRecord, "declaredMimeType", fileItem.getContentType());
                JSONUtilities.safePut(fileRecord, "fileName", fileName);
                JSONUtilities.safePut(fileRecord, "location", getRelativePath(file, rawDataDir));
                progress.setProgress("Saving file " + fileName + " locally (" + formatBytes(fileSize) + " bytes)", calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));
                JSONUtilities.safePut(fileRecord, "size", saveStreamToFile(stream, file, null));
                if (postProcessRetrievedFile(rawDataDir, file, fileRecord, fileRecords, progress)) {
                    archiveCount++;
                }
                uploadCount++;
            }
        }
        stream.close();
    }
    // Delete all temp files.
    for (FileItem fileItem : tempFiles) {
        fileItem.delete();
    }
    JSONUtilities.safePut(retrievalRecord, "uploadCount", uploadCount);
    JSONUtilities.safePut(retrievalRecord, "downloadCount", downloadCount);
    JSONUtilities.safePut(retrievalRecord, "clipboardCount", clipboardCount);
    JSONUtilities.safePut(retrievalRecord, "archiveCount", archiveCount);
}
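The commons-fileupload-specific part of this method is the pairing of a DiskFileItemFactory-backed ServletFileUpload with a ProgressListener. A minimal sketch of that pattern, stripped of OpenRefine's record keeping and with simple console logging standing in for the SavingUpdate bookkeeping, might look like this; the class and method names are illustrative.

import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.ProgressListener;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;

public class ProgressTrackingUpload {

    // Hypothetical helper, not OpenRefine code: parse a multipart request
    // while reporting how many bytes have been read so far.
    static List<FileItem> parseWithProgress(HttpServletRequest request) throws Exception {
        ServletFileUpload upload = new ServletFileUpload(new DiskFileItemFactory());
        upload.setProgressListener(new ProgressListener() {
            @Override
            public void update(long bytesRead, long contentLength, int itemCount) {
                // contentLength is -1 when the client did not send a Content-Length header,
                // which is why the OpenRefine listener checks it before adding it to the total.
                System.out.println("item " + itemCount + ": " + bytesRead + "/" + contentLength + " bytes");
            }
        });
        return upload.parseRequest(request);
    }
}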
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in project felix by apache.
The class ServletRequestWrapper, method handleMultipart.
private void handleMultipart() throws IOException {
    // Create a new file upload handler
    final ServletFileUpload upload = new ServletFileUpload();
    upload.setSizeMax(this.multipartConfig.multipartMaxRequestSize);
    upload.setFileSizeMax(this.multipartConfig.multipartMaxFileSize);
    upload.setFileItemFactory(new DiskFileItemFactory(this.multipartConfig.multipartThreshold, new File(this.multipartConfig.multipartLocation)));
    // Parse the request
    List<FileItem> items = null;
    try {
        items = upload.parseRequest(new ServletRequestContext(this));
    } catch (final FileUploadException fue) {
        throw new IOException("Error parsing multipart request", fue);
    }
    parts = new ArrayList<>();
    for (final FileItem item : items) {
        parts.add(new Part() {

            @Override
            public InputStream getInputStream() throws IOException {
                return item.getInputStream();
            }

            @Override
            public String getContentType() {
                return item.getContentType();
            }

            @Override
            public String getName() {
                return item.getFieldName();
            }

            @Override
            public String getSubmittedFileName() {
                return item.getName();
            }

            @Override
            public long getSize() {
                return item.getSize();
            }

            @Override
            public void write(String fileName) throws IOException {
                try {
                    item.write(new File(fileName));
                } catch (IOException e) {
                    throw e;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }

            @Override
            public void delete() throws IOException {
                item.delete();
            }

            @Override
            public String getHeader(String name) {
                return item.getHeaders().getHeader(name);
            }

            @Override
            public Collection<String> getHeaders(String name) {
                final List<String> values = new ArrayList<>();
                final Iterator<String> iter = item.getHeaders().getHeaders(name);
                while (iter.hasNext()) {
                    values.add(iter.next());
                }
                return values;
            }

            @Override
            public Collection<String> getHeaderNames() {
                final List<String> names = new ArrayList<>();
                final Iterator<String> iter = item.getHeaders().getHeaderNames();
                while (iter.hasNext()) {
                    names.add(iter.next());
                }
                return names;
            }
        });
    }
}
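Unlike the earlier examples, this one uses the two-argument DiskFileItemFactory constructor and caps both the whole request and each individual file. A standalone sketch of that configuration outside the Felix wrapper, with purely illustrative limit values and class names, could be:

import java.io.File;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;

public class LimitedUploadFactory {

    // Illustrative limits, not Felix defaults.
    static ServletFileUpload newLimitedUpload(File spillDir) {
        // Items above 256 KB are written to spillDir instead of being held in memory.
        DiskFileItemFactory factory = new DiskFileItemFactory(256 * 1024, spillDir);
        ServletFileUpload upload = new ServletFileUpload(factory);
        upload.setSizeMax(50L * 1024 * 1024);     // whole multipart request: 50 MB
        upload.setFileSizeMax(10L * 1024 * 1024); // any single file: 10 MB
        return upload;
    }
}

If a request exceeds these limits, parseRequest throws a FileUploadException subtype, which the Felix wrapper above converts into an IOException.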
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in project pmph by BCSquad.
The class BinaryUploader, method save.
public State save(HttpServletRequest request, Map<String, Object> conf) {
    FileItemStream fileStream = null;
    boolean isAjaxUpload = request.getHeader("X_Requested_With") != null;
    if (!ServletFileUpload.isMultipartContent(request)) {
        return new BaseState(false, AppInfo.NOT_MULTIPART_CONTENT);
    }
    ServletFileUpload upload = new ServletFileUpload(new DiskFileItemFactory());
    if (isAjaxUpload) {
        upload.setHeaderEncoding("UTF-8");
    }
    try {
        FileItemIterator iterator = upload.getItemIterator(request);
        while (iterator.hasNext()) {
            fileStream = iterator.next();
            if (!fileStream.isFormField()) {
                break;
            }
            fileStream = null;
        }
        if (fileStream == null) {
            return new BaseState(false, AppInfo.NOTFOUND_UPLOAD_DATA);
        }
        String savePath = (String) conf.get("savePath");
        String originFileName = fileStream.getName();
        String suffix = FileType.getSuffixByFilename(originFileName);
        originFileName = originFileName.substring(0, originFileName.length() - suffix.length());
        savePath = savePath + suffix;
        long maxSize = ((Long) conf.get("maxSize")).longValue();
        if (!validType(suffix, (String[]) conf.get("allowFiles"))) {
            return new BaseState(false, AppInfo.NOT_ALLOW_FILE_TYPE);
        }
        savePath = PathFormat.parse(savePath, originFileName);
        String physicalPath = (String) conf.get("rootPath") + savePath;
        InputStream is = fileStream.openStream();
        State storageState = StorageManager.saveFileByInputStream(is, physicalPath, maxSize);
        is.close();
        if (storageState.isSuccess()) {
            ApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(request.getSession().getServletContext());
            FileService fileService = (FileService) ctx.getBean("fileService");
            File file = FileUpload.getFileByFilePath(physicalPath);
            String picId = fileService.saveLocalFile(file, ImageType.SYS_MESSAGE, 1L);
            storageState.putInfo("url", "/image/" + picId);
            // storageState.putInfo("url", PathFormat.format(savePath));
            storageState.putInfo("type", suffix);
            storageState.putInfo("original", originFileName + suffix);
        }
        return storageState;
    } catch (FileUploadException e) {
        return new BaseState(false, AppInfo.PARSE_REQUEST_ERROR);
    } catch (IOException e) {
        return new BaseState(false, AppInfo.IO_ERROR);
    }
}
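Note that this example constructs the handler with a DiskFileItemFactory but then uses the streaming API (getItemIterator), which reads each part directly from the request and never materializes disk-backed items, so the factory is effectively unused here. A minimal sketch of the purely streaming approach, with a hypothetical handler callback standing in for the storage logic that BinaryUploader delegates to StorageManager, might be:

import java.io.InputStream;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;

public class StreamingUploadSketch {

    // Hypothetical callback; in the example above this role is played by
    // StorageManager.saveFileByInputStream.
    interface FilePartHandler {
        void handle(String fileName, InputStream content) throws Exception;
    }

    static void process(HttpServletRequest request, FilePartHandler handler) throws Exception {
        // No item factory is needed for the streaming API.
        ServletFileUpload upload = new ServletFileUpload();
        FileItemIterator iterator = upload.getItemIterator(request);
        while (iterator.hasNext()) {
            FileItemStream item = iterator.next();
            try (InputStream stream = item.openStream()) {
                if (item.isFormField()) {
                    System.out.println(item.getFieldName() + " = " + Streams.asString(stream));
                } else {
                    handler.handle(item.getName(), stream);
                }
            }
        }
    }
}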
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in project fess by codelibs.
The class FessMultipartRequestHandler, method createServletFileUpload.
// ===================================================================================
//                                                            Create ServletFileUpload
//                                                            ========================
protected ServletFileUpload createServletFileUpload(final HttpServletRequest request) {
    final DiskFileItemFactory fileItemFactory = createDiskFileItemFactory();
    final ServletFileUpload upload = newServletFileUpload(fileItemFactory);
    upload.setHeaderEncoding(request.getCharacterEncoding());
    upload.setSizeMax(getSizeMax());
    return upload;
}
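This method delegates to createDiskFileItemFactory(), newServletFileUpload() and getSizeMax(), which are not shown in the snippet. As a rough, hedged sketch of what such helpers typically look like; the temp-directory lookup, threshold, and limit below are assumptions, not the actual Fess implementation:

import java.io.File;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;

// Hypothetical helper methods illustrating what the handler might delegate to;
// the values and the temp directory are assumptions, not the Fess code.
public class MultipartHelpersSketch {

    protected DiskFileItemFactory createDiskFileItemFactory() {
        File tempDir = new File(System.getProperty("java.io.tmpdir"));
        return new DiskFileItemFactory(256 * 1024, tempDir); // 256 KB in-memory threshold
    }

    protected ServletFileUpload newServletFileUpload(DiskFileItemFactory factory) {
        return new ServletFileUpload(factory);
    }

    protected long getSizeMax() {
        return 100L * 1024 * 1024; // 100 MB request limit
    }
}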