Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in the project pmph (by BCSquad):
the class BinaryUploader, method save.
/**
 * Saves the first uploaded file found in a multipart request to local storage
 * and registers it with the application's FileService.
 *
 * @param request the incoming multipart HTTP request
 * @param conf    upload configuration: "savePath", "maxSize" (Long),
 *                "allowFiles" (String[]), "rootPath"
 * @return a State describing the outcome; failures are reported via
 *         BaseState rather than thrown exceptions
 */
public State save(HttpServletRequest request, Map<String, Object> conf) {
    FileItemStream fileStream = null;
    // NOTE(review): the conventional AJAX marker header is "X-Requested-With"
    // (hyphens). This underscore spelling only matches clients that send it
    // literally — confirm against the front-end before changing it.
    boolean isAjaxUpload = request.getHeader("X_Requested_With") != null;
    if (!ServletFileUpload.isMultipartContent(request)) {
        return new BaseState(false, AppInfo.NOT_MULTIPART_CONTENT);
    }
    ServletFileUpload upload = new ServletFileUpload(new DiskFileItemFactory());
    if (isAjaxUpload) {
        upload.setHeaderEncoding("UTF-8");
    }
    try {
        // Stream through the request parts; stop at the first non-form-field
        // (i.e. actual file) part.
        FileItemIterator iterator = upload.getItemIterator(request);
        while (iterator.hasNext()) {
            fileStream = iterator.next();
            if (!fileStream.isFormField()) {
                break;
            }
            fileStream = null;
        }
        if (fileStream == null) {
            return new BaseState(false, AppInfo.NOTFOUND_UPLOAD_DATA);
        }
        String savePath = (String) conf.get("savePath");
        String originFileName = fileStream.getName();
        String suffix = FileType.getSuffixByFilename(originFileName);
        // Split the client-supplied file name into base name + suffix.
        originFileName = originFileName.substring(0, originFileName.length() - suffix.length());
        savePath = savePath + suffix;
        long maxSize = ((Long) conf.get("maxSize")).longValue();
        if (!validType(suffix, (String[]) conf.get("allowFiles"))) {
            return new BaseState(false, AppInfo.NOT_ALLOW_FILE_TYPE);
        }
        savePath = PathFormat.parse(savePath, originFileName);
        String physicalPath = (String) conf.get("rootPath") + savePath;
        // try-with-resources: previously the stream leaked whenever
        // StorageManager.saveFileByInputStream threw before is.close().
        State storageState;
        try (InputStream is = fileStream.openStream()) {
            storageState = StorageManager.saveFileByInputStream(is, physicalPath, maxSize);
        }
        if (storageState.isSuccess()) {
            // Register the stored file with the FileService and expose an
            // id-based URL instead of the raw disk path.
            ApplicationContext ctx = WebApplicationContextUtils.getWebApplicationContext(request.getSession().getServletContext());
            FileService fileService = (FileService) ctx.getBean("fileService");
            File file = FileUpload.getFileByFilePath(physicalPath);
            String picId = fileService.saveLocalFile(file, ImageType.SYS_MESSAGE, 1L);
            storageState.putInfo("url", "/image/" + picId);
            // storageState.putInfo("url", PathFormat.format(savePath));
            storageState.putInfo("type", suffix);
            storageState.putInfo("original", originFileName + suffix);
        }
        return storageState;
    } catch (FileUploadException e) {
        return new BaseState(false, AppInfo.PARSE_REQUEST_ERROR);
    } catch (IOException e) {
        return new BaseState(false, AppInfo.IO_ERROR);
    }
}
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in the project pinot (by LinkedIn):
the class LLCSegmentCommit, method uploadSegment.
/**
 * Receives a committed low-level-consumer segment from a server instance and
 * moves it into the controller's data directory.
 *
 * @param instanceId     the uploading server instance (used for logging)
 * @param segmentNameStr the expected multipart field name and segment name
 * @return true on success; false if the segment part is missing or any
 *         parse/IO error occurs (the error is logged, not rethrown)
 */
boolean uploadSegment(final String instanceId, final String segmentNameStr) {
    // 1/ Create a factory for disk-based file items
    final DiskFileItemFactory factory = new DiskFileItemFactory();
    // 2/ Create a new file upload handler based on the Restlet
    // FileUpload extension that will parse Restlet requests and
    // generates FileItems.
    final RestletFileUpload upload = new RestletFileUpload(factory);
    final List<FileItem> items;
    try {
        // The following statement blocks until the entire segment is read into memory.
        items = upload.parseRequest(getRequest());
        boolean found = false;
        File dataFile = null;
        for (final FileItem fi : items) {
            if (!found && fi.getFieldName() != null && fi.getFieldName().equals(segmentNameStr)) {
                found = true;
                dataFile = new File(tempDir, segmentNameStr);
                fi.write(dataFile);
            }
            // Release the item's disk/memory backing store. For the matched
            // item this is a no-op when write() renamed the temp file into
            // dataFile, and removes the copy source otherwise. Previously the
            // temp files of unmatched (and matched) items were never deleted.
            try {
                fi.delete();
            } catch (Exception e) {
                LOGGER.error("Caught exception while deleting the temp file, should not reach here", e);
            }
        }
        if (!found) {
            LOGGER.error("Segment not included in request. Instance {}, segment {}", instanceId, segmentNameStr);
            return false;
        }
        // We will not check for quota here. Instead, committed segments will count towards the quota of a
        // table
        LLCSegmentName segmentName = new LLCSegmentName(segmentNameStr);
        final String rawTableName = segmentName.getTableName();
        final File tableDir = new File(baseDataDir, rawTableName);
        final File segmentFile = new File(tableDir, segmentNameStr);
        // Serialize replace-and-move against other segment operations.
        synchronized (_pinotHelixResourceManager) {
            if (segmentFile.exists()) {
                LOGGER.warn("Segment file {} exists. Replacing with upload from {}", segmentNameStr, instanceId);
                FileUtils.deleteQuietly(segmentFile);
            }
            FileUtils.moveFile(dataFile, segmentFile);
        }
        return true;
    } catch (Exception e) {
        LOGGER.error("File upload exception from instance {} for segment {}", instanceId, segmentNameStr, e);
    }
    return false;
}
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in the project pinot (by LinkedIn):
the class PinotSegmentUploadRestletResource, method post.
@Override
@Post
// Entry point for segment uploads. The segment tar may arrive either as a
// URI to download from (URI/JSON upload types, selected via the
// FileUploadUtils.UPLOAD_TYPE header) or inline as multipart form data
// (TAR / default). On success the tar is unpacked and handed off to
// uploadSegment(...); on any failure an error representation is returned and
// the upload-error meter is incremented. Temp files are cleaned up in finally.
public Representation post(Representation entity) {
    Representation rep = null;
    // Scratch dir the tar is unpacked into; deleted in the finally block.
    File tmpSegmentDir = null;
    // The downloaded/uploaded segment tar; deleted in the finally block.
    File dataFile = null;
    try {
        // 0/ Get upload type, if it's uri, then download it, otherwise, get the tar from the request.
        Series headers = (Series) getRequestAttributes().get(RESTLET_HTTP_HEADERS);
        String uploadTypeStr = headers.getFirstValue(FileUploadUtils.UPLOAD_TYPE);
        FileUploadType uploadType = null;
        try {
            uploadType = (uploadTypeStr == null) ? FileUploadType.getDefaultUploadType() : FileUploadType.valueOf(uploadTypeStr);
        } catch (Exception e) {
            // Unknown/invalid header value: silently fall back to the default type.
            uploadType = FileUploadType.getDefaultUploadType();
        }
        String downloadURI = null;
        boolean found = false;
        switch(uploadType) {
            // URI and JSON share the download path; only the way the URI is
            // extracted (inside getDownloadUri) differs.
            case URI:
            case JSON:
                // Download segment from the given Uri
                try {
                    downloadURI = getDownloadUri(uploadType, headers, entity);
                } catch (Exception e) {
                    String errorMsg = String.format("Failed to get download Uri for upload file type: %s, with error %s", uploadType, e.getMessage());
                    LOGGER.warn(errorMsg);
                    JSONObject errorMsgInJson = getErrorMsgInJson(errorMsg);
                    ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
                    // Bad/missing URI is the client's fault: 400.
                    setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
                    return new StringRepresentation(errorMsgInJson.toJSONString(), MediaType.APPLICATION_JSON);
                }
                SegmentFetcher segmentFetcher = null;
                // Get segmentFetcher based on uri parsed from download uri
                try {
                    segmentFetcher = SegmentFetcherFactory.getSegmentFetcherBasedOnURI(downloadURI);
                } catch (Exception e) {
                    String errorMsg = String.format("Failed to get SegmentFetcher from download Uri: %s", downloadURI);
                    LOGGER.warn(errorMsg);
                    JSONObject errorMsgInJson = getErrorMsgInJson(errorMsg);
                    ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
                    setStatus(Status.SERVER_ERROR_INTERNAL);
                    return new StringRepresentation(errorMsgInJson.toJSONString(), MediaType.APPLICATION_JSON);
                }
                // Download segment tar to local.
                dataFile = new File(tempDir, "tmp-" + System.nanoTime());
                try {
                    segmentFetcher.fetchSegmentToLocal(downloadURI, dataFile);
                } catch (Exception e) {
                    String errorMsg = String.format("Failed to fetch segment tar from download Uri: %s to %s", downloadURI, dataFile.toString());
                    LOGGER.warn(errorMsg);
                    JSONObject errorMsgInJson = getErrorMsgInJson(errorMsg);
                    ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
                    setStatus(Status.SERVER_ERROR_INTERNAL);
                    return new StringRepresentation(errorMsgInJson.toJSONString(), MediaType.APPLICATION_JSON);
                }
                // Only treat the download as successful if something non-empty landed.
                if (dataFile.exists() && dataFile.length() > 0) {
                    found = true;
                }
                break;
            case TAR:
            default:
                // 1/ Create a factory for disk-based file items
                final DiskFileItemFactory factory = new DiskFileItemFactory();
                // 2/ Create a new file upload handler based on the Restlet
                // FileUpload extension that will parse Restlet requests and
                // generates FileItems.
                final RestletFileUpload upload = new RestletFileUpload(factory);
                final List<FileItem> items;
                // 3/ Request is parsed by the handler which generates a
                // list of FileItems
                items = upload.parseRequest(getRequest());
                for (FileItem fileItem : items) {
                    // Only the first named file item is used; extras are logged
                    // and discarded.
                    if (!found) {
                        if (fileItem.getFieldName() != null) {
                            found = true;
                            dataFile = new File(tempDir, fileItem.getFieldName());
                            fileItem.write(dataFile);
                        }
                    } else {
                        LOGGER.warn("Got extra file item while pushing segments: " + fileItem.getFieldName());
                    }
                    // TODO: remove the try-catch after verifying it will not throw any exception
                    try {
                        // Remove the temp file
                        // When the file is copied to instead of renamed to the new file, the temp file might be left in the dir
                        fileItem.delete();
                    } catch (Exception e) {
                        LOGGER.error("Caught exception while deleting the temp file, should not reach here", e);
                    }
                }
        }
        // back to the client.
        if (found) {
            // Create a new representation based on disk file.
            // The content is arbitrarily sent as plain text.
            // NOTE(review): "sucessfully" is a typo in a client-visible message;
            // fix it in a dedicated change since clients may match on it.
            rep = new StringRepresentation(dataFile + " sucessfully uploaded", MediaType.TEXT_PLAIN);
            // Unique untar dir per upload: host, port and timestamp in the name.
            tmpSegmentDir = new File(tempUntarredPath, dataFile.getName() + "-" + _controllerConf.getControllerHost() + "_" + _controllerConf.getControllerPort() + "-" + System.currentTimeMillis());
            LOGGER.info("Untar segment to temp dir: " + tmpSegmentDir);
            if (tmpSegmentDir.exists()) {
                FileUtils.deleteDirectory(tmpSegmentDir);
            }
            if (!tmpSegmentDir.exists()) {
                tmpSegmentDir.mkdirs();
            }
            // While there is TarGzCompressionUtils.unTarOneFile, we use unTar here to unpack all files
            // in the segment in order to ensure the segment is not corrupted
            TarGzCompressionUtils.unTar(dataFile, tmpSegmentDir);
            // NOTE(review): listFiles()[0] assumes the tar yields at least one
            // entry; an empty/odd tar would NPE or AIOOBE here — handled only by
            // the outer catch. Confirm whether unTar guarantees an entry.
            File segmentFile = tmpSegmentDir.listFiles()[0];
            String clientIpAddress = getClientInfo().getAddress();
            String clientAddress = InetAddress.getByName(clientIpAddress).getHostName();
            LOGGER.info("Processing upload request for segment '{}' from client '{}'", segmentFile.getName(), clientAddress);
            return uploadSegment(segmentFile, dataFile, downloadURI);
        } else {
            // Some problem occurs, sent back a simple line of text.
            String errorMsg = "No file was uploaded";
            LOGGER.warn(errorMsg);
            JSONObject errorMsgInJson = getErrorMsgInJson(errorMsg);
            rep = new StringRepresentation(errorMsgInJson.toJSONString(), MediaType.APPLICATION_JSON);
            ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
            setStatus(Status.SERVER_ERROR_INTERNAL);
        }
    } catch (final Exception e) {
        // Catch-all boundary: report the failure but never let an exception
        // escape the resource handler.
        rep = exceptionToStringRepresentation(e);
        LOGGER.error("Caught exception in file upload", e);
        ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
        setStatus(Status.SERVER_ERROR_INTERNAL);
    } finally {
        // Best-effort cleanup of both temp artifacts, whether or not the
        // upload succeeded (the segment has already been moved/processed).
        if ((tmpSegmentDir != null) && tmpSegmentDir.exists()) {
            try {
                FileUtils.deleteDirectory(tmpSegmentDir);
            } catch (final IOException e) {
                LOGGER.error("Caught exception in file upload", e);
                ControllerRestApplication.getControllerMetrics().addMeteredGlobalValue(ControllerMeter.CONTROLLER_SEGMENT_UPLOAD_ERROR, 1L);
                setStatus(Status.SERVER_ERROR_INTERNAL);
            }
        }
        if ((dataFile != null) && dataFile.exists()) {
            FileUtils.deleteQuietly(dataFile);
        }
    }
    return rep;
}
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in the project JessMA (by ldcsaa):
the class FileUploader, method getFileUploadComponent.
/**
 * Builds a ServletFileUpload configured from this uploader's fields,
 * applying each setting only when it differs from the library default.
 *
 * @return a ready-to-use ServletFileUpload instance
 */
private ServletFileUpload getFileUploadComponent() {
    // Disk-backed item factory: threshold, repository dir and cleaning
    // tracker are optional overrides.
    final DiskFileItemFactory itemFactory = new DiskFileItemFactory();
    if (factorySizeThreshold != DEFAULT_SIZE_THRESHOLD) {
        itemFactory.setSizeThreshold(factorySizeThreshold);
    }
    if (factoryRepository != null) {
        itemFactory.setRepository(new File(factoryRepository));
    }
    if (factoryCleaningTracker != null) {
        itemFactory.setFileCleaningTracker(factoryCleaningTracker);
    }
    // Upload handler: size limits, header encoding and progress listener
    // are likewise applied only when configured.
    final ServletFileUpload uploader = new ServletFileUpload(itemFactory);
    if (sizeMax != NO_LIMIT_SIZE_MAX) {
        uploader.setSizeMax(sizeMax);
    }
    if (fileSizeMax != NO_LIMIT_FILE_SIZE_MAX) {
        uploader.setFileSizeMax(fileSizeMax);
    }
    if (servletHeaderencoding != null) {
        uploader.setHeaderEncoding(servletHeaderencoding);
    }
    if (servletProgressListener != null) {
        uploader.setProgressListener(servletProgressListener);
    }
    return uploader;
}
Use of org.apache.commons.fileupload.disk.DiskFileItemFactory in the project eweb4j-framework (by laiweiwei):
the class UploadUtil, method handleUpload.
/**
 * Parses a multipart request: regular form fields are copied into the
 * context's query-parameter map, and file parts are written to the configured
 * temp directory and recorded in the context's upload map (one list of
 * UploadFile per field name). Non-multipart requests are ignored.
 *
 * @param context the request context holding the servlet request and maps
 * @throws Exception if the request has an invalid multipart content type,
 *                   or if writing a file item fails
 */
public static void handleUpload(Context context) throws Exception {
    ConfigBean cb = (ConfigBean) SingleBeanCache.get(ConfigBean.class.getName());
    UploadConfigBean ucb = cb.getMvc().getUpload();
    String tmpDir = ucb.getTmp();
    int memoryMax = CommonUtil.strToInt(CommonUtil.parseFileSize(ucb.getMaxMemorySize()) + "");
    long sizeMax = CommonUtil.parseFileSize(ucb.getMaxRequestSize());
    if (tmpDir.trim().length() == 0) {
        tmpDir = "${RootPath}" + File.separator + "WEB-INF" + File.separator + "tmp";
    }
    tmpDir = tmpDir.replace("${RootPath}", ConfigConstant.ROOT_PATH);
    // Not multipart: nothing to do. isMultipartContent is static, so call it
    // via the class instead of through an upload instance.
    if (!ServletFileUpload.isMultipartContent(context.getRequest())) {
        return;
    }
    // Ensure the temp directory exists: both the factory repository and
    // item.write below need it; previously a missing dir caused failures.
    File repository = new File(tmpDir);
    if (!repository.exists()) {
        repository.mkdirs();
    }
    DiskFileItemFactory factory = new DiskFileItemFactory();
    factory.setSizeThreshold(memoryMax);
    factory.setRepository(repository);
    ServletFileUpload _upload = new ServletFileUpload(factory);
    _upload.setSizeMax(sizeMax);
    try {
        List<FileItem> items = _upload.parseRequest(context.getRequest());
        for (FileItem item : items) {
            String fieldName = item.getFieldName();
            if (item.isFormField()) {
                // Plain form field: surface it as a query parameter.
                String value = item.getString();
                context.getQueryParamMap().put(fieldName, new String[] { value });
            } else {
                String fileName = item.getName();
                if (fileName == null || fileName.trim().length() == 0) {
                    continue;
                }
                // Timestamp prefix keeps uploads of the same name apart
                // (second granularity only — collisions remain possible).
                String stamp = CommonUtil.getNowTime("yyyyMMddHHmmss");
                File tmpFile = new File(tmpDir + File.separator + stamp + "_" + fileName);
                item.write(tmpFile);
                UploadFile uploadFile = new UploadFile(tmpFile, fileName, fieldName, item.getSize(), item.getContentType());
                if (context.getUploadMap().containsKey(fieldName)) {
                    context.getUploadMap().get(fieldName).add(uploadFile);
                } else {
                    List<UploadFile> uploads = new ArrayList<UploadFile>();
                    uploads.add(uploadFile);
                    context.getUploadMap().put(fieldName, uploads);
                }
            }
        }
    } catch (InvalidContentTypeException e) {
        // Only content-type parse failures are wrapped; other
        // FileUploadExceptions propagate via the declared throws Exception.
        throw new Exception("upload file error", e);
    }
}
Aggregations