Use of org.apache.http.entity.mime.MultipartEntityBuilder in project c4sg-services by Code4SocialGood.
The class SlackUtils, method createMultipartFormEntity.
public static HttpEntity createMultipartFormEntity(Map<String, String> parameters, InputStream is) {
    MultipartEntityBuilder multipartEntityBuilder = MultipartEntityBuilder.create();
    multipartEntityBuilder.setCharset(Charset.forName("UTF-8"));
    multipartEntityBuilder.addBinaryBody("file", is, ContentType.create("application/octet-stream"), "file");
    for (Entry<String, String> entry : parameters.entrySet()) {
        multipartEntityBuilder.addTextBody(entry.getKey(), entry.getValue());
    }
    return multipartEntityBuilder.build();
}
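A minimal usage sketch of the method above, assuming the entity is sent with Apache HttpClient. The endpoint URL and the token and channels parameters are illustrative assumptions, not taken from the project:

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class SlackUploadExample {
    public static void main(String[] args) throws Exception {
        Map<String, String> parameters = new HashMap<>();
        parameters.put("token", "xoxb-...");    // hypothetical API token parameter
        parameters.put("channels", "#general"); // hypothetical target channel parameter
        try (InputStream is = new FileInputStream("report.txt");
             CloseableHttpClient client = HttpClients.createDefault()) {
            HttpEntity entity = SlackUtils.createMultipartFormEntity(parameters, is);
            HttpPost post = new HttpPost("https://slack.com/api/files.upload"); // assumed endpoint
            post.setEntity(entity);
            try (CloseableHttpResponse response = client.execute(post)) {
                System.out.println(response.getStatusLine());
            }
        }
    }
}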
Use of org.apache.http.entity.mime.MultipartEntityBuilder in project android by JetBrains.
The class GoogleCrash, method submit.
@Override
@NotNull
public CompletableFuture<String> submit(@NotNull CrashReport report, boolean userReported) {
    if (!userReported) {
        // all non-user-reported crash events are rate limited on the client side
        if (!myRateLimiter.tryAcquire()) {
            CompletableFuture<String> f = new CompletableFuture<>();
            f.completeExceptionally(new RuntimeException("Exceeded Quota of crashes that can be reported"));
            return f;
        }
    }
    Map<String, String> parameters = getDefaultParameters();
    if (report.version != null) {
        parameters.put(KEY_VERSION, report.version);
    }
    parameters.put(KEY_PRODUCT_ID, report.productId);
    MultipartEntityBuilder builder = newMultipartEntityBuilderWithKv(parameters);
    report.serialize(builder);
    return submit(builder.build());
}
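The helper newMultipartEntityBuilderWithKv is not included in this excerpt. A minimal sketch of what such a helper could look like, assuming it simply adds each key-value pair as a text part; the project's actual implementation may differ:

import java.util.Map;
import org.apache.http.entity.mime.MultipartEntityBuilder;

final class CrashReportParts {
    // Hypothetical reconstruction: every key-value pair becomes a text body part.
    static MultipartEntityBuilder newMultipartEntityBuilderWithKv(Map<String, String> keyValues) {
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        for (Map.Entry<String, String> entry : keyValues.entrySet()) {
            builder.addTextBody(entry.getKey(), entry.getValue());
        }
        return builder;
    }
}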
Use of org.apache.http.entity.mime.MultipartEntityBuilder in project android by JetBrains.
The class GoogleCrash, method createPost.
@NotNull
private HttpUriRequest createPost(@NotNull FlightRecorder flightRecorder, @NotNull String issueText, @NotNull List<Path> logFiles) {
    HttpPost post = new HttpPost(myCrashUrl);
    ApplicationInfo applicationInfo = getApplicationInfo();
    String strictVersion = applicationInfo == null ? "0.0.0.0" : applicationInfo.getStrictVersion();
    MultipartEntityBuilder builder = MultipartEntityBuilder.create();
    // key names recognized by crash
    builder.addTextBody(KEY_PRODUCT_ID, "AndroidStudio");
    builder.addTextBody(KEY_VERSION, strictVersion);
    builder.addTextBody("exception_info", getUniqueStackTrace());
    builder.addTextBody("user_report", issueText);
    if (ANONYMIZED_UID != null) {
        builder.addTextBody("guid", ANONYMIZED_UID);
    }
    RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
    builder.addTextBody("ptime", Long.toString(runtimeMXBean.getUptime()));
    // product specific key value pairs
    builder.addTextBody("fullVersion", applicationInfo == null ? "0.0.0.0" : applicationInfo.getFullVersion());
    builder.addTextBody("osName", StringUtil.notNullize(SystemInfo.OS_NAME));
    builder.addTextBody("osVersion", StringUtil.notNullize(SystemInfo.OS_VERSION));
    builder.addTextBody("osArch", StringUtil.notNullize(SystemInfo.OS_ARCH));
    builder.addTextBody("locale", StringUtil.notNullize(LOCALE));
    builder.addTextBody("vmName", StringUtil.notNullize(runtimeMXBean.getVmName()));
    builder.addTextBody("vmVendor", StringUtil.notNullize(runtimeMXBean.getVmVendor()));
    builder.addTextBody("vmVersion", StringUtil.notNullize(runtimeMXBean.getVmVersion()));
    MemoryUsage usage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
    builder.addTextBody("heapUsed", Long.toString(usage.getUsed()));
    builder.addTextBody("heapCommitted", Long.toString(usage.getCommitted()));
    builder.addTextBody("heapMax", Long.toString(usage.getMax()));
    // add report specific data
    builder.addTextBody("Type", "InstantRunFlightRecorder");
    addFlightRecorderLogs(builder, flightRecorder, logFiles);
    post.setEntity(new GzipCompressingEntity(builder.build()));
    return post;
}
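addFlightRecorderLogs is not part of this excerpt. A simplified, hypothetical sketch of how log files could be attached, assuming each file becomes an application/octet-stream part named after the file; it ignores the FlightRecorder argument and may differ from the project's implementation:

import java.nio.file.Path;
import java.util.List;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;

final class FlightRecorderAttachments {
    // Hypothetical sketch: one binary part per log file.
    static void addLogFiles(MultipartEntityBuilder builder, List<Path> logFiles) {
        for (Path logFile : logFiles) {
            String name = logFile.getFileName().toString();
            builder.addBinaryBody(name, logFile.toFile(), ContentType.APPLICATION_OCTET_STREAM, name);
        }
    }
}

Note that the built multipart entity is then wrapped in GzipCompressingEntity (from Apache HttpClient), so the request body is sent gzip-compressed.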
Use of org.apache.http.entity.mime.MultipartEntityBuilder in project sling by apache.
The class SlingSpecificsSightlyIT, method uploadFile.
private void uploadFile(String fileName, String serverFileName, String url) throws IOException {
    HttpClient httpClient = HttpClientBuilder.create().build();
    HttpPost post = new HttpPost(launchpadURL + url);
    post.setHeader("Authorization", "Basic YWRtaW46YWRtaW4=");
    MultipartEntityBuilder entityBuilder = MultipartEntityBuilder.create();
    InputStreamBody inputStreamBody = new InputStreamBody(
            this.getClass().getClassLoader().getResourceAsStream(fileName), ContentType.TEXT_PLAIN, fileName);
    entityBuilder.addPart(serverFileName, inputStreamBody);
    post.setEntity(entityBuilder.build());
    httpClient.execute(post);
}
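The same part can also be added without constructing an InputStreamBody explicitly, since MultipartEntityBuilder's addBinaryBody overload for streams wraps the stream in an InputStreamBody internally. A minimal sketch of the equivalent call, with the part and file names passed in as parameters:

import java.io.InputStream;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;

final class UploadEntityExample {
    // Equivalent to addPart(name, new InputStreamBody(stream, contentType, fileName)).
    static HttpEntity buildUploadEntity(InputStream in, String serverFileName, String fileName) {
        return MultipartEntityBuilder.create()
                .addBinaryBody(serverFileName, in, ContentType.TEXT_PLAIN, fileName)
                .build();
    }
}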
Use of org.apache.http.entity.mime.MultipartEntityBuilder in project stanbol by apache.
The class ContentItemWriter, method writeTo.
@Override
public void writeTo(ContentItem ci, Class<?> type, Type genericType, Annotation[] annotations,
        MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
        throws IOException, WebApplicationException {
    // (0) handle default dataType
    Map<String, Object> reqProp = ContentItemHelper.getRequestPropertiesContentPart(ci);
    boolean omitMetadata = isOmitMetadata(reqProp);
    if (!MULTIPART.isCompatible(mediaType)) {
        // two possible cases
        if (!omitMetadata) {
            // (1) just return the RDF data
            // (1.a) backward support for default dataType if no Accept header is set
            StringBuilder ctb = new StringBuilder();
            if (mediaType.isWildcardType() || TEXT_PLAIN_TYPE.isCompatible(mediaType)
                    || APPLICATION_OCTET_STREAM_TYPE.isCompatible(mediaType)) {
                ctb.append(APPLICATION_LD_JSON);
            } else {
                ctb.append(mediaType.getType()).append('/').append(mediaType.getSubtype());
            }
            ctb.append(";charset=").append(UTF8.name());
            String contentType = ctb.toString();
            httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, contentType);
            try {
                serializer.serialize(entityStream, ci.getMetadata(), contentType);
            } catch (UnsupportedSerializationFormatException e) {
                throw new WebApplicationException("The enhancement results cannot be serialized "
                        + "in the requested media type: " + mediaType.toString(),
                        Response.Status.NOT_ACCEPTABLE);
            }
        } else {
            // (2) return a single content part
            Entry<IRI, Blob> contentPart = getBlob(ci, Collections.singleton(mediaType.toString()));
            if (contentPart == null) {
                // no alternate content with the requested media type
                throw new WebApplicationException("The requested enhancement chain has not created a "
                        + "version of the parsed content in the requested media type "
                        + mediaType.toString(), Response.Status.UNSUPPORTED_MEDIA_TYPE);
            } else {
                // found -> stream the content to the client
                // NOTE: This assumes that the presence of a charset
                //       implies reading/writing character streams
                String requestedCharset = mediaType.getParameters().get("charset");
                String blobCharset = contentPart.getValue().getParameter().get("charset");
                Charset readerCharset = blobCharset == null ? UTF8 : Charset.forName(blobCharset);
                Charset writerCharset = requestedCharset == null ? null : Charset.forName(requestedCharset);
                if (writerCharset != null && !writerCharset.equals(readerCharset)) {
                    // we need to transcode
                    Reader reader = new InputStreamReader(contentPart.getValue().getStream(), readerCharset);
                    Writer writer = new OutputStreamWriter(entityStream, writerCharset);
                    IOUtils.copy(reader, writer);
                    IOUtils.closeQuietly(reader);
                } else {
                    // no transcoding
                    if (requestedCharset == null && blobCharset != null) {
                        httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE,
                                mediaType.toString() + "; charset=" + blobCharset);
                    }
                    InputStream in = contentPart.getValue().getStream();
                    IOUtils.copy(in, entityStream);
                    IOUtils.closeQuietly(in);
                }
            }
        }
    } else {
        // multipart MIME requested!
        final String charsetName = mediaType.getParameters().get("charset");
        final Charset charset = charsetName != null ? Charset.forName(charsetName) : UTF8;
        MediaType rdfFormat;
        String rdfFormatString = getRdfFormat(reqProp);
        if (rdfFormatString == null || rdfFormatString.isEmpty()) {
            rdfFormat = DEFAULT_RDF_FORMAT;
        } else {
            try {
                rdfFormat = MediaType.valueOf(rdfFormatString);
                if (rdfFormat.getParameters().get("charset") == null) {
                    // use the charset of the default RDF format
                    rdfFormat = new MediaType(rdfFormat.getType(), rdfFormat.getSubtype(),
                            DEFAULT_RDF_FORMAT.getParameters());
                }
            } catch (IllegalArgumentException e) {
                throw new WebApplicationException("The specified RDF format '" + rdfFormatString
                        + "' (used to serialize all RDF parts of multipart MIME responses) "
                        + "is not a well-formatted MIME type", Response.Status.BAD_REQUEST);
            }
        }
        // (1) setting the correct header
        String contentType = String.format("%s/%s; charset=%s; boundary=%s",
                mediaType.getType(), mediaType.getSubtype(), charset.toString(), CONTENT_ITEM_BOUNDARY);
        httpHeaders.putSingle(HttpHeaders.CONTENT_TYPE, contentType);
        MultipartEntityBuilder entityBuilder = MultipartEntityBuilder.create();
        entityBuilder.setBoundary(CONTENT_ITEM_BOUNDARY);
        // (2) serialising the metadata
        if (!isOmitMetadata(reqProp)) {
            entityBuilder.addPart("metadata", new ClerezzaContentBody(
                    ci.getUri().getUnicodeString(), ci.getMetadata(), rdfFormat));
            // entity.addBodyPart(new FormBodyPart("metadata", new ClerezzaContentBody(
            //     ci.getUri().getUnicodeString(), ci.getMetadata(), rdfFormat)));
        }
        // (3) serialising the Content (Blobs)
        // (3.a) Filter based on parameter
        List<Entry<IRI, Blob>> includedBlobs = filterBlobs(ci, reqProp);
        // (3.b) Serialise the filtered
        if (!includedBlobs.isEmpty()) {
            Map<String, ContentBody> contentParts = new LinkedHashMap<String, ContentBody>();
            for (Entry<IRI, Blob> entry : includedBlobs) {
                Blob blob = entry.getValue();
                ContentType ct = ContentType.create(blob.getMimeType());
                String cs = blob.getParameter().get("charset");
                if (StringUtils.isNotBlank(cs)) {
                    ct = ct.withCharset(cs);
                }
                contentParts.put(entry.getKey().getUnicodeString(), new InputStreamBody(blob.getStream(), ct));
            }
            // add all the blobs
            entityBuilder.addPart("content",
                    new MultipartContentBody(contentParts, CONTENT_PARTS_BOUNDERY, MULTIPART_ALTERNATE));
        }
        // else no content to include
        Set<String> includeContentParts = getIncludedContentPartURIs(reqProp);
        if (includeContentParts != null) {
            // (4) serialise the Request Properties
            if (includeContentParts.isEmpty()
                    || includeContentParts.contains(REQUEST_PROPERTIES_URI.getUnicodeString())) {
                JSONObject object;
                try {
                    object = toJson(reqProp);
                } catch (JSONException e) {
                    String message = "Unable to convert Request Properties "
                            + "to JSON (values : " + reqProp + ")!";
                    log.error(message, e);
                    throw new WebApplicationException(message, Response.Status.INTERNAL_SERVER_ERROR);
                }
                entityBuilder.addTextBody(REQUEST_PROPERTIES_URI.getUnicodeString(), object.toString(),
                        ContentType.APPLICATION_JSON.withCharset(UTF8));
            }
            // (5) additional RDF metadata stored in content parts
            for (Entry<IRI, Graph> entry : getContentParts(ci, Graph.class).entrySet()) {
                if (includeContentParts.isEmpty() || includeContentParts.contains(entry.getKey())) {
                    entityBuilder.addPart(entry.getKey().getUnicodeString(),
                            new ClerezzaContentBody(null, // no file name
                                    entry.getValue(), rdfFormat));
                }
                // else ignore this content part
            }
        }
        entityBuilder.build().writeTo(entityStream);
    }
}
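Stripped of the Stanbol-specific content bodies, the MultipartEntityBuilder pattern used in the multipart branch reduces to the following sketch; the boundary, part names, and payloads are illustrative placeholders:

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;

public class MultipartResponseSketch {
    public static void main(String[] args) throws Exception {
        HttpEntity entity = MultipartEntityBuilder.create()
                .setBoundary("contentItem")  // fixed boundary, as the writer sets via setBoundary(...)
                .addTextBody("metadata", "{ \"@graph\": [] }",  // illustrative RDF payload
                        ContentType.create("application/ld+json", StandardCharsets.UTF_8))
                .addTextBody("urn:example:request-properties", "{}",  // illustrative part name and JSON
                        ContentType.APPLICATION_JSON.withCharset(StandardCharsets.UTF_8))
                .build();
        // The writer streams the multipart body directly to the response OutputStream;
        // here we capture it in memory just to print it.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        entity.writeTo(out);
        System.out.println(out.toString(StandardCharsets.UTF_8.name()));
    }
}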