Usage of org.apache.solr.common.util.ContentStream in project lucene-solr (Apache): class DumpRequestHandler, method handleRequestBody.
/**
 * Echoes the request back to the client for debugging: the request parameters,
 * any requested URL-template (path) values, selected named parameters, handler
 * init args, the full body of every content stream, and the request context.
 *
 * @param req the incoming request to dump
 * @param rsp the response the dump is written into
 * @throws IOException if a content stream cannot be read
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
    // Show params
    rsp.add("params", req.getParams().toNamedList());

    // Optionally echo selected path-template values (e.g. the {collection} part of a templated URL).
    String[] parts = req.getParams().getParams("urlTemplateValues");
    if (parts != null && parts.length > 0) {
        Map<String, Object> map = new LinkedHashMap<>();
        rsp.getValues().add("urlTemplateValues", map);
        for (String part : parts) {
            map.put(part, req.getPathTemplateValues().get(part));
        }
    }

    // For each name listed under "param", copy its value(s) into the echoed params list.
    String[] returnParams = req.getParams().getParams("param");
    if (returnParams != null) {
        @SuppressWarnings("unchecked")
        NamedList<Object> params = (NamedList<Object>) rsp.getValues().get("params");
        for (String returnParam : returnParams) {
            String[] vals = req.getParams().getParams(returnParam);
            if (vals != null) {
                // A single value is unwrapped; multiple values are returned as an array.
                if (vals.length == 1) {
                    params.add(returnParam, vals[0]);
                } else {
                    params.add(returnParam, vals);
                }
            }
        }
    }

    // Optionally echo this handler's configured defaults and/or full init args.
    if (req.getParams().getBool("getdefaults", false)) {
        NamedList<?> def = (NamedList<?>) initArgs.get(PluginInfo.DEFAULTS);
        rsp.add("getdefaults", def);
    }
    if (req.getParams().getBool("initArgs", false)) {
        rsp.add("initArgs", initArgs);
    }

    // Write the streams: metadata plus the entire body of each content stream.
    if (req.getContentStreams() != null) {
        ArrayList<NamedList<Object>> streams = new ArrayList<>();
        // Cycle through each stream
        for (ContentStream content : req.getContentStreams()) {
            NamedList<Object> stream = new SimpleOrderedMap<>();
            stream.add(NAME, content.getName());
            stream.add("sourceInfo", content.getSourceInfo());
            stream.add("size", content.getSize());
            stream.add("contentType", content.getContentType());
            // try-with-resources guarantees the reader is closed even if reading fails
            try (Reader reader = content.getReader()) {
                stream.add("stream", IOUtils.toString(reader));
            }
            streams.add(stream);
        }
        rsp.add("streams", streams);
    }
    rsp.add("context", req.getContext());
}
Usage of org.apache.solr.common.util.ContentStream in project lucene-solr (Apache): class BlobHandler, method handleRequestBody.
/**
 * Handles blob-store requests at paths of the form {@code /blob/<blobName>[/<version>]}.
 * POST uploads a new version of a blob (rejecting duplicates by md5); any other method
 * either streams back the raw blob bytes (when {@code wt=filestream}) or forwards to a
 * metadata query listing matching blob versions.
 *
 * @param req the request; its context "path" entry determines blob name and version
 * @param rsp the response written to
 * @throws Exception on search, parse, or indexing failures
 */
@Override
public void handleRequestBody(final SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    String httpMethod = req.getHttpMethod();
    String path = (String) req.getContext().get("path");
    SolrConfigHandler.setWt(req, JSON);

    // Expected path shape: /blob/<blobName>[/<version>]
    List<String> pieces = StrUtils.splitSmart(path, '/');
    String blobName = null;
    if (pieces.size() >= 3) blobName = pieces.get(2);

    if ("POST".equals(httpMethod)) {
        // --- Upload a new blob version ---
        if (blobName == null || blobName.isEmpty()) {
            rsp.add("error", "Name not found");
            return;
        }
        String err = SolrConfigHandler.validateName(blobName);
        if (err != null) {
            // FIX: previous message ("no blob name") was misleading — the name exists but is invalid.
            log.warn("invalid blob name: {}", blobName);
            rsp.add("error", err);
            return;
        }
        if (req.getContentStreams() == null) {
            log.warn("no content stream");
            rsp.add("error", "No stream");
            return;
        }
        // Only the first content stream is consumed (note the break at the end of the loop).
        for (ContentStream stream : req.getContentStreams()) {
            ByteBuffer payload = SimplePostTool.inputStreamToByteArray(stream.getStream(), maxSize);
            MessageDigest m = MessageDigest.getInstance("MD5");
            // NOTE(review): update(array, offset, len) takes a LENGTH as its third argument;
            // passing payload.limit() is only correct while payload.position() == 0 — confirm
            // inputStreamToByteArray always returns a buffer positioned at 0.
            m.update(payload.array(), payload.position(), payload.limit());
            // NOTE(review): BigInteger.toString(16) drops leading zeros, so this can be shorter
            // than a canonical 32-char md5 string. Insert and lookup both use this encoding, so
            // it is internally consistent — do not "fix" without reindexing existing blobs.
            String md5 = new BigInteger(1, m.digest()).toString(16);

            // Reject exact-duplicate payloads, returning the existing entry's metadata instead.
            TopDocs duplicate = req.getSearcher().search(new TermQuery(new Term("md5", md5)), 1);
            if (duplicate.totalHits > 0) {
                rsp.add("error", "duplicate entry");
                forward(req, null, new MapSolrParams((Map) makeMap("q", "md5:" + md5, "fl", "id,size,version,timestamp,blobName")), rsp);
                log.warn("duplicate entry for blob: {}", blobName);
                return;
            }

            // Find the highest existing version of this blob (sort by version descending) and bump it.
            TopFieldDocs docs = req.getSearcher().search(new TermQuery(new Term("blobName", blobName)), 1, new Sort(new SortField("version", SortField.Type.LONG, true)));
            long version = 0;
            if (docs.totalHits > 0) {
                Document doc = req.getSearcher().doc(docs.scoreDocs[0].doc);
                Number n = doc.getField("version").numericValue();
                version = n.longValue();
            }
            version++;
            String id = blobName + "/" + version;
            Map<String, Object> doc = makeMap(ID, id, "md5", md5, "blobName", blobName, VERSION, version, "timestamp", new Date(), "size", payload.limit(), "blob", payload);
            verifyWithRealtimeGet(blobName, version, req, doc);
            log.info(StrUtils.formatString("inserting new blob {0} ,size {1}, md5 {2}", doc.get(ID), String.valueOf(payload.limit()), md5));
            indexMap(req, rsp, doc);
            log.info(" Successfully Added and committed a blob with id {} and size {} ", id, payload.limit());
            break;
        }
    } else {
        // --- Retrieve blob content or metadata ---
        int version = -1;
        if (pieces.size() > 3) {
            try {
                version = Integer.parseInt(pieces.get(3));
            } catch (NumberFormatException e) {
                // FIX: added separator — previously rendered as e.g. "Invalid versionabc"
                rsp.add("error", "Invalid version: " + pieces.get(3));
                return;
            }
        }
        if (ReplicationHandler.FILE_STREAM.equals(req.getParams().get(CommonParams.WT))) {
            // wt=filestream: stream the raw blob bytes back to the client.
            if (blobName == null) {
                throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "Please send the request in the format /blob/<blobName>/<version>");
            } else {
                // Without an explicit version, pick the latest by sorting version descending.
                String q = "blobName:{0}";
                if (version != -1)
                    q = "id:{0}/{1}";
                QParser qparser = QParser.getParser(StrUtils.formatString(q, blobName, version), req);
                final TopDocs docs = req.getSearcher().search(qparser.parse(), 1, new Sort(new SortField("version", SortField.Type.LONG, true)));
                if (docs.totalHits > 0) {
                    rsp.add(ReplicationHandler.FILE_STREAM, new SolrCore.RawWriter() {
                        @Override
                        public void write(OutputStream os) throws IOException {
                            Document doc = req.getSearcher().doc(docs.scoreDocs[0].doc);
                            IndexableField sf = doc.getField("blob");
                            FieldType fieldType = req.getSchema().getField("blob").getType();
                            ByteBuffer buf = (ByteBuffer) fieldType.toObject(sf);
                            if (buf == null) {
                                //should never happen unless a user wrote this document directly
                                throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "Invalid document . No field called blob");
                            } else {
                                os.write(buf.array(), 0, buf.limit());
                            }
                        }
                    });
                } else {
                    throw new SolrException(SolrException.ErrorCode.NOT_FOUND, StrUtils.formatString("Invalid combination of blobName {0} and version {1}", blobName, version));
                }
            }
        } else {
            // Metadata listing: all blobs, all versions of one blob, or one specific version.
            String q = "*:*";
            if (blobName != null) {
                q = "blobName:{0}";
                if (version != -1) {
                    q = "id:{0}/{1}";
                }
            }
            forward(req, null, new MapSolrParams((Map) makeMap("q", StrUtils.formatString(q, blobName, version), "fl", "id,size,version,timestamp,blobName,md5", SORT, "version desc")), rsp);
        }
    }
}
Usage of org.apache.solr.common.util.ContentStream in project lucene-solr (Apache): class RequestUtil, method processParams.
/**
* Set default-ish params on a SolrQueryRequest as well as do standard macro processing and JSON request parsing.
*
* @param handler The search handler this is for (may be null if you don't want this method touching the content streams)
* @param req The request whose params we are interested in
* @param defaults values to be used if no values are specified in the request params
* @param appends values to be appended to those from the request (or defaults) when dealing with multi-val params, or treated as another layer of defaults for single-val params.
* @param invariants values which will be used instead of any request, or default values, regardless of context.
*/
public static void processParams(SolrRequestHandler handler, SolrQueryRequest req, SolrParams defaults, SolrParams appends, SolrParams invariants) {
    boolean searchHandler = handler instanceof SearchHandler;
    SolrParams params = req.getParams();

    // Handle JSON stream for search requests
    if (searchHandler && req.getContentStreams() != null) {
        Map<String, String[]> map = MultiMapSolrParams.asMultiMap(params, false);
        if (!(params instanceof MultiMapSolrParams || params instanceof ModifiableSolrParams)) {
            // need to set params on request since we weren't able to access the original map
            params = new MultiMapSolrParams(map);
            req.setParams(params);
        }
        // params from the query string should come after (and hence override) JSON content streams
        String[] jsonFromParams = map.remove(JSON);
        for (ContentStream cs : req.getContentStreams()) {
            // Only JSON bodies are accepted for search handlers; anything else is a client error.
            String contentType = cs.getContentType();
            if (contentType == null || !contentType.contains("/json")) {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Bad contentType for search handler :" + contentType + " request=" + req);
            }
            try {
                // Each stream body becomes one value of the multi-valued "json" param.
                String jsonString = IOUtils.toString(cs.getReader());
                if (jsonString != null) {
                    MultiMapSolrParams.addParam(JSON, jsonString, map);
                }
            } catch (IOException e) {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Exception reading content stream for request:" + req, e);
            }
        }
        // append existing "json" params
        if (jsonFromParams != null) {
            for (String json : jsonFromParams) {
                MultiMapSolrParams.addParam(JSON, json, map);
            }
        }
    }
    String[] jsonS = params.getParams(JSON);
    boolean hasAdditions = defaults != null || invariants != null || appends != null || jsonS != null;
    // short circuit processing
    if (!hasAdditions && !params.getBool("expandMacros", true)) {
        // nothing to do...
        return;
    }
    boolean isShard = params.getBool("isShard", false);
    // When additions are expected, ask for a mutable copy of the param map.
    Map<String, String[]> newMap = MultiMapSolrParams.asMultiMap(params, hasAdditions);
    // The parameters we extract will be propagated anyway.
    if (jsonS != null && !isShard) {
        // Lift top-level "params" entries out of the JSON body into the param map.
        for (String json : jsonS) {
            getParamsFromJSON(newMap, json);
        }
    }
    // first populate defaults, etc..
    // defaults only apply to keys the request did not already supply
    if (defaults != null) {
        Map<String, String[]> defaultsMap = MultiMapSolrParams.asMultiMap(defaults);
        for (Map.Entry<String, String[]> entry : defaultsMap.entrySet()) {
            String key = entry.getKey();
            if (!newMap.containsKey(key)) {
                newMap.put(key, entry.getValue());
            }
        }
    }
    // appends are concatenated onto any existing values for the same key
    if (appends != null) {
        Map<String, String[]> appendsMap = MultiMapSolrParams.asMultiMap(appends);
        for (Map.Entry<String, String[]> entry : appendsMap.entrySet()) {
            String key = entry.getKey();
            String[] arr = newMap.get(key);
            if (arr == null) {
                newMap.put(key, entry.getValue());
            } else {
                String[] appendArr = entry.getValue();
                String[] newArr = new String[arr.length + appendArr.length];
                System.arraycopy(arr, 0, newArr, 0, arr.length);
                System.arraycopy(appendArr, 0, newArr, arr.length, appendArr.length);
                newMap.put(key, newArr);
            }
        }
    }
    // invariants always win: they overwrite unconditionally
    if (invariants != null) {
        newMap.putAll(MultiMapSolrParams.asMultiMap(invariants));
    }
    if (!isShard) {
        // Don't expand macros in shard requests
        String[] doMacrosStr = newMap.get("expandMacros");
        boolean doMacros = true;
        if (doMacrosStr != null) {
            doMacros = "true".equals(doMacrosStr[0]);
        }
        if (doMacros) {
            newMap = MacroExpander.expand(newMap);
        }
    }
    // Set these params as soon as possible so if there is an error processing later, things like
    // "wt=json" will take effect from the defaults.
    // newMap may still change below, but that should be OK
    SolrParams newParams = new MultiMapSolrParams(newMap);
    req.setParams(newParams);
    // For example json.command started to be used in SOLR-6294, and that caused errors here.
    if (!searchHandler)
        return;
    // From here on: parse/merge the JSON request body (search handlers only).
    Map<String, Object> json = null;
    // Handle JSON body first, so query params will always overlay on that
    jsonS = newMap.get(JSON);
    if (jsonS != null) {
        if (json == null) {
            json = new LinkedHashMap<>();
        }
        mergeJSON(json, JSON, jsonS, new ObjectUtil.ConflictHandler());
    }
    // Dotted params like "json.facet" are merged into the same tree at the given path.
    for (String key : newMap.keySet()) {
        // json.nl, json.wrf are existing query parameters
        if (key.startsWith("json.") && !("json.nl".equals(key) || "json.wrf".equals(key))) {
            if (json == null) {
                json = new LinkedHashMap<>();
            }
            mergeJSON(json, key, newMap.get(key), new ObjectUtil.ConflictHandler());
        }
    }
    // implement compat for existing components...
    // Map well-known top-level JSON keys onto their classic query-param equivalents
    // (query->q, filter->fq, fields->fl, offset->start, limit->rows, sort->sort).
    if (json != null && !isShard) {
        for (Map.Entry<String, Object> entry : json.entrySet()) {
            String key = entry.getKey();
            String out = null;
            boolean arr = false; // true when the target param is multi-valued (fq, fl)
            if ("query".equals(key)) {
                out = "q";
            } else if ("filter".equals(key)) {
                out = "fq";
                arr = true;
            } else if ("fields".equals(key)) {
                out = "fl";
                arr = true;
            } else if ("offset".equals(key)) {
                out = "start";
            } else if ("limit".equals(key)) {
                out = "rows";
            } else if (SORT.equals(key)) {
                out = SORT;
            } else if ("params".equals(key) || "facet".equals(key)) {
                // handled elsewhere
                continue;
            } else {
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown top-level key in JSON request : " + key);
            }
            Object val = entry.getValue();
            if (arr) {
                // Multi-valued: append the JSON value(s) after any existing param values.
                String[] existing = newMap.get(out);
                List lst = val instanceof List ? (List) val : null;
                int existingSize = existing == null ? 0 : existing.length;
                int jsonSize = lst == null ? 1 : lst.size();
                String[] newval = new String[existingSize + jsonSize];
                for (int i = 0; i < existingSize; i++) {
                    newval[i] = existing[i];
                }
                if (lst != null) {
                    for (int i = 0; i < jsonSize; i++) {
                        Object v = lst.get(i);
                        newval[existingSize + i] = v.toString();
                    }
                } else {
                    newval[newval.length - 1] = val.toString();
                }
                newMap.put(out, newval);
            } else {
                // Single-valued: the JSON value replaces any existing param value.
                newMap.put(out, new String[] { val.toString() });
            }
        }
    }
    // Make the parsed JSON tree available to downstream components.
    if (json != null) {
        req.setJSON(json);
    }
}
Usage of org.apache.solr.common.util.ContentStream in project lucene-solr (Apache): class ExtractingRequestHandlerTest, method loadLocalFromHandler.
/**
 * Builds a local request carrying the given file as its single content stream,
 * runs it through the named handler, and returns the handler's response.
 * The request is always closed, whether or not the handler succeeds.
 */
SolrQueryResponse loadLocalFromHandler(String handler, String filename, String... args) throws Exception {
    LocalSolrQueryRequest request = (LocalSolrQueryRequest) req(args);
    try {
        // TODO: stop using locally defined streams once stream.file and
        // stream.body work everywhere
        List<ContentStream> streams = new ArrayList<>();
        streams.add(new ContentStreamBase.FileStream(getFile(filename)));
        request.setContentStreams(streams);
        return h.queryAndResponse(handler, request);
    } finally {
        request.close();
    }
}
Usage of org.apache.solr.common.util.ContentStream in project lucene-solr (Apache): class MetricsCollectorHandler, method handleRequestBody.
/**
 * Dispatches each incoming content stream to the loader registered for its
 * content type, feeding parsed metrics into a MetricUpdateProcessor.
 * Streams without a content type are skipped with a warning; a stream whose
 * type has no registered loader is rejected with UNSUPPORTED_MEDIA_TYPE.
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    // Requests arriving while the container is absent or shutting down are dropped silently.
    if (coreContainer == null || coreContainer.isShutDown()) {
        return;
    }
    //log.info("#### " + req.toString());
    if (req.getContentStreams() == null) {
        // no content to process
        return;
    }
    for (ContentStream stream : req.getContentStreams()) {
        String contentType = stream.getContentType();
        if (contentType == null) {
            log.warn("Missing content type - ignoring");
            continue;
        }
        ContentStreamLoader streamLoader = loaders.get(contentType);
        if (streamLoader == null) {
            throw new SolrException(SolrException.ErrorCode.UNSUPPORTED_MEDIA_TYPE, "Unsupported content type for stream: " + stream.getSourceInfo() + ", contentType=" + contentType);
        }
        streamLoader.load(req, rsp, stream, new MetricUpdateProcessor(metricManager));
    }
}
Aggregations