Usage of org.apache.solr.common.params.SolrParams in the project lucene-solr by Apache:
class LukeRequestHandler, method handleRequestBody.
/**
 * Luke-style index introspection: always reports top-level index info, then
 * depending on the {@code show} style and {@code docId}/{@code id} params adds
 * per-document field details, schema info, or indexed-field statistics.
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
  IndexSchema schema = req.getSchema();
  SolrIndexSearcher searcher = req.getSearcher();
  DirectoryReader reader = searcher.getIndexReader();
  SolrParams params = req.getParams();
  ShowStyle style = ShowStyle.get(params.get("show"));

  // If no doc is given, show all fields and top terms
  rsp.add("index", getIndexInfo(reader));

  if (ShowStyle.INDEX == style) {
    // that's all we need
    return;
  }

  Integer docId = params.getInt(DOC_ID);
  if (docId == null && params.get(ID) != null) {
    // Look for something with a given solr ID: resolve the uniqueKey value
    // to an internal Lucene docId via a term lookup.
    SchemaField uniqueKey = schema.getUniqueKeyField();
    String v = uniqueKey.getType().toInternal(params.get(ID));
    Term t = new Term(uniqueKey.getName(), v);
    docId = searcher.getFirstMatch(t);
    if (docId < 0) {
      throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "Can't find document: " + params.get(ID));
    }
  }

  // Read the document from the index
  if (docId != null) {
    if (style != null && style != ShowStyle.DOC) {
      throw new SolrException(ErrorCode.BAD_REQUEST, "missing doc param for doc style");
    }
    Document doc = null;
    Exception readFailure = null;
    try {
      doc = reader.document(docId);
    } catch (Exception ex) {
      // Don't swallow the failure: remember it so the NOT_FOUND below can
      // carry the root cause instead of hiding it from the caller.
      readFailure = ex;
    }
    if (doc == null) {
      throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "Can't find document: " + docId, readFailure);
    }
    SimpleOrderedMap<Object> info = getDocumentFieldsInfo(doc, docId, reader, schema);
    SimpleOrderedMap<Object> docinfo = new SimpleOrderedMap<>();
    docinfo.add("docId", docId);
    docinfo.add("lucene", info);
    docinfo.add("solr", doc);
    rsp.add("doc", docinfo);
  } else if (ShowStyle.SCHEMA == style) {
    rsp.add("schema", getSchemaInfo(req.getSchema()));
  } else {
    rsp.add("fields", getIndexedFieldsInfo(req));
  }

  // Add some generally helpful information
  NamedList<Object> info = new SimpleOrderedMap<>();
  info.add("key", getFieldFlagsKey());
  info.add("NOTE", "Document Frequency (df) is not updated when a document is marked for deletion. df values include deleted documents.");
  rsp.add("info", info);
  rsp.setHttpCaching(false);
}
Usage of org.apache.solr.common.params.SolrParams in the project lucene-solr by Apache:
class PluginInfoHandler, method handleRequestBody.
/**
 * Reports all registered plugin info beans for this core; the boolean
 * {@code stats} param (default false) toggles inclusion of statistics.
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
  boolean includeStats = req.getParams().getBool("stats", false);
  rsp.add("plugins", getSolrInfoBeans(req.getCore(), includeStats));
  rsp.setHttpCaching(false);
}
Usage of org.apache.solr.common.params.SolrParams in the project lucene-solr by Apache:
class SplitOp, method execute.
/**
 * Splits a core's index either into filesystem paths ({@code path}) or into
 * existing target cores ({@code targetCore}), optionally constrained by
 * explicit hash ranges ({@code ranges}) or a {@code split.key}.
 */
@Override
public void execute(CoreAdminHandler.CallInfo it) throws Exception {
  SolrParams params = it.req.getParams();
  List<DocRouter.Range> ranges = null;

  String[] pathsArr = params.getParams(PATH);
  // ranges=a-b,c-d,e-f  (comma-separated hexadecimal hash ranges)
  String rangesStr = params.get(CoreAdminParams.RANGES);
  if (rangesStr != null) {
    String[] rangesArr = rangesStr.split(",");
    if (rangesArr.length == 0) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "There must be at least one range specified to split an index");
    } else {
      ranges = new ArrayList<>(rangesArr.length);
      for (String r : rangesArr) {
        try {
          ranges.add(DocRouter.DEFAULT.fromString(r));
        } catch (Exception e) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Exception parsing hexadecimal hash range: " + r, e);
        }
      }
    }
  }
  String splitKey = params.get("split.key");
  String[] newCoreNames = params.getParams("targetCore");
  String cname = params.get(CoreAdminParams.CORE, "");

  // Exactly one of path/targetCore must name at least one split target.
  if ((pathsArr == null || pathsArr.length == 0) && (newCoreNames == null || newCoreNames.length == 0)) {
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Either path or targetCore param must be specified");
  }

  // Parameterized logging: avoids eager string concatenation.
  log.info("Invoked split action for core: {}", cname);
  SolrCore core = it.handler.coreContainer.getCore(cname);
  if (core == null) {
    // Fail fast with a clear 400 instead of an NPE wrapped in RuntimeException below.
    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Core with core name " + cname + " expected but doesn't exist.");
  }
  SolrQueryRequest req = new LocalSolrQueryRequest(core, params);
  List<SolrCore> newCores = null;
  try {
    // TODO: allow use of rangesStr in the future
    List<String> paths = null;
    int partitions = pathsArr != null ? pathsArr.length : newCoreNames.length;

    DocRouter router = null;
    String routeFieldName = null;
    if (it.handler.coreContainer.isZooKeeperAware()) {
      // In SolrCloud mode, derive the router and (if not given) the hash
      // ranges from the collection's slice this core belongs to.
      ClusterState clusterState = it.handler.coreContainer.getZkController().getClusterState();
      String collectionName = req.getCore().getCoreDescriptor().getCloudDescriptor().getCollectionName();
      DocCollection collection = clusterState.getCollection(collectionName);
      String sliceName = req.getCore().getCoreDescriptor().getCloudDescriptor().getShardId();
      Slice slice = collection.getSlice(sliceName);
      router = collection.getRouter() != null ? collection.getRouter() : DocRouter.DEFAULT;
      if (ranges == null) {
        DocRouter.Range currentRange = slice.getRange();
        ranges = currentRange != null ? router.partitionRange(partitions, currentRange) : null;
      }
      // for back-compat with Solr 4.4
      Object routerObj = collection.get(DOC_ROUTER);
      // instanceof is already null-safe, so no separate null check is needed
      if (routerObj instanceof Map) {
        Map routerProps = (Map) routerObj;
        routeFieldName = (String) routerProps.get("field");
      }
    }

    if (pathsArr == null) {
      // Splitting into existing cores: every target must already be loaded.
      newCores = new ArrayList<>(partitions);
      for (String newCoreName : newCoreNames) {
        SolrCore newcore = it.handler.coreContainer.getCore(newCoreName);
        if (newcore != null) {
          newCores.add(newcore);
        } else {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Core with core name " + newCoreName + " expected but doesn't exist.");
        }
      }
    } else {
      paths = Arrays.asList(pathsArr);
    }

    SplitIndexCommand cmd = new SplitIndexCommand(req, paths, newCores, ranges, router, routeFieldName, splitKey);
    core.getUpdateHandler().split(cmd);
    // After the split has completed, someone (here?) should start the process of replaying the buffered updates.
  } catch (Exception e) {
    log.error("ERROR executing split:", e);
    throw new RuntimeException(e);
  } finally {
    // Release every core reference acquired above, even on failure.
    if (req != null)
      req.close();
    if (core != null)
      core.close();
    if (newCores != null) {
      for (SolrCore newCore : newCores) {
        newCore.close();
      }
    }
  }
}
Usage of org.apache.solr.common.params.SolrParams in the project lucene-solr by Apache:
class StatusOp, method execute.
/**
 * Reports per-core status (for one requested core, or all cores) plus any
 * core-initialization failures recorded by the container.
 */
@Override
public void execute(CoreAdminHandler.CallInfo it) throws Exception {
  SolrParams solrParams = it.req.getParams();
  String requestedCore = solrParams.get(CoreAdminParams.CORE);
  String indexInfoParam = solrParams.get(CoreAdminParams.INDEX_INFO);
  // Index info is included by default; only an explicit non-"true" value disables it.
  boolean withIndexInfo = (indexInfoParam == null) || Boolean.parseBoolean(indexInfoParam);

  NamedList<Object> status = new SimpleOrderedMap<>();
  Map<String, Exception> failures = new HashMap<>();
  for (Map.Entry<String, CoreContainer.CoreLoadFailure> entry : it.handler.coreContainer.getCoreInitFailures().entrySet()) {
    failures.put(entry.getKey(), entry.getValue().exception);
  }
  try {
    if (requestedCore == null) {
      // No core specified: report status for every core the container knows about.
      for (String name : it.handler.coreContainer.getAllCoreNames()) {
        status.add(name, CoreAdminOperation.getCoreStatus(it.handler.coreContainer, name, withIndexInfo));
      }
      it.rsp.add("initFailures", failures);
    } else {
      // Single core: narrow the failure map to just that core (empty if it loaded fine).
      Map<String, Exception> coreFailures = failures.containsKey(requestedCore)
          ? Collections.singletonMap(requestedCore, failures.get(requestedCore))
          : Collections.<String, Exception>emptyMap();
      it.rsp.add("initFailures", coreFailures);
      status.add(requestedCore, CoreAdminOperation.getCoreStatus(it.handler.coreContainer, requestedCore, withIndexInfo));
    }
    it.rsp.add("status", status);
  } catch (Exception ex) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error handling 'status' action ", ex);
  }
}
Usage of org.apache.solr.common.params.SolrParams in the project lucene-solr by Apache:
class ZookeeperInfoHandler, method handleRequestBody.
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
// Serves the admin-UI ZooKeeper browser: prints znode data at `path`
// (optionally paged/filtered for the graph view of /clusterstate.json)
// as a raw, header-less response.
final SolrParams params = req.getParams();
Map<String, String> map = new HashMap<>(1);
map.put(WT, "raw");
map.put(OMIT_HEADER, "true");
// Force raw output regardless of what the client requested.
req.setParams(SolrParams.wrapDefaults(new MapSolrParams(map), params));
// Lazily create paging support exactly once; synchronized so concurrent
// first requests don't race and build two instances.
synchronized (this) {
if (pagingSupport == null) {
pagingSupport = new PagedCollectionSupport();
ZkController zkController = cores.getZkController();
if (zkController != null) {
// get notified when the ZK session expires (so we can clear the cached collections and rebuild)
zkController.addOnReconnectListener(pagingSupport);
}
}
}
String path = params.get(PATH);
// Optional explicit ZK address; an empty string is treated as absent.
String addr = params.get("addr");
if (addr != null && addr.length() == 0) {
addr = null;
}
String detailS = params.get("detail");
boolean detail = detailS != null && detailS.equals("true");
String dumpS = params.get("dump");
boolean dump = dumpS != null && dumpS.equals("true");
// Paging window for the collections list (rows=-1 means unbounded).
int start = params.getInt("start", 0);
int rows = params.getInt("rows", -1);
// Normalize filterType: trimmed, lower-cased, blank collapsed to null.
String filterType = params.get("filterType");
if (filterType != null) {
filterType = filterType.trim().toLowerCase(Locale.ROOT);
if (filterType.length() == 0)
filterType = null;
}
// NOTE(review): valueOf on the lower-cased string assumes the FilterType
// enum constants are lower-case (consistent with FilterType.none) — confirm.
FilterType type = (filterType != null) ? FilterType.valueOf(filterType) : FilterType.none;
// The filter value is only meaningful when a filter type was given.
String filter = (type != FilterType.none) ? params.get("filter") : null;
if (filter != null) {
filter = filter.trim();
if (filter.length() == 0)
filter = null;
}
ZKPrinter printer = new ZKPrinter(cores.getZkController(), addr);
printer.detail = detail;
printer.dump = dump;
// Paging only applies to the graph view of the cluster state.
boolean isGraphView = "graph".equals(params.get("view"));
printer.page = (isGraphView && "/clusterstate.json".equals(path)) ? new PageOfCollections(start, rows, type, filter) : null;
printer.pagingSupport = pagingSupport;
try {
printer.print(path);
} finally {
// Always release the printer's resources, even if printing failed.
printer.close();
}
rsp.getValues().add(RawResponseWriter.CONTENT, printer);
}
Aggregations