Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
Class FacetComponent, method enqueuePivotFacetShardRequests.
private void enqueuePivotFacetShardRequests(ResponseBuilder rb, int shardNum) {
  FacetInfo fi = rb._facetInfo;
  ShardRequest shardsRefineRequestPivot = new ShardRequest();
  shardsRefineRequestPivot.shards = new String[] { rb.shards[shardNum] };
  shardsRefineRequestPivot.params = new ModifiableSolrParams(rb.req.getParams());
  // don't request any documents
  shardsRefineRequestPivot.params.remove(CommonParams.START);
  shardsRefineRequestPivot.params.set(CommonParams.ROWS, "0");
  shardsRefineRequestPivot.purpose |= ShardRequest.PURPOSE_REFINE_PIVOT_FACETS;
  shardsRefineRequestPivot.params.set(FacetParams.FACET, "true");
  removeMainFacetTypeParams(shardsRefineRequestPivot);
  shardsRefineRequestPivot.params.set(FacetParams.FACET_PIVOT_MINCOUNT, -1);
  shardsRefineRequestPivot.params.remove(FacetParams.FACET_OFFSET);
  for (int pivotIndex = 0; pivotIndex < fi.pivotFacets.size(); pivotIndex++) {
    String pivotFacetKey = fi.pivotFacets.getName(pivotIndex);
    PivotFacet pivotFacet = fi.pivotFacets.getVal(pivotIndex);
    List<PivotFacetValue> queuedRefinementsForShard = pivotFacet.getQueuedRefinements(shardNum);
    if (!queuedRefinementsForShard.isEmpty()) {
      String fieldsKey = PivotFacet.REFINE_PARAM + fi.pivotRefinementCounter;
      String command;
      if (pivotFacet.localParams != null) {
        command = PIVOT_REFINE_PREFIX + fi.pivotRefinementCounter + " " + pivotFacet.facetStr.substring(2);
      } else {
        command = PIVOT_REFINE_PREFIX + fi.pivotRefinementCounter + "}" + pivotFacet.getKey();
      }
      shardsRefineRequestPivot.params.add(FacetParams.FACET_PIVOT, command);
      for (PivotFacetValue refinementValue : queuedRefinementsForShard) {
        String refinementStr = PivotFacetHelper.encodeRefinementValuePath(refinementValue.getValuePath());
        shardsRefineRequestPivot.params.add(fieldsKey, refinementStr);
      }
    }
    fi.pivotRefinementCounter++;
  }
  rb.addRequest(this, shardsRefineRequestPivot);
}
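The refinement request above relies on one ModifiableSolrParams idiom: snapshot the incoming request's params with the copy constructor, then mutate the copy without disturbing the original. Below is a minimal, self-contained sketch of that idiom; the parameter values and the "{!fpt=0}cat,state" pivot command are made-up placeholders (the real command is assembled from PIVOT_REFINE_PREFIX and the running pivotRefinementCounter), not output captured from Solr.

import org.apache.solr.common.params.ModifiableSolrParams;

public class ParamsCloneSketch {
  public static void main(String[] args) {
    // stand-in for rb.req.getParams(); values are hypothetical
    ModifiableSolrParams original = new ModifiableSolrParams();
    original.set("q", "*:*");
    original.set("start", "20");
    original.set("rows", "10");

    // the copy constructor snapshots the original, which stays untouched
    ModifiableSolrParams refined = new ModifiableSolrParams(original);
    refined.remove("start");   // don't request any documents...
    refined.set("rows", "0");  // ...only facet counts are wanted back
    refined.set("facet", "true");
    // add() appends another value under the key; set() would replace
    refined.add("facet.pivot", "{!fpt=0}cat,state");

    System.out.println(original); // still q=*:*&start=20&rows=10
    System.out.println(refined);  // the mutated copy, in URL-encoded query-string form
  }
}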
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
Class FacetComponent, method distributedProcess.
@Override
public int distributedProcess(ResponseBuilder rb) throws IOException {
  if (!rb.doFacets) {
    return ResponseBuilder.STAGE_DONE;
  }
  if (rb.stage != ResponseBuilder.STAGE_GET_FIELDS) {
    return ResponseBuilder.STAGE_DONE;
  }
  for (int shardNum = 0; shardNum < rb.shards.length; shardNum++) {
    List<String> distribFieldFacetRefinements = null;
    // FieldFacetAdditions
    for (DistribFieldFacet dff : rb._facetInfo.facets.values()) {
      if (!dff.needRefinements)
        continue;
      List<String> refList = dff._toRefine[shardNum];
      if (refList == null || refList.size() == 0)
        continue;
      // reuse the same key that was used for the main facet
      String key = dff.getKey();
      String termsKey = key + "__terms";
      String termsVal = StrUtils.join(refList, ',');
      String facetCommand;
      // add terms into the original facet.field command
      // do it via parameter reference to avoid another layer of encoding.
      String termsKeyEncoded = ClientUtils.encodeLocalParamVal(termsKey);
      if (dff.localParams != null) {
        facetCommand = commandPrefix + termsKeyEncoded + " " + dff.facetStr.substring(2);
      } else {
        facetCommand = commandPrefix + termsKeyEncoded + '}' + dff.field;
      }
      if (distribFieldFacetRefinements == null) {
        distribFieldFacetRefinements = new ArrayList<>();
      }
      distribFieldFacetRefinements.add(facetCommand);
      distribFieldFacetRefinements.add(termsKey);
      distribFieldFacetRefinements.add(termsVal);
    }
    if (distribFieldFacetRefinements != null) {
      String shard = rb.shards[shardNum];
      ShardRequest shardsRefineRequest = null;
      boolean newRequest = false;
      // piggy-back on a request that is already going out to that shard,
      // if there is one, for better scalability.
      for (ShardRequest sreq : rb.outgoing) {
        if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0
            && sreq.shards != null
            && sreq.shards.length == 1
            && sreq.shards[0].equals(shard)) {
          shardsRefineRequest = sreq;
          break;
        }
      }
      if (shardsRefineRequest == null) {
        // we didn't find any other suitable requests going out to that shard,
        // so create one ourselves.
        newRequest = true;
        shardsRefineRequest = new ShardRequest();
        shardsRefineRequest.shards = new String[] { rb.shards[shardNum] };
        shardsRefineRequest.params = new ModifiableSolrParams(rb.req.getParams());
        // don't request any documents
        shardsRefineRequest.params.remove(CommonParams.START);
        shardsRefineRequest.params.set(CommonParams.ROWS, "0");
      }
      shardsRefineRequest.purpose |= ShardRequest.PURPOSE_REFINE_FACETS;
      shardsRefineRequest.params.set(FacetParams.FACET, "true");
      removeMainFacetTypeParams(shardsRefineRequest);
      // entries were appended in (facetCommand, termsKey, termsVal) triples
      for (int i = 0; i < distribFieldFacetRefinements.size(); ) {
        String facetCommand = distribFieldFacetRefinements.get(i++);
        String termsKey = distribFieldFacetRefinements.get(i++);
        String termsVal = distribFieldFacetRefinements.get(i++);
        shardsRefineRequest.params.add(FacetParams.FACET_FIELD, facetCommand);
        shardsRefineRequest.params.set(termsKey, termsVal);
      }
      if (newRequest) {
        rb.addRequest(this, shardsRefineRequest);
      }
    }
    // PivotFacetAdditions
    if (doAnyPivotFacetRefinementRequestsExistForShard(rb._facetInfo, shardNum)) {
      enqueuePivotFacetShardRequests(rb, shardNum);
    }
  }
  return ResponseBuilder.STAGE_DONE;
}
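The commandPrefix wiring is easiest to see with concrete values. The sketch below assumes commandPrefix is the local-params opener "{!terms=$" (consistent with the '}' concatenation above) and uses a hypothetical field "cat"; the point is that the refinement terms live in their own parameter, dereferenced via $cat__terms, so they never pass through a second layer of escaping.

import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.ModifiableSolrParams;

public class RefinementParamsSketch {
  public static void main(String[] args) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    String termsKey = "cat__terms";
    // the facet command references $cat__terms instead of inlining the terms,
    // so commas or quotes in term values need no extra escaping here
    params.add(FacetParams.FACET_FIELD, "{!terms=$" + termsKey + "}cat");
    params.set(termsKey, "electronics,books,music");
    // prints roughly facet.field={!terms=$cat__terms}cat&cat__terms=... (URL-encoded)
    System.out.println(params);
  }
}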
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
Class IndexSchema, method readSchema.
protected void readSchema(InputSource is) {
  try {
    // pass the config resource loader to avoid building an empty one for no reason:
    // in the current case though, the stream is valid so we won't load the resource by name
    Config schemaConf = new Config(loader, SCHEMA, is, SLASH + SCHEMA + SLASH);
    Document document = schemaConf.getDocument();
    final XPath xpath = schemaConf.getXPath();
    String expression = stepsToPath(SCHEMA, AT + NAME);
    Node nd = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
    String coreName = getCoreName("null");
    StringBuilder sb = new StringBuilder();
    // Another case where the initialization from the test harness is different than the "real world"
    sb.append("[");
    sb.append(coreName);
    sb.append("] ");
    if (nd == null) {
      sb.append("schema has no name!");
      log.warn(sb.toString());
    } else {
      name = nd.getNodeValue();
      sb.append("Schema ");
      sb.append(NAME);
      sb.append("=");
      sb.append(name);
      log.info(sb.toString());
    }
    // /schema/@version
    expression = stepsToPath(SCHEMA, AT + VERSION);
    version = schemaConf.getFloat(expression, 1.0f);
    // load the Field Types
    final FieldTypePluginLoader typeLoader = new FieldTypePluginLoader(this, fieldTypes, schemaAware);
    expression = getFieldTypeXPathExpressions();
    NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET);
    typeLoader.load(loader, nodes);
    // load the fields
    Map<String, Boolean> explicitRequiredProp = loadFields(document, xpath);
    // /schema/similarity
    expression = stepsToPath(SCHEMA, SIMILARITY);
    Node node = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
    similarityFactory = readSimilarity(loader, node);
    if (similarityFactory == null) {
      final boolean modernSim = getDefaultLuceneMatchVersion().onOrAfter(Version.LUCENE_6_0_0);
      final Class simClass = modernSim ? SchemaSimilarityFactory.class : ClassicSimilarityFactory.class;
      // use the loader to ensure proper SolrCoreAware handling
      similarityFactory = loader.newInstance(simClass.getName(), SimilarityFactory.class);
      similarityFactory.init(new ModifiableSolrParams());
    } else {
      isExplicitSimilarity = true;
    }
    if (!(similarityFactory instanceof SolrCoreAware)) {
      // then we are responsible for erroring if a field type is trying to specify a sim.
      for (FieldType ft : fieldTypes.values()) {
        if (null != ft.getSimilarity()) {
          String msg = "FieldType '" + ft.getTypeName()
              + "' is configured with a similarity, but the global similarity does not support it: "
              + similarityFactory.getClass();
          log.error(msg);
          throw new SolrException(ErrorCode.SERVER_ERROR, msg);
        }
      }
    }
    // /schema/defaultSearchField/text()
    expression = stepsToPath(SCHEMA, "defaultSearchField", TEXT_FUNCTION);
    node = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
    if (node != null) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "Setting defaultSearchField in schema not supported since Solr 7");
    }
    // /schema/solrQueryParser/@defaultOperator
    expression = stepsToPath(SCHEMA, "solrQueryParser", AT + "defaultOperator");
    node = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
    if (node != null) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "Setting default operator in schema (solrQueryParser/@defaultOperator) not supported");
    }
    // /schema/uniqueKey/text()
    expression = stepsToPath(SCHEMA, UNIQUE_KEY, TEXT_FUNCTION);
    node = (Node) xpath.evaluate(expression, document, XPathConstants.NODE);
    if (node == null) {
      log.warn("no " + UNIQUE_KEY + " specified in schema.");
    } else {
      uniqueKeyField = getIndexedField(node.getNodeValue().trim());
      if (null != uniqueKeyField.getDefaultValue()) {
        String msg = UNIQUE_KEY + " field (" + uniqueKeyFieldName
            + ") can not be configured with a default value (" + uniqueKeyField.getDefaultValue() + ")";
        log.error(msg);
        throw new SolrException(ErrorCode.SERVER_ERROR, msg);
      }
      if (!uniqueKeyField.stored()) {
        log.warn(UNIQUE_KEY + " is not stored - distributed search and MoreLikeThis will not work");
      }
      if (uniqueKeyField.multiValued()) {
        String msg = UNIQUE_KEY + " field (" + uniqueKeyFieldName + ") can not be configured to be multivalued";
        log.error(msg);
        throw new SolrException(ErrorCode.SERVER_ERROR, msg);
      }
      uniqueKeyFieldName = uniqueKeyField.getName();
      uniqueKeyFieldType = uniqueKeyField.getType();
      // Unless the uniqueKeyField is marked 'required=false' then make sure it exists
      if (Boolean.FALSE != explicitRequiredProp.get(uniqueKeyFieldName)) {
        uniqueKeyField.required = true;
        requiredFields.add(uniqueKeyField);
      }
    }
    /////////////// parse out copyField commands ///////////////
    // Map<String,ArrayList<SchemaField>> cfields = new HashMap<String,ArrayList<SchemaField>>();
    // expression = "/schema/copyField";
    dynamicCopyFields = new DynamicCopy[] {};
    loadCopyFields(document, xpath);
    postReadInform();
  } catch (SolrException e) {
    throw new SolrException(ErrorCode.getErrorCode(e.code()),
        "Can't load schema " + loader.resourceLocation(resourceName) + ": " + e.getMessage(), e);
  } catch (Exception e) {
    // unexpected exception...
    throw new SolrException(ErrorCode.SERVER_ERROR,
        "Can't load schema " + loader.resourceLocation(resourceName) + ": " + e.getMessage(), e);
  }
  // create the field analyzers
  refreshAnalyzers();
  log.info("Loaded schema {}/{} with uniqueid field {}", name, version, uniqueKeyFieldName);
}
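Most of readSchema is plain JAXP XPath evaluation; stepsToPath only assembles expressions such as "/schema/@name" and "/schema/uniqueKey/text()". Here is a standalone sketch of those two lookups against a made-up two-element schema (not a real Solr config), using only the JDK:

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;

public class SchemaXPathSketch {
  public static void main(String[] args) throws Exception {
    // hypothetical minimal schema document
    String xml = "<schema name=\"example\" version=\"1.6\">"
        + "<uniqueKey>id</uniqueKey></schema>";
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
        .parse(new InputSource(new StringReader(xml)));
    XPath xpath = XPathFactory.newInstance().newXPath();
    // same expressions that stepsToPath builds above
    Node nameNode = (Node) xpath.evaluate("/schema/@name", doc, XPathConstants.NODE);
    Node keyNode = (Node) xpath.evaluate("/schema/uniqueKey/text()", doc, XPathConstants.NODE);
    System.out.println("schema name = " + nameNode.getNodeValue());      // example
    System.out.println("uniqueKey   = " + keyNode.getNodeValue().trim()); // id
  }
}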
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
Class HttpSolrCall, method processAliases.
protected void processAliases(Aliases aliases, List<String> collectionsList) {
  String collection = solrReq.getParams().get(COLLECTION_PROP);
  if (collection != null) {
    collectionsList = StrUtils.splitSmart(collection, ",", true);
  }
  if (collectionsList != null) {
    Set<String> newCollectionsList = new HashSet<>(collectionsList.size());
    for (String col : collectionsList) {
      String al = aliases.getCollectionAlias(col);
      if (al != null) {
        List<String> aliasList = StrUtils.splitSmart(al, ",", true);
        newCollectionsList.addAll(aliasList);
      } else {
        newCollectionsList.add(col);
      }
    }
    if (newCollectionsList.size() > 0) {
      StringBuilder collectionString = new StringBuilder();
      Iterator<String> it = newCollectionsList.iterator();
      int sz = newCollectionsList.size();
      for (int i = 0; i < sz; i++) {
        collectionString.append(it.next());
        if (i < newCollectionsList.size() - 1) {
          collectionString.append(",");
        }
      }
      ModifiableSolrParams params = new ModifiableSolrParams(solrReq.getParams());
      params.set(COLLECTION_PROP, collectionString.toString());
      solrReq.setParams(params);
    }
  }
}
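To make the alias expansion concrete: the sketch below swaps the cluster's Aliases object for a plain Map with a hypothetical alias "logs" -> "logs_2017,logs_2016", and uses a LinkedHashSet for deterministic output where the method above uses a HashSet; otherwise it mirrors the same expand-then-rewrite-params logic.

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.StrUtils;

public class AliasExpansionSketch {
  public static void main(String[] args) {
    // hypothetical alias table; processAliases gets this from Aliases.getCollectionAlias(col)
    Map<String, String> aliasTable = new HashMap<>();
    aliasTable.put("logs", "logs_2017,logs_2016");

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("collection", "logs,other");

    Set<String> expanded = new LinkedHashSet<>();
    for (String col : StrUtils.splitSmart(params.get("collection"), ",", true)) {
      String al = aliasTable.get(col);
      if (al != null) {
        expanded.addAll(StrUtils.splitSmart(al, ",", true));
      } else {
        expanded.add(col); // not an alias: keep the collection as-is
      }
    }
    // rewrite the param so downstream code sees only real collections
    params.set("collection", String.join(",", expanded));
    System.out.println(params); // collection=logs_2017,logs_2016,other (URL-encoded)
  }
}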
Use of org.apache.solr.common.params.ModifiableSolrParams in project lucene-solr by apache.
Class HttpSolrCall, method autoCreateSystemColl.
protected void autoCreateSystemColl(String corename) throws Exception {
  if (core == null
      && SYSTEM_COLL.equals(corename)
      && "POST".equals(req.getMethod())
      && !cores.getZkController().getClusterState().hasCollection(SYSTEM_COLL)) {
    log.info("Going to auto-create .system collection");
    SolrQueryResponse rsp = new SolrQueryResponse();
    String repFactor = String.valueOf(Math.min(3, cores.getZkController().getClusterState().getLiveNodes().size()));
    cores.getCollectionsHandler().handleRequestBody(
        new LocalSolrQueryRequest(null,
            new ModifiableSolrParams()
                .add(ACTION, CREATE.toString())
                .add(NAME, SYSTEM_COLL)
                .add(REPLICATION_FACTOR, repFactor)),
        rsp);
    if (rsp.getValues().get("success") == null) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          "Could not auto-create .system collection: " + Utils.toJSONString(rsp.getValues()));
    }
    TimeOut timeOut = new TimeOut(3, TimeUnit.SECONDS);
    for (; ; ) {
      if (cores.getZkController().getClusterState().getCollectionOrNull(SYSTEM_COLL) != null) {
        break;
      } else {
        if (timeOut.hasTimedOut()) {
          throw new SolrException(ErrorCode.SERVER_ERROR, "Could not find .system collection even after 3 seconds");
        }
        Thread.sleep(50);
      }
    }
    action = RETRY;
  }
}
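The collection-admin request above is built in one expression because ModifiableSolrParams.add returns the instance itself, so calls chain. A sketch of that pattern follows, with the string literals "action", "name" and "replicationFactor" standing in for the ACTION, NAME and REPLICATION_FACTOR constants (assumed to be their values) so the snippet is self-contained:

import org.apache.solr.common.params.ModifiableSolrParams;

public class ChainedParamsSketch {
  public static void main(String[] args) {
    // add(...) returns the ModifiableSolrParams itself, so calls chain
    ModifiableSolrParams params = new ModifiableSolrParams()
        .add("action", "CREATE")
        .add("name", ".system")
        .add("replicationFactor", "3");
    System.out.println(params); // action=CREATE&name=.system&replicationFactor=3 (URL-encoded)
  }
}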