Use of org.apache.solr.schema.SchemaField in project lucene-solr by apache.
From the class SchemaCodecFactory, the method init:
@Override
public void init(NamedList args) {
  super.init(args);
  assert codec == null;
  String compressionModeStr = (String) args.get(COMPRESSION_MODE);
  Mode compressionMode;
  if (compressionModeStr != null) {
    try {
      compressionMode = Mode.valueOf(compressionModeStr.toUpperCase(Locale.ROOT));
    } catch (IllegalArgumentException e) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid compressionMode: '" + compressionModeStr + "'. Value must be one of " + Arrays.toString(Mode.values()));
    }
    log.debug("Using compressionMode: " + compressionMode);
  } else {
    compressionMode = SOLR_DEFAULT_COMPRESSION_MODE;
    log.debug("Using default compressionMode: " + compressionMode);
  }
  codec = new Lucene70Codec(compressionMode) {
    @Override
    public PostingsFormat getPostingsFormatForField(String field) {
      final SchemaField schemaField = core.getLatestSchema().getFieldOrNull(field);
      if (schemaField != null) {
        String postingsFormatName = schemaField.getType().getPostingsFormat();
        if (postingsFormatName != null) {
          return PostingsFormat.forName(postingsFormatName);
        }
      }
      return super.getPostingsFormatForField(field);
    }

    @Override
    public DocValuesFormat getDocValuesFormatForField(String field) {
      final SchemaField schemaField = core.getLatestSchema().getFieldOrNull(field);
      if (schemaField != null) {
        String docValuesFormatName = schemaField.getType().getDocValuesFormat();
        if (docValuesFormatName != null) {
          return DocValuesFormat.forName(docValuesFormatName);
        }
      }
      return super.getDocValuesFormatForField(field);
    }
  };
}
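This wiring is what lets per-fieldType postingsFormat and docValuesFormat attributes in the schema take effect. As a rough illustration, a minimal sketch of driving init() by hand, assuming an args list shaped like the one Solr builds from the codecFactory entry in solrconfig.xml; the values are placeholders, and inform(SolrCore) must still run before the anonymous codec can consult core.getLatestSchema():

// Hedged sketch: exercise init() with hand-built args. Values are assumptions.
NamedList<Object> args = new NamedList<>();
args.add("compressionMode", "BEST_COMPRESSION"); // or "BEST_SPEED"
SchemaCodecFactory factory = new SchemaCodecFactory();
factory.init(args);
// factory.inform(core) must be called before the per-field callbacks are usable,
// since they resolve formats via core.getLatestSchema().
Codec codec = factory.getCodec();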
Use of org.apache.solr.schema.SchemaField in project lucene-solr by apache.
From the class QueryComponent, the method unmarshalSortValues:
protected NamedList unmarshalSortValues(SortSpec sortSpec, NamedList sortFieldValues, IndexSchema schema) {
  NamedList unmarshalledSortValsPerField = new NamedList();
  if (0 == sortFieldValues.size())
    return unmarshalledSortValsPerField;
  List<SchemaField> schemaFields = sortSpec.getSchemaFields();
  SortField[] sortFields = sortSpec.getSort().getSort();
  int marshalledFieldNum = 0;
  for (int sortFieldNum = 0; sortFieldNum < sortFields.length; sortFieldNum++) {
    final SortField sortField = sortFields[sortFieldNum];
    final SortField.Type type = sortField.getType();
    // :TODO: would be simpler to always serialize every position of SortField[]
    if (type == SortField.Type.SCORE || type == SortField.Type.DOC)
      continue;
    final String sortFieldName = sortField.getField();
    final String valueFieldName = sortFieldValues.getName(marshalledFieldNum);
    assert sortFieldName.equals(valueFieldName) : "sortFieldValues name key does not match expected SortField.getField";
    List sortVals = (List) sortFieldValues.getVal(marshalledFieldNum);
    final SchemaField schemaField = schemaFields.get(sortFieldNum);
    if (null == schemaField) {
      unmarshalledSortValsPerField.add(sortField.getField(), sortVals);
    } else {
      FieldType fieldType = schemaField.getType();
      List unmarshalledSortVals = new ArrayList();
      for (Object sortVal : sortVals) {
        unmarshalledSortVals.add(fieldType.unmarshalSortValue(sortVal));
      }
      unmarshalledSortValsPerField.add(sortField.getField(), unmarshalledSortVals);
    }
    marshalledFieldNum++;
  }
  return unmarshalledSortValsPerField;
}
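The per-field branch above undoes the marshalling that each shard applies before serializing its sort values. A minimal sketch of that round trip, assuming a field named "price"; the field name and value are illustrative, not taken from the method:

// Hedged sketch of the marshal/unmarshal round trip that unmarshalSortValues() completes.
SchemaField priceField = schema.getField("price");
FieldType ft = priceField.getType();
Object onTheWire = ft.marshalSortValue(42.0f);      // shard side, before serialization
Object restored = ft.unmarshalSortValue(onTheWire); // coordinator side, as done above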
Use of org.apache.solr.schema.SchemaField in project lucene-solr by apache.
From the class QueryElevationComponent, the method inform:
@Override
public void inform(SolrCore core) {
  IndexSchema schema = core.getLatestSchema();
  String a = initArgs.get(FIELD_TYPE);
  if (a != null) {
    FieldType ft = schema.getFieldTypes().get(a);
    if (ft == null) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown FieldType: '" + a + "' used in QueryElevationComponent");
    }
    analyzer = ft.getQueryAnalyzer();
  }
  SchemaField sf = schema.getUniqueKeyField();
  if (sf == null) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "QueryElevationComponent requires the schema to have a uniqueKeyField.");
  }
  idSchemaFT = sf.getType();
  idField = sf.getName();
  //register the EditorialMarkerFactory
  String excludeName = initArgs.get(QueryElevationParams.EXCLUDE_MARKER_FIELD_NAME, "excluded");
  if (excludeName == null || excludeName.equals("") == true) {
    excludeName = "excluded";
  }
  ExcludedMarkerFactory excludedMarkerFactory = new ExcludedMarkerFactory();
  core.addTransformerFactory(excludeName, excludedMarkerFactory);
  ElevatedMarkerFactory elevatedMarkerFactory = new ElevatedMarkerFactory();
  String markerName = initArgs.get(QueryElevationParams.EDITORIAL_MARKER_FIELD_NAME, "elevated");
  if (markerName == null || markerName.equals("") == true) {
    markerName = "elevated";
  }
  core.addTransformerFactory(markerName, elevatedMarkerFactory);
  forceElevation = initArgs.getBool(QueryElevationParams.FORCE_ELEVATION, forceElevation);
  try {
    synchronized (elevationCache) {
      elevationCache.clear();
      String f = initArgs.get(CONFIG_FILE);
      if (f == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "QueryElevationComponent must specify argument: '" + CONFIG_FILE + "' -- path to elevate.xml");
      }
      boolean exists = false;
      // check if using ZooKeeper
      ZkController zkController = core.getCoreContainer().getZkController();
      if (zkController != null) {
        // TODO : shouldn't have to keep reading the config name when it has been read before
        exists = zkController.configFileExists(zkController.getZkStateReader().readConfigName(core.getCoreDescriptor().getCloudDescriptor().getCollectionName()), f);
      } else {
        File fC = new File(core.getResourceLoader().getConfigDir(), f);
        File fD = new File(core.getDataDir(), f);
        if (fC.exists() == fD.exists()) {
          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "QueryElevationComponent missing config file: '" + f + "\n" + "either: " + fC.getAbsolutePath() + " or " + fD.getAbsolutePath() + " must exist, but not both.");
        }
        if (fC.exists()) {
          exists = true;
          log.info("Loading QueryElevation from: " + fC.getAbsolutePath());
          Config cfg = new Config(core.getResourceLoader(), f);
          elevationCache.put(null, loadElevationMap(cfg));
        }
      }
      //in other words, we think this is in the data dir, not the conf dir
      if (!exists) {
        // preload the first data
        RefCounted<SolrIndexSearcher> searchHolder = null;
        try {
          searchHolder = core.getNewestSearcher(false);
          IndexReader reader = searchHolder.get().getIndexReader();
          getElevationMap(reader, core);
        } finally {
          if (searchHolder != null)
            searchHolder.decref();
        }
      }
    }
  } catch (Exception ex) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error initializing QueryElevationComponent.", ex);
  }
}
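A hedged sketch of the init arguments this inform() expects, expressed as the params a typical solrconfig.xml registration would produce; FIELD_TYPE and CONFIG_FILE refer to the same constants read above, and the concrete values ("string", "elevate.xml") are assumptions for illustration:

// Hypothetical init args mirroring a solrconfig.xml searchComponent entry.
ModifiableSolrParams initArgs = new ModifiableSolrParams();
initArgs.set(FIELD_TYPE, "string");                       // analyzer source for query text
initArgs.set(CONFIG_FILE, "elevate.xml");                 // elevation rules file
initArgs.set(QueryElevationParams.FORCE_ELEVATION, true); // keep elevated docs first even when sorting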
Use of org.apache.solr.schema.SchemaField in project lucene-solr by apache.
From the class QueryElevationComponent, the method modifySortSpec:
private SortSpec modifySortSpec(SortSpec current, boolean force, ElevationComparatorSource comparator) {
  boolean modify = false;
  SortField[] currentSorts = current.getSort().getSort();
  List<SchemaField> currentFields = current.getSchemaFields();
  ArrayList<SortField> sorts = new ArrayList<>(currentSorts.length + 1);
  List<SchemaField> fields = new ArrayList<>(currentFields.size() + 1);
  // Perhaps force it to always sort by score
  if (force && currentSorts[0].getType() != SortField.Type.SCORE) {
    sorts.add(new SortField("_elevate_", comparator, true));
    fields.add(null);
    modify = true;
  }
  for (int i = 0; i < currentSorts.length; i++) {
    SortField sf = currentSorts[i];
    if (sf.getType() == SortField.Type.SCORE) {
      sorts.add(new SortField("_elevate_", comparator, !sf.getReverse()));
      fields.add(null);
      modify = true;
    }
    sorts.add(sf);
    fields.add(currentFields.get(i));
  }
  if (modify) {
    SortSpec newSpec = new SortSpec(new Sort(sorts.toArray(new SortField[sorts.size()])), fields, current.getCount(), current.getOffset());
    return newSpec;
  }
  return null;
}
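To make the effect concrete, a hedged sketch of calling this from inside the component (it is private) with a plain field sort and force=true; the field name "price", the count/offset, and the comparatorSource variable are assumptions:

// Hedged sketch: a user sort on "price" gets the synthetic "_elevate_" sort
// prepended when force=true, so elevated documents still rank first.
SortField priceSort = new SortField("price", SortField.Type.FLOAT);
List<SchemaField> userFields = Collections.singletonList(schema.getField("price"));
SortSpec userSpec = new SortSpec(new Sort(priceSort), userFields, 10, 0);
SortSpec forced = modifySortSpec(userSpec, true, comparatorSource);
// forced.getSort() is now roughly [_elevate_ (reverse), price]; a null return
// would have meant the original spec needed no modification.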
Use of org.apache.solr.schema.SchemaField in project lucene-solr by apache.
From the class PivotFacetProcessor, the method doPivots:
/**
 * Recursive function to compute all the pivot counts for the values under the specified field
 */
protected List<NamedList<Object>> doPivots(NamedList<Integer> superFacets, String field, String subField, Deque<String> fnames, Deque<String> vnames, ParsedParams parsed, List<StatsField> statsFields, List<FacetComponent.FacetBase> facetQueries, List<RangeFacetRequest> facetRanges) throws IOException {
  boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
  SolrIndexSearcher searcher = rb.req.getSearcher();
  // TODO: optimize to avoid converting to an external string and then having to convert back to internal below
  SchemaField sfield = searcher.getSchema().getField(field);
  FieldType ftype = sfield.getType();
  String nextField = fnames.poll();
  // re-usable BytesRefBuilder for conversion of term values to Objects
  BytesRefBuilder termval = new BytesRefBuilder();
  List<NamedList<Object>> values = new ArrayList<>(superFacets.size());
  for (Map.Entry<String, Integer> kv : superFacets) {
    // Only sub-facet if parent facet has positive count - still may not be any values for the sub-field though
    if (kv.getValue() >= getMinCountForField(field)) {
      final String fieldValue = kv.getKey();
      final int pivotCount = kv.getValue();
      SimpleOrderedMap<Object> pivot = new SimpleOrderedMap<>();
      pivot.add("field", field);
      if (null == fieldValue) {
        pivot.add("value", null);
      } else {
        ftype.readableToIndexed(fieldValue, termval);
        pivot.add("value", ftype.toObject(sfield, termval.get()));
      }
      pivot.add("count", pivotCount);
      final DocSet subset = getSubset(parsed.docs, sfield, fieldValue);
      addPivotQueriesAndRanges(pivot, params, subset, facetQueries, facetRanges);
      if (subField != null) {
        NamedList<Integer> facetCounts;
        if (!vnames.isEmpty()) {
          String val = vnames.pop();
          facetCounts = new NamedList<>();
          facetCounts.add(val, getSubsetSize(subset, searcher.getSchema().getField(subField), val));
        } else {
          facetCounts = this.getTermCountsForPivots(subField, parsed.withDocs(subset));
        }
        if (facetCounts.size() >= 1) {
          pivot.add("pivot", doPivots(facetCounts, subField, nextField, fnames, vnames, parsed.withDocs(subset), statsFields, facetQueries, facetRanges));
        }
      }
      if ((isShard || 0 < pivotCount) && !statsFields.isEmpty()) {
        Map<String, StatsValues> stv = new LinkedHashMap<>();
        for (StatsField statsField : statsFields) {
          stv.put(statsField.getOutputKey(), statsField.computeLocalStatsValues(subset));
        }
        pivot.add("stats", StatsComponent.convertToResponse(stv));
      }
      values.add(pivot);
    }
  }
  // put the field back on the list
  fnames.push(nextField);
  return values;
}
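The readable-to-indexed-to-Object conversion flagged by the TODO above can be shown in isolation. A minimal sketch of that round trip, with the field name "cat" and the value "electronics" assumed for illustration:

// Hedged sketch of the per-bucket value conversion performed in doPivots().
SchemaField sf = searcher.getSchema().getField("cat");
FieldType ft = sf.getType();
BytesRefBuilder indexedForm = new BytesRefBuilder();
ft.readableToIndexed("electronics", indexedForm);          // external (readable) -> indexed bytes
Object responseValue = ft.toObject(sf, indexedForm.get()); // indexed bytes -> Object for the response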