Example use of java.util.TreeMap in the elasticsearch project (by elastic): class OperationRoutingTests, method testBWC.
/**
 * Ensures that all changes to the hash-function / shard selection are BWC.
 *
 * The term-to-shard pairs below are the frozen contract: they record the shard each term
 * hashed to when this test was written. Any change to the routing hash or shard selection
 * that alters one of these assignments is a backwards-compatibility break and must fail here.
 * Do NOT regenerate these values to make the test pass.
 */
public void testBWC() {
    Map<String, Integer> termToShard = new TreeMap<>();
    termToShard.put("sEERfFzPSI", 1);
    termToShard.put("cNRiIrjzYd", 7);
    termToShard.put("BgfLBXUyWT", 5);
    termToShard.put("cnepjZhQnb", 3);
    termToShard.put("OKCmuYkeCK", 6);
    termToShard.put("OutXGRQUja", 5);
    termToShard.put("yCdyocKWou", 1);
    termToShard.put("KXuNWWNgVj", 2);
    termToShard.put("DGJOYrpESx", 4);
    termToShard.put("upLDybdTGs", 5);
    termToShard.put("yhZhzCPQby", 1);
    termToShard.put("EyCVeiCouA", 1);
    termToShard.put("tFyVdQauWR", 6);
    termToShard.put("nyeRYDnDQr", 6);
    termToShard.put("hswhrppvDH", 0);
    termToShard.put("BSiWvDOsNE", 5);
    termToShard.put("YHicpFBSaY", 1);
    termToShard.put("EquPtdKaBZ", 4);
    termToShard.put("rSjLZHCDfT", 5);
    termToShard.put("qoZALVcite", 7);
    termToShard.put("yDCCPVBiCm", 7);
    termToShard.put("ngizYtQgGK", 5);
    termToShard.put("FYQRIBcNqz", 0);
    termToShard.put("EBzEDAPODe", 2);
    termToShard.put("YePigbXgKb", 1);
    termToShard.put("PeGJjomyik", 3);
    termToShard.put("cyQIvDmyYD", 7);
    termToShard.put("yIEfZrYfRk", 5);
    termToShard.put("kblouyFUbu", 7);
    termToShard.put("xvIGbRiGJF", 3);
    termToShard.put("KWimwsREPf", 4);
    termToShard.put("wsNavvIcdk", 7);
    termToShard.put("xkWaPcCmpT", 0);
    termToShard.put("FKKTOnJMDy", 7);
    termToShard.put("RuLzobYixn", 2);
    termToShard.put("mFohLeFRvF", 4);
    termToShard.put("aAMXnamRJg", 7);
    termToShard.put("zKBMYJDmBI", 0);
    termToShard.put("ElSVuJQQuw", 7);
    termToShard.put("pezPtTQAAm", 7);
    termToShard.put("zBjjNEjAex", 2);
    termToShard.put("PGgHcLNPYX", 7);
    termToShard.put("hOkpeQqTDF", 3);
    termToShard.put("chZXraUPBH", 7);
    termToShard.put("FAIcSmmNXq", 5);
    termToShard.put("EZmDicyayC", 0);
    termToShard.put("GRIueBeIyL", 7);
    termToShard.put("qCChjGZYLp", 3);
    termToShard.put("IsSZQwwnUT", 3);
    termToShard.put("MGlxLFyyCK", 3);
    termToShard.put("YmscwrKSpB", 0);
    termToShard.put("czSljcjMop", 5);
    termToShard.put("XhfGWwNlng", 1);
    termToShard.put("cWpKJjlzgj", 7);
    termToShard.put("eDzIfMKbvk", 1);
    termToShard.put("WFFWYBfnTb", 0);
    termToShard.put("oDdHJxGxja", 7);
    termToShard.put("PDOQQqgIKE", 1);
    termToShard.put("bGEIEBLATe", 6);
    termToShard.put("xpRkJPWVpu", 2);
    termToShard.put("kTwZnPEeIi", 2);
    termToShard.put("DifcuqSsKk", 1);
    termToShard.put("CEmLmljpXe", 5);
    termToShard.put("cuNKtLtyJQ", 7);
    termToShard.put("yNjiAnxAmt", 5);
    termToShard.put("bVDJDCeaFm", 2);
    termToShard.put("vdnUhGLFtl", 0);
    termToShard.put("LnqSYezXbr", 5);
    termToShard.put("EzHgydDCSR", 3);
    termToShard.put("ZSKjhJlcpn", 1);
    termToShard.put("WRjUoZwtUz", 3);
    termToShard.put("RiBbcCdIgk", 4);
    termToShard.put("yizTqyjuDn", 4);
    termToShard.put("QnFjcpcZUT", 4);
    termToShard.put("agYhXYUUpl", 7);
    termToShard.put("UOjiTugjNC", 7);
    termToShard.put("nICGuWTdfV", 0);
    termToShard.put("NrnSmcnUVF", 2);
    termToShard.put("ZSzFcbpDqP", 3);
    termToShard.put("YOhahLSzzE", 5);
    termToShard.put("iWswCilUaT", 1);
    termToShard.put("zXAamKsRwj", 2);
    termToShard.put("aqGsrUPHFq", 5);
    termToShard.put("eDItImYWTS", 1);
    termToShard.put("JAYDZMRcpW", 4);
    termToShard.put("lmvAaEPflK", 7);
    termToShard.put("IKuOwPjKCx", 5);
    termToShard.put("schsINzlYB", 1);
    termToShard.put("OqbFNxrKrF", 2);
    termToShard.put("QrklDfvEJU", 6);
    termToShard.put("VLxKRKdLbx", 4);
    termToShard.put("imoydNTZhV", 1);
    termToShard.put("uFZyTyOMRO", 4);
    termToShard.put("nVAZVMPNNx", 3);
    termToShard.put("rPIdESYaAO", 5);
    termToShard.put("nbZWPWJsIM", 0);
    termToShard.put("wRZXPSoEgd", 3);
    termToShard.put("nGzpgwsSBc", 4);
    termToShard.put("AITyyoyLLs", 4);
    IndexMetaData metaData = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(8).numberOfReplicas(1).build();
    for (Map.Entry<String, Integer> entry : termToShard.entrySet()) {
        String key = entry.getKey();
        // Routing must be insensitive to whether the term arrives as the id or as a custom
        // routing value, so randomly exercise both code paths.
        int shard = randomBoolean() ? OperationRouting.generateShardId(metaData, key, null) : OperationRouting.generateShardId(metaData, "foobar", key);
        // JUnit's assertEquals takes (expected, actual): the recorded shard from the map is
        // the expectation; the freshly computed shard is the value under test. The original
        // code had these swapped, which inverts the failure message.
        assertEquals("shard id changed for term [" + key + "]", entry.getValue().intValue(), shard);
    }
}
Example use of java.util.TreeMap in the elasticsearch project (by elastic): class TransportAnalyzeAction, method extractExtendedAttributes.
/**
 * Extracts the remaining (non-core) attributes from the given {@link TokenStream} via
 * reflection, returning them as a sorted map keyed by attribute key.
 *
 * The four core attributes (char term, position increment, offset, type) are excluded
 * because they are reported separately. {@link BytesRef} values are converted to their
 * string representation.
 *
 * @param stream current TokenStream
 * @param includeAttributes lower-case attribute keys to keep; {@code null} or empty keeps all
 * @return sorted map of attribute key to value
 */
private static Map<String, Object> extractExtendedAttributes(TokenStream stream, final Set<String> includeAttributes) {
    final Map<String, Object> attrs = new TreeMap<>();
    stream.reflectWith((attClass, key, value) -> {
        // Skip the core attributes — they are surfaced elsewhere in the analyze response.
        boolean isCoreAttribute = CharTermAttribute.class.isAssignableFrom(attClass)
                || PositionIncrementAttribute.class.isAssignableFrom(attClass)
                || OffsetAttribute.class.isAssignableFrom(attClass)
                || TypeAttribute.class.isAssignableFrom(attClass);
        if (isCoreAttribute) {
            return;
        }
        // An absent/empty filter means "include everything"; otherwise match case-insensitively.
        boolean included = includeAttributes == null
                || includeAttributes.isEmpty()
                || includeAttributes.contains(key.toLowerCase(Locale.ROOT));
        if (included) {
            attrs.put(key, value instanceof BytesRef ? value.toString() : value);
        }
    });
    return attrs;
}
Example use of java.util.TreeMap in the elasticsearch project (by elastic): class SimpleQueryStringBuilder, method doToQuery.
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
    // Field names in the builder may contain wildcards etc.; resolve them against the
    // mapping here into a concrete field -> boost map.
    Map<String, Float> resolvedFieldsAndWeights = new TreeMap<>();
    if ((useAllFields != null && useAllFields) && (fieldsAndWeights.size() != 0)) {
        throw addValidationError("cannot use [all_fields] parameter in conjunction with [fields]", null);
    }
    Settings newSettings = new Settings(settings);
    // Query all fields either when explicitly requested, OR when every one of these holds:
    //  - the _all field is disabled,
    //  - the default_field setting has not been changed,
    //  - no fields were specified in the request.
    boolean explicitAllFields = this.useAllFields != null && this.useAllFields;
    boolean implicitAllFields = context.getMapperService().allEnabled() == false
            && "_all".equals(context.defaultField())
            && this.fieldsAndWeights.isEmpty();
    if (explicitAllFields || implicitAllFields) {
        resolvedFieldsAndWeights = QueryStringQueryBuilder.allQueryableDefaultFields(context);
        // "All-mode" spans fields of mismatched types, so force leniency unless the user
        // explicitly configured it.
        newSettings.lenient(lenientSet ? settings.lenient() : true);
    } else if (fieldsAndWeights.isEmpty()) {
        // No fields specified: fall back to the default field.
        resolvedFieldsAndWeights.put(resolveIndexName(context.defaultField(), context), AbstractQueryBuilder.DEFAULT_BOOST);
    } else {
        for (Map.Entry<String, Float> fieldEntry : fieldsAndWeights.entrySet()) {
            String fieldPattern = fieldEntry.getKey();
            Float weight = fieldEntry.getValue();
            if (Regex.isSimpleMatchPattern(fieldPattern)) {
                // Wildcard pattern: expand to every matching index field, same weight each.
                for (String concreteField : context.getMapperService().simpleMatchToIndexNames(fieldPattern)) {
                    resolvedFieldsAndWeights.put(concreteField, weight);
                }
            } else {
                resolvedFieldsAndWeights.put(resolveIndexName(fieldPattern, context), weight);
            }
        }
    }
    // Resolve the analyzer: the mapper's search analyzer by default, otherwise the named one.
    Analyzer luceneAnalyzer = analyzer == null
            ? context.getMapperService().searchAnalyzer()
            : context.getIndexAnalyzers().get(analyzer);
    if (analyzer != null && luceneAnalyzer == null) {
        throw new QueryShardException(context, "[" + SimpleQueryStringBuilder.NAME + "] analyzer [" + analyzer + "] not found");
    }
    SimpleQueryParser sqp = new SimpleQueryParser(luceneAnalyzer, resolvedFieldsAndWeights, flags, newSettings, context);
    sqp.setDefaultOperator(defaultOperator.toBooleanClauseOccur());
    Query query = sqp.parse(queryText);
    return Queries.maybeApplyMinimumShouldMatch(query, minimumShouldMatch);
}
Example use of java.util.TreeMap in the elasticsearch project (by elastic): class WordDelimiterTokenFilterFactory, method parseTypes.
/**
 * Parses a list of MappingCharFilter-style rules into a custom byte[] character-type table
 * for the word delimiter filter.
 *
 * The table covers every mapped character and is padded with Lucene's default types so it is
 * always at least as large as {@code WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE} (for
 * performance). An empty rule collection yields the default table; the original code threw
 * an undocumented {@code NoSuchElementException} from {@code lastKey()} in that case.
 *
 * @param rules raw rule strings; each must map exactly one character to a known type name
 * @return character-type lookup table indexed by character value
 * @throws RuntimeException if a rule does not match the rule pattern, maps more than one
 *         character, or names an unknown type
 */
static byte[] parseTypes(Collection<String> rules) {
    SortedMap<Character, Byte> typeMap = new TreeMap<>();
    for (String rule : rules) {
        Matcher m = typePattern.matcher(rule);
        if (!m.find()) {
            throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]");
        }
        String lhs = parseString(m.group(1).trim());
        Byte rhs = parseType(m.group(2).trim());
        if (lhs.length() != 1) {
            throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]. Only a single character is allowed.");
        }
        if (rhs == null) {
            throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]. Illegal type.");
        }
        typeMap.put(lhs.charAt(0), rhs);
    }
    // Size the table to the highest mapped character, but never smaller than the default
    // table; guard against an empty map, whose lastKey() would throw.
    int highestMappedChar = typeMap.isEmpty() ? -1 : typeMap.lastKey();
    byte[] types = new byte[Math.max(highestMappedChar + 1, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE.length)];
    // Start from Lucene's default type for every slot, then overlay the custom mappings.
    for (int i = 0; i < types.length; i++) {
        types[i] = WordDelimiterIterator.getType(i);
    }
    for (Map.Entry<Character, Byte> mapping : typeMap.entrySet()) {
        types[mapping.getKey()] = mapping.getValue();
    }
    return types;
}
Example use of java.util.TreeMap in the buck project (by facebook): class VersionedTargetGraphBuilder, method getTranslateBuildTarget.
/**
 * @return the {@link BuildTarget} to use in the resolved target graph, formed by adding a
 * flavor generated from the given version selections; empty when the target is unchanged.
 */
private Optional<BuildTarget> getTranslateBuildTarget(TargetNode<?, ?> node, ImmutableMap<BuildTarget, Version> selectedVersions) {
    BuildTarget originalTarget = node.getBuildTarget();
    TargetNode<?, ?> resolvedNode = resolveVersions(node, selectedVersions);
    BuildTarget translatedTarget = resolvedNode.getBuildTarget();
    if (TargetGraphVersionTransformations.isVersionPropagator(resolvedNode)) {
        // Collect the selected version for each dep in this node's version domain;
        // a TreeMap keeps the entries in a deterministic order for flavor generation.
        VersionInfo versionInfo = getVersionInfo(resolvedNode);
        TreeMap<BuildTarget, Version> selectedDepVersions = new TreeMap<>();
        for (BuildTarget versionedDep : versionInfo.getVersionDomain().keySet()) {
            selectedDepVersions.put(versionedDep, selectedVersions.get(versionedDep));
        }
        if (!selectedDepVersions.isEmpty()) {
            // Encode the version selections as a flavor appended to the target.
            translatedTarget = resolvedNode.getBuildTarget().withAppendedFlavors(getVersionedFlavor(selectedDepVersions));
        }
    }
    // Only report a translation when the target actually changed.
    return translatedTarget.equals(originalTarget) ? Optional.empty() : Optional.of(translatedTarget);
}
Aggregations