Use of datawave.query.attributes.UniqueFields in project datawave by NationalSecurityAgency.
The tearDown method of the UniqueTransformTest class:
@After
public void tearDown() throws Exception {
    inputDocuments.clear();
    expectedUniqueDocuments.clear();
    uniqueFields = new UniqueFields();
    expectedOrderedFieldSets.clear();
}
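For context, a minimal sketch of how a test might populate the UniqueFields instance that this tearDown resets. UniqueFields.put, UniqueFields.from, and UniqueGranularity.ALL all appear elsewhere on this page; the field names and the plain comma-separated parameter format are illustrative assumptions.

// Sketch (not from the datawave sources): populating UniqueFields in a test.
UniqueFields uniqueFields = new UniqueFields();
uniqueFields.put("FIELD_A", UniqueGranularity.ALL);
// The same structure can also be built by parsing a parameter string,
// assuming plain field names default to the ALL granularity.
UniqueFields parsed = UniqueFields.from("FIELD_A,FIELD_B");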
Use of datawave.query.attributes.UniqueFields in project datawave by NationalSecurityAgency.
The loadQueryParameters method of the ShardQueryLogic class:
protected void loadQueryParameters(ShardQueryConfiguration config, Query settings) throws QueryException {
    TraceStopwatch stopwatch = config.getTimers().newStartedStopwatch("ShardQueryLogic - Parse query parameters");
    boolean rawDataOnly = false;
    String rawDataOnlyStr = settings.findParameter(QueryParameters.RAW_DATA_ONLY).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(rawDataOnlyStr)) {
        rawDataOnly = Boolean.valueOf(rawDataOnlyStr);
        // note that if any of these other options are set, then they override the settings here
        if (rawDataOnly) {
            // set the grouping context to true to ensure we get the full field names
            this.setIncludeGroupingContext(true);
            config.setIncludeGroupingContext(true);
            // set the hierarchy fields to false as they are generated fields
            this.setIncludeHierarchyFields(false);
            config.setIncludeHierarchyFields(false);
            // set the datatype field to false as it is a generated field
            this.setIncludeDataTypeAsField(false);
            config.setIncludeDataTypeAsField(false);
            // do not include the record id
            this.setIncludeRecordId(false);
            config.setIncludeRecordId(false);
            // set the hit list to false as it is a generated field
            this.setHitList(false);
            config.setHitList(false);
            // set the raw types to true to avoid any type transformations of the values
            config.setRawTypes(true);
            // do not filter masked values
            this.setFilterMaskedValues(false);
            config.setFilterMaskedValues(false);
            // do not reduce the response
            this.setReducedResponse(false);
            config.setReducedResponse(false);
            // clear the content field names to prevent content field transformations (see DocumentTransformer)
            this.setContentFieldNames(Collections.EMPTY_LIST);
            // clear the model name to avoid field name translations
            this.setModelName(null);
            config.setModelName(null);
        }
    }
    // Get the datatype set if specified
    String typeList = settings.findParameter(QueryParameters.DATATYPE_FILTER_SET).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(typeList)) {
        HashSet<String> typeFilter = new HashSet<>();
        typeFilter.addAll(Arrays.asList(StringUtils.split(typeList, Constants.PARAM_VALUE_SEP)));
        if (log.isDebugEnabled()) {
            log.debug("Type Filter: " + typeFilter);
        }
        config.setDatatypeFilter(typeFilter);
    }
    // Get the list of fields to project up the stack. May be null.
    String projectFields = settings.findParameter(QueryParameters.RETURN_FIELDS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(projectFields)) {
        List<String> projectFieldsList = Arrays.asList(StringUtils.split(projectFields, Constants.PARAM_VALUE_SEP));
        // Only set the projection fields if we were actually given some
        if (!projectFieldsList.isEmpty()) {
            config.setProjectFields(new HashSet<>(projectFieldsList));
            if (log.isDebugEnabled()) {
                final int maxLen = 100;
                // Trim the projection for logging if it is excessively long
                projectFields = maxLen < projectFields.length() ? projectFields.substring(0, maxLen) + "[TRUNCATED]" : projectFields;
                log.debug("Projection fields: " + projectFields);
            }
        }
    }
    // if TRANFORM_CONTENT_TO_UID is false, then unset the list of content field names, preventing the
    // DocumentTransformer from transforming them.
    String transformContentStr = settings.findParameter(QueryParameters.TRANFORM_CONTENT_TO_UID).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(transformContentStr)) {
        if (!Boolean.valueOf(transformContentStr)) {
            setContentFieldNames(Collections.EMPTY_LIST);
        }
    }
    // Get the list of blacklisted fields. May be null.
    String tBlacklistedFields = settings.findParameter(QueryParameters.BLACKLISTED_FIELDS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(tBlacklistedFields)) {
        List<String> blacklistedFieldsList = Arrays.asList(StringUtils.split(tBlacklistedFields, Constants.PARAM_VALUE_SEP));
        // Only set the blacklisted fields if we were actually given some
        if (!blacklistedFieldsList.isEmpty()) {
            if (!config.getProjectFields().isEmpty()) {
                throw new QueryException("Whitelist and blacklist projection options are mutually exclusive");
            }
            config.setBlacklistedFields(new HashSet<>(blacklistedFieldsList));
            if (log.isDebugEnabled()) {
                log.debug("Blacklisted fields: " + tBlacklistedFields);
            }
        }
    }
    // Get the LIMIT_FIELDS parameter if given
    String limitFields = settings.findParameter(QueryParameters.LIMIT_FIELDS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(limitFields)) {
        List<String> limitFieldsList = Arrays.asList(StringUtils.split(limitFields, Constants.PARAM_VALUE_SEP));
        // Only set the limit fields if we were actually given some
        if (!limitFieldsList.isEmpty()) {
            config.setLimitFields(new HashSet<>(limitFieldsList));
        }
    }
    String limitFieldsPreQueryEvaluation = settings.findParameter(QueryOptions.LIMIT_FIELDS_PRE_QUERY_EVALUATION).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(limitFieldsPreQueryEvaluation)) {
        Boolean limitFieldsPreQueryEvaluationValue = Boolean.parseBoolean(limitFieldsPreQueryEvaluation);
        this.setLimitFieldsPreQueryEvaluation(limitFieldsPreQueryEvaluationValue);
        config.setLimitFieldsPreQueryEvaluation(limitFieldsPreQueryEvaluationValue);
    }
    String limitFieldsField = settings.findParameter(QueryOptions.LIMIT_FIELDS_FIELD).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(limitFieldsField)) {
        this.setLimitFieldsField(limitFieldsField);
        config.setLimitFieldsField(limitFieldsField);
    }
    // Get the GROUP_FIELDS parameter if given
    String groupFields = settings.findParameter(QueryParameters.GROUP_FIELDS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(groupFields)) {
        List<String> groupFieldsList = Arrays.asList(StringUtils.split(groupFields, Constants.PARAM_VALUE_SEP));
        // Only set the group fields if we were actually given some
        if (!groupFieldsList.isEmpty()) {
            this.setGroupFields(new HashSet<>(groupFieldsList));
            config.setGroupFields(new HashSet<>(groupFieldsList));
            config.setProjectFields(new HashSet<>(groupFieldsList));
        }
    }
    String groupFieldsBatchSizeString = settings.findParameter(QueryParameters.GROUP_FIELDS_BATCH_SIZE).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(groupFieldsBatchSizeString)) {
        int groupFieldsBatchSize = Integer.parseInt(groupFieldsBatchSizeString);
        this.setGroupFieldsBatchSize(groupFieldsBatchSize);
        config.setGroupFieldsBatchSize(groupFieldsBatchSize);
    }
    // Get the UNIQUE_FIELDS parameter if given
    String uniqueFieldsParam = settings.findParameter(QueryParameters.UNIQUE_FIELDS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(uniqueFieldsParam)) {
        UniqueFields uniqueFields = UniqueFields.from(uniqueFieldsParam);
        // Only set the unique fields if we were actually given some
        if (!uniqueFields.isEmpty()) {
            this.setUniqueFields(uniqueFields);
            config.setUniqueFields(uniqueFields);
        }
    }
    // Get the HIT_LIST parameter if given
    String hitListString = settings.findParameter(QueryParameters.HIT_LIST).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(hitListString)) {
        Boolean hitListBool = Boolean.parseBoolean(hitListString);
        config.setHitList(hitListBool);
    }
    // Get the BYPASS_ACCUMULO parameter if given
    String bypassAccumuloString = settings.findParameter(BYPASS_ACCUMULO).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(bypassAccumuloString)) {
        Boolean bypassAccumuloBool = Boolean.parseBoolean(bypassAccumuloString);
        config.setBypassAccumulo(bypassAccumuloBool);
    }
    // Get the DATE_INDEX_TIME_TRAVEL parameter if given
    String dateIndexTimeTravelString = settings.findParameter(QueryOptions.DATE_INDEX_TIME_TRAVEL).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(dateIndexTimeTravelString)) {
        Boolean dateIndexTimeTravel = Boolean.parseBoolean(dateIndexTimeTravelString);
        config.setDateIndexTimeTravel(dateIndexTimeTravel);
    }
    // Get the RAW_TYPES parameter if given
    String rawTypesString = settings.findParameter(QueryParameters.RAW_TYPES).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(rawTypesString)) {
        Boolean rawTypesBool = Boolean.parseBoolean(rawTypesString);
        config.setRawTypes(rawTypesBool);
    }
    // Get the FILTER_MASKED_VALUES spring setting
    String filterMaskedValuesStr = settings.findParameter(QueryParameters.FILTER_MASKED_VALUES).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(filterMaskedValuesStr)) {
        Boolean filterMaskedValuesBool = Boolean.parseBoolean(filterMaskedValuesStr);
        this.setFilterMaskedValues(filterMaskedValuesBool);
        config.setFilterMaskedValues(filterMaskedValuesBool);
    }
    // Get the INCLUDE_DATATYPE_AS_FIELD spring setting
    String includeDatatypeAsFieldStr = settings.findParameter(QueryParameters.INCLUDE_DATATYPE_AS_FIELD).getParameterValue().trim();
    if (((org.apache.commons.lang.StringUtils.isNotBlank(includeDatatypeAsFieldStr) && Boolean.valueOf(includeDatatypeAsFieldStr))) || (this.getIncludeDataTypeAsField() && !rawDataOnly)) {
        config.setIncludeDataTypeAsField(true);
    }
    // Get the INCLUDE_RECORD_ID spring setting
    String includeRecordIdStr = settings.findParameter(QueryParameters.INCLUDE_RECORD_ID).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(includeRecordIdStr)) {
        boolean includeRecordIdBool = Boolean.parseBoolean(includeRecordIdStr) && !rawDataOnly;
        this.setIncludeRecordId(includeRecordIdBool);
        config.setIncludeRecordId(includeRecordIdBool);
    }
    // Get the INCLUDE_HIERARCHY_FIELDS spring setting
    String includeHierarchyFieldsStr = settings.findParameter(QueryParameters.INCLUDE_HIERARCHY_FIELDS).getParameterValue().trim();
    if (((org.apache.commons.lang.StringUtils.isNotBlank(includeHierarchyFieldsStr) && Boolean.valueOf(includeHierarchyFieldsStr))) || (this.getIncludeHierarchyFields() && !rawDataOnly)) {
        config.setIncludeHierarchyFields(true);
        final Map<String, String> options = this.getHierarchyFieldOptions();
        config.setHierarchyFieldOptions(options);
    }
    // Get the query profile to allow us to select the tuning profile of the query
    String queryProfile = settings.findParameter(QueryParameters.QUERY_PROFILE).getParameterValue().trim();
    if ((org.apache.commons.lang.StringUtils.isNotBlank(queryProfile))) {
        selectedProfile = configuredProfiles.get(queryProfile);
        if (null == selectedProfile) {
            throw new QueryException(QueryParameters.QUERY_PROFILE + " has been specified but " + queryProfile + " is not a selectable profile");
        }
    }
    // Get the include.grouping.context = true/false spring setting
    String includeGroupingContextStr = settings.findParameter(QueryParameters.INCLUDE_GROUPING_CONTEXT).getParameterValue().trim();
    if (((org.apache.commons.lang.StringUtils.isNotBlank(includeGroupingContextStr) && Boolean.valueOf(includeGroupingContextStr))) || (this.getIncludeGroupingContext() && !rawDataOnly)) {
        config.setIncludeGroupingContext(true);
    }
    // Check if the default modelName and modelTableName have been overridden by custom parameters.
    String parameterModelName = settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(parameterModelName)) {
        this.setModelName(parameterModelName);
    }
    config.setModelName(this.getModelName());
    String parameterModelTableName = settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(parameterModelTableName)) {
        this.setModelTableName(parameterModelTableName);
    }
    if (null != config.getModelName() && null == config.getModelTableName()) {
        throw new IllegalArgumentException(QueryParameters.PARAMETER_MODEL_NAME + " has been specified but " + QueryParameters.PARAMETER_MODEL_TABLE_NAME + " is missing. Both are required to use a model");
    }
    configureDocumentAggregation(settings);
    config.setLimitTermExpansionToModel(this.isExpansionLimitedToModelContents());
    String reducedResponseStr = settings.findParameter(QueryOptions.REDUCED_RESPONSE).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(reducedResponseStr)) {
        Boolean reducedResponseValue = Boolean.parseBoolean(reducedResponseStr);
        this.setReducedResponse(reducedResponseValue);
        config.setReducedResponse(reducedResponseValue);
    }
    final String postProcessingClasses = settings.findParameter(QueryOptions.POSTPROCESSING_CLASSES).getParameterValue().trim();
    final String postProcessingOptions = settings.findParameter(QueryOptions.POSTPROCESSING_OPTIONS).getParameterValue().trim();
    // build the post processing filter classes and options
    if (org.apache.commons.lang.StringUtils.isNotBlank(postProcessingClasses)) {
        List<String> filterClasses = config.getFilterClassNames();
        if (null == filterClasses) {
            filterClasses = new ArrayList<>();
        }
        for (String fClassName : StringUtils.splitIterable(postProcessingClasses, ',', true)) {
            filterClasses.add(fClassName);
        }
        config.setFilterClassNames(filterClasses);
        final Map<String, String> options = this.getFilterOptions();
        if (null != options) {
            config.putFilterOptions(options);
        }
        if (org.apache.commons.lang.StringUtils.isNotBlank(postProcessingOptions)) {
            for (String filterOptionStr : StringUtils.splitIterable(postProcessingOptions, ',', true)) {
                if (org.apache.commons.lang.StringUtils.isNotBlank(filterOptionStr)) {
                    final String filterValueString = settings.findParameter(filterOptionStr).getParameterValue().trim();
                    if (org.apache.commons.lang.StringUtils.isNotBlank(filterValueString)) {
                        config.putFilterOptions(filterOptionStr, filterValueString);
                    }
                }
            }
        }
    }
    String tCompressServerSideResults = settings.findParameter(QueryOptions.COMPRESS_SERVER_SIDE_RESULTS).getParameterValue().trim();
    if (org.apache.commons.lang.StringUtils.isNotBlank(tCompressServerSideResults)) {
        boolean compress = Boolean.parseBoolean(tCompressServerSideResults);
        config.setCompressServerSideResults(compress);
    }
    // Configure index-only filter functions to be enabled if not already set to such a state
    config.setIndexOnlyFilterFunctionsEnabled(this.isIndexOnlyFilterFunctionsEnabled());
    // Set the ReturnType for Documents coming out of the iterator stack
    config.setReturnType(DocumentSerialization.getReturnType(settings));
    QueryLogicTransformer transformer = getTransformer(settings);
    if (transformer instanceof WritesQueryMetrics) {
        String logTimingDetailsStr = settings.findParameter(QueryOptions.LOG_TIMING_DETAILS).getParameterValue().trim();
        if (org.apache.commons.lang.StringUtils.isNotBlank(logTimingDetailsStr)) {
            setLogTimingDetails(Boolean.valueOf(logTimingDetailsStr));
        }
        if (getLogTimingDetails()) {
            // we have to collect the timing details on the iterator stack in order to log them
            setCollectTimingDetails(true);
        } else {
            String collectTimingDetailsStr = settings.findParameter(QueryOptions.COLLECT_TIMING_DETAILS).getParameterValue().trim();
            if (org.apache.commons.lang.StringUtils.isNotBlank(collectTimingDetailsStr)) {
                setCollectTimingDetails(Boolean.valueOf(collectTimingDetailsStr));
            }
        }
    } else {
        // if the transformer cannot process the timing metrics, then turn them off
        setLogTimingDetails(false);
        setCollectTimingDetails(false);
    }
    stopwatch.stop();
    if (null != selectedProfile) {
        selectedProfile.configure(this);
        selectedProfile.configure(config);
        selectedProfile.configure(planner);
    }
}
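Viewed in isolation, the UNIQUE_FIELDS handling in this method follows a small read-parse-apply pattern. A minimal sketch of just that pattern, using only calls shown above; the parameter value is illustrative:

// Sketch of the UNIQUE_FIELDS handling above; the parameter value is made up.
String uniqueFieldsParam = "FIELD_A,FIELD_B"; // as returned by settings.findParameter(...).getParameterValue().trim()
UniqueFields uniqueFields = UniqueFields.from(uniqueFieldsParam);
if (!uniqueFields.isEmpty()) {
    // mirrored on both the query logic and the configuration, as in the method above
    config.setUniqueFields(uniqueFields);
}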
Use of datawave.query.attributes.UniqueFields in project datawave by NationalSecurityAgency.
The apply method of the QueryOptionsSwitch class:
public static void apply(Map<String, String> optionsMap, ShardQueryConfiguration config) {
    for (Map.Entry<String, String> entry : optionsMap.entrySet()) {
        String key = entry.getKey();
        String value = entry.getValue();
        switch (key) {
            case QueryParameters.INCLUDE_GROUPING_CONTEXT:
                config.setIncludeGroupingContext(Boolean.parseBoolean(value));
                break;
            case QueryParameters.HIT_LIST:
                config.setHitList(Boolean.parseBoolean(value));
                break;
            case QueryParameters.LIMIT_FIELDS:
                String[] lf = StringUtils.split(value, Constants.PARAM_VALUE_SEP);
                config.setLimitFields(Sets.newHashSet(lf));
                break;
            case QueryParameters.GROUP_FIELDS:
                String[] groups = StringUtils.split(value, Constants.PARAM_VALUE_SEP);
                config.setGroupFields(Sets.newHashSet(groups));
                config.setProjectFields(Sets.newHashSet(groups));
                break;
            case QueryParameters.GROUP_FIELDS_BATCH_SIZE:
                try {
                    config.setGroupFieldsBatchSize(Integer.parseInt(value));
                } catch (Exception ex) {
                    log.warn("Could not parse " + value + " as group.fields.batch.size");
                }
                break;
            case QueryParameters.UNIQUE_FIELDS:
                UniqueFields uniqueFields = UniqueFields.from(value);
                config.setUniqueFields(uniqueFields);
                break;
        }
    }
}
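A hedged usage sketch of this switch: the option keys come from the cases above, while the map values are illustrative.

// Illustrative invocation; ShardQueryConfiguration.create() appears later on this page.
Map<String, String> optionsMap = new HashMap<>();
optionsMap.put(QueryParameters.HIT_LIST, "true");
optionsMap.put(QueryParameters.UNIQUE_FIELDS, "FIELD_A,FIELD_B");
ShardQueryConfiguration config = ShardQueryConfiguration.create();
QueryOptionsSwitch.apply(optionsMap, config);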
Use of datawave.query.attributes.UniqueFields in project datawave by NationalSecurityAgency.
The testDeepCopyConstructor method of the ShardQueryConfigurationTest class:
/**
 * Test that the collections stored in a ShardQueryConfiguration are in fact deep-copied into a new ShardQueryConfiguration object.
 */
@Test
public void testDeepCopyConstructor() {
    // Instantiate an 'other' ShardQueryConfiguration
    ShardQueryConfiguration other = ShardQueryConfiguration.create();
    // Set up collections for the deep copy
    List<String> realmSuffixExclusionPatterns = Lists.newArrayList("somePattern");
    SimpleDateFormat shardDateFormatter = new SimpleDateFormat("yyyyMMdd");
    List<String> enricherClassNames = Lists.newArrayList("enricherClassNameA");
    List<String> filterClassNames = Lists.newArrayList("filterClassNameA");
    List<String> indexFilteringClassNames = Lists.newArrayList("indexFilteringClassNameA");
    Set<String> nonEventKeyPrefixes = Sets.newHashSet("nonEventKeyPrefixA");
    Set<String> unevaluatedFields = Sets.newHashSet("unevaluatedFieldA");
    Set<String> dataTypeFilter = Sets.newHashSet("dataTypeFilterA");
    IndexHole indexHole = new IndexHole(new String[] { "0", "1" }, new String[] { "2", "3" });
    List<IndexHole> indexHoles = Lists.newArrayList(indexHole);
    Set<String> projectFields = Sets.newHashSet("projectFieldA");
    Set<String> blacklistedFields = Sets.newHashSet("blacklistedFieldA");
    Set<String> indexedFields = Sets.newHashSet("indexedFieldA");
    Set<String> normalizedFields = Sets.newHashSet("normalizedFieldA");
    Multimap<String, Type<?>> dataTypes = HashMultimap.create();
    dataTypes.put("K001", new NoOpType("V"));
    Multimap<String, Type<?>> queryFieldsDatatypes = HashMultimap.create();
    queryFieldsDatatypes.put("K002", new NoOpType("V"));
    Multimap<String, Type<?>> normalizedFieldsDatatypes = HashMultimap.create();
    normalizedFieldsDatatypes.put("K003", new NoOpType("V"));
    Multimap<String, String> compositeToFieldMap = HashMultimap.create();
    compositeToFieldMap.put("K004", "V");
    Map<String, DiscreteIndexType<?>> fieldToDiscreteIndexType = Maps.newHashMap();
    fieldToDiscreteIndexType.put("GEO", new GeometryType());
    Map<String, Date> compositeTransitionDates = Maps.newHashMap();
    Date transitionDate = new Date();
    compositeTransitionDates.put("K005", transitionDate);
    Map<String, String> compositeFieldSeparators = Maps.newHashMap();
    compositeFieldSeparators.put("GEO", " ");
    Set<String> queryTermFrequencyFields = Sets.newHashSet("fieldA");
    Set<String> limitFields = Sets.newHashSet("limitFieldA");
    Map<String, String> hierarchyFieldOptions = Maps.newHashMap();
    hierarchyFieldOptions.put("K006", "V");
    List<String> documentPermutations = Lists.newArrayList(DocumentPermutation.class.getName());
    QueryModel queryModel = new QueryModel();
    QueryImpl query = new QueryImpl();
    Set<String> groupFields = Sets.newHashSet("groupFieldA");
    UniqueFields uniqueFields = new UniqueFields();
    uniqueFields.put("uniqueFieldA", UniqueGranularity.ALL);
    List<String> contentFieldNames = Lists.newArrayList("fieldA");
    Set<String> noExpansionFields = Sets.newHashSet("NoExpansionFieldA");
    Set<String> disallowedRegexPatterns = Sets.newHashSet(".*", ".*?");
    // Set collections on 'other' ShardQueryConfiguration
    other.setRealmSuffixExclusionPatterns(realmSuffixExclusionPatterns);
    other.setShardDateFormatter(shardDateFormatter);
    other.setEnricherClassNames(enricherClassNames);
    other.setFilterClassNames(filterClassNames);
    other.setIndexFilteringClassNames(indexFilteringClassNames);
    other.setNonEventKeyPrefixes(nonEventKeyPrefixes);
    other.setUnevaluatedFields(unevaluatedFields);
    other.setDatatypeFilter(dataTypeFilter);
    other.setIndexHoles(indexHoles);
    other.setProjectFields(projectFields);
    other.setBlacklistedFields(blacklistedFields);
    other.setIndexedFields(indexedFields);
    other.setNormalizedFields(normalizedFields);
    other.setDataTypes(dataTypes);
    other.setQueryFieldsDatatypes(queryFieldsDatatypes);
    other.setNormalizedFieldsDatatypes(normalizedFieldsDatatypes);
    other.setCompositeToFieldMap(compositeToFieldMap);
    other.setFieldToDiscreteIndexTypes(fieldToDiscreteIndexType);
    other.setCompositeTransitionDates(compositeTransitionDates);
    other.setCompositeFieldSeparators(compositeFieldSeparators);
    other.setQueryTermFrequencyFields(queryTermFrequencyFields);
    other.setLimitFields(limitFields);
    other.setHierarchyFieldOptions(hierarchyFieldOptions);
    other.setDocumentPermutations(documentPermutations);
    other.setQueryModel(queryModel);
    other.setQuery(query);
    other.setGroupFields(groupFields);
    other.setUniqueFields(uniqueFields);
    other.setContentFieldNames(contentFieldNames);
    other.setNoExpansionFields(noExpansionFields);
    other.setDisallowedRegexPatterns(disallowedRegexPatterns);
    // Copy 'other' ShardQueryConfiguration into a new config
    ShardQueryConfiguration config = ShardQueryConfiguration.create(other);
    // Modify original collections
    realmSuffixExclusionPatterns.add("anotherPattern");
    shardDateFormatter = new SimpleDateFormat("yyyyMMdd-mm:SS");
    enricherClassNames.add("enricherClassNameB");
    filterClassNames.add("filterClassNameB");
    indexFilteringClassNames.add("indexFilteringClassNameB");
    nonEventKeyPrefixes.add("nonEventKeyPrefixB");
    unevaluatedFields.add("unevaluatedFieldB");
    dataTypeFilter.add("dataTypeFilterB");
    IndexHole otherIndexHole = new IndexHole(new String[] { "4", "5" }, new String[] { "6", "7" });
    indexHoles.add(otherIndexHole);
    projectFields.add("projectFieldB");
    blacklistedFields.add("blacklistedFieldB");
    indexedFields.add("indexedFieldB");
    normalizedFields.add("normalizedFieldB");
    dataTypes.put("K2", new NoOpType("V2"));
    queryFieldsDatatypes.put("K", new NoOpType("V2"));
    normalizedFieldsDatatypes.put("K2", new NoOpType("V2"));
    compositeToFieldMap.put("K2", "V2");
    queryTermFrequencyFields.add("fieldB");
    limitFields.add("limitFieldB");
    hierarchyFieldOptions.put("K2", "V2");
    documentPermutations.add(DocumentProjection.class.getName());
    queryModel.addTermToModel("aliasA", "diskNameA");
    query.setId(UUID.randomUUID());
    groupFields.add("groupFieldB");
    uniqueFields.put("uniqueFieldB", UniqueGranularity.ALL);
    contentFieldNames.add("fieldB");
    disallowedRegexPatterns.add("blah");
    // Assert that copied collections were deep copied and remain unchanged
    Assert.assertEquals(Lists.newArrayList("somePattern"), config.getRealmSuffixExclusionPatterns());
    Assert.assertEquals(new SimpleDateFormat("yyyyMMdd"), config.getShardDateFormatter());
    Assert.assertEquals(Lists.newArrayList("enricherClassNameA"), config.getEnricherClassNames());
    Assert.assertEquals(Lists.newArrayList("filterClassNameA"), config.getFilterClassNames());
    Assert.assertEquals(Lists.newArrayList("indexFilteringClassNameA"), config.getIndexFilteringClassNames());
    Assert.assertEquals(Sets.newHashSet("nonEventKeyPrefixA"), config.getNonEventKeyPrefixes());
    Assert.assertEquals(Sets.newHashSet("unevaluatedFieldA"), config.getUnevaluatedFields());
    Assert.assertEquals(Sets.newHashSet("dataTypeFilterA"), config.getDatatypeFilter());
    IndexHole expectedIndexHole = new IndexHole(new String[] { "0", "1" }, new String[] { "2", "3" });
    Assert.assertEquals(Lists.newArrayList(expectedIndexHole), config.getIndexHoles());
    Assert.assertEquals(Sets.newHashSet("projectFieldA"), config.getProjectFields());
    Assert.assertEquals(Sets.newHashSet("blacklistedFieldA"), config.getBlacklistedFields());
    Assert.assertEquals(Sets.newHashSet("indexedFieldA"), config.getIndexedFields());
    // This assert differs from the setter because setNormalizedFieldsAsDatatypes overwrites the normalizedFields with
    // a new key set.
    Assert.assertEquals(Sets.newHashSet("K003"), config.getNormalizedFields());
    Multimap<String, Type<?>> expectedDataTypes = HashMultimap.create();
    expectedDataTypes.put("K001", new NoOpType("V"));
    Assert.assertEquals(expectedDataTypes, config.getDataTypes());
    Multimap<String, Type<?>> expectedQueryFieldsDatatypes = HashMultimap.create();
    expectedQueryFieldsDatatypes.put("K002", new NoOpType("V"));
    Assert.assertEquals(expectedQueryFieldsDatatypes, config.getQueryFieldsDatatypes());
    Multimap<String, Type<?>> expectedNormalizedFieldsDatatypes = HashMultimap.create();
    expectedNormalizedFieldsDatatypes.put("K003", new NoOpType("V"));
    Assert.assertEquals(expectedNormalizedFieldsDatatypes, config.getNormalizedFieldsDatatypes());
    Multimap<String, String> expectedCompositeToFieldMap = ArrayListMultimap.create();
    expectedCompositeToFieldMap.put("K004", "V");
    Assert.assertEquals(expectedCompositeToFieldMap, config.getCompositeToFieldMap());
    Map<String, DiscreteIndexType<?>> expectedFieldToDiscreteIndexType = Maps.newHashMap();
    expectedFieldToDiscreteIndexType.put("GEO", new GeometryType());
    Assert.assertEquals(expectedFieldToDiscreteIndexType, config.getFieldToDiscreteIndexTypes());
    Map<String, Date> expectedCompositeTransitionDates = Maps.newHashMap();
    expectedCompositeTransitionDates.put("K005", transitionDate);
    Assert.assertEquals(expectedCompositeTransitionDates, config.getCompositeTransitionDates());
    Map<String, String> expectedCompositeFieldSeparators = Maps.newHashMap();
    expectedCompositeFieldSeparators.put("GEO", " ");
    Assert.assertEquals(expectedCompositeFieldSeparators, config.getCompositeFieldSeparators());
    Assert.assertEquals(Sets.newHashSet("fieldA"), config.getQueryTermFrequencyFields());
    Assert.assertEquals(Sets.newHashSet("limitFieldA"), config.getLimitFields());
    Map<String, String> expectedHierarchyFieldOptions = Maps.newHashMap();
    expectedHierarchyFieldOptions.put("K006", "V");
    Assert.assertEquals(expectedHierarchyFieldOptions, config.getHierarchyFieldOptions());
    Assert.assertEquals(Lists.newArrayList(DocumentPermutation.class.getName()), config.getDocumentPermutations());
    QueryModel expectedQueryModel = new QueryModel();
    Assert.assertEquals(expectedQueryModel.getForwardQueryMapping(), config.getQueryModel().getForwardQueryMapping());
    Assert.assertEquals(expectedQueryModel.getReverseQueryMapping(), config.getQueryModel().getReverseQueryMapping());
    Assert.assertEquals(expectedQueryModel.getUnevaluatedFields(), config.getQueryModel().getUnevaluatedFields());
    Assert.assertEquals(Sets.newHashSet(".*", ".*?"), config.getDisallowedRegexPatterns());
    // Account for QueryImpl.duplicate() generating a random UUID on the duplicate
    QueryImpl expectedQuery = new QueryImpl();
    expectedQuery.setId(config.getQuery().getId());
    Assert.assertEquals(expectedQuery, config.getQuery());
    Assert.assertEquals(Sets.newHashSet("groupFieldA"), config.getGroupFields());
    UniqueFields expectedUniqueFields = new UniqueFields();
    expectedUniqueFields.put("uniqueFieldA", UniqueGranularity.ALL);
    Assert.assertEquals(expectedUniqueFields, config.getUniqueFields());
    Assert.assertEquals(Lists.newArrayList("fieldA"), config.getContentFieldNames());
    Assert.assertEquals(Sets.newHashSet("NoExpansionFieldA"), config.getNoExpansionFields());
}
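The UniqueFields portion of this test shows the deep-copy contract in miniature. A hedged distillation, using only calls that appear in the test above:

// Distilled from the test: mutating the source after the copy must not affect the copy.
ShardQueryConfiguration other = ShardQueryConfiguration.create();
UniqueFields original = new UniqueFields();
original.put("uniqueFieldA", UniqueGranularity.ALL);
other.setUniqueFields(original);
ShardQueryConfiguration copy = ShardQueryConfiguration.create(other);
original.put("uniqueFieldB", UniqueGranularity.ALL); // mutate the source after copying
UniqueFields expected = new UniqueFields();
expected.put("uniqueFieldA", UniqueGranularity.ALL);
Assert.assertEquals(expected, copy.getUniqueFields()); // the copy still holds only uniqueFieldA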
Use of datawave.query.attributes.UniqueFields in project datawave by NationalSecurityAgency.
The updateUniqueFieldsOption method of the QueryOptionsFromQueryVisitor class:
/**
 * Update the {@value QueryParameters#UNIQUE_FIELDS} option to include the given unique fields.
 */
private void updateUniqueFieldsOption(Map<String, String> optionsMap, UniqueFields uniqueFields) {
    // Combine with any previously found unique fields.
    if (optionsMap.containsKey(QueryParameters.UNIQUE_FIELDS)) {
        UniqueFields existingFields = UniqueFields.from(optionsMap.get(QueryParameters.UNIQUE_FIELDS));
        uniqueFields.putAll(existingFields.getFieldMap());
    }
    optionsMap.put(QueryParameters.UNIQUE_FIELDS, uniqueFields.toString());
}
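A hedged sketch of the merge behavior: when the options map already carries a UNIQUE_FIELDS entry, the existing fields are folded into the new ones before the option is rewritten. The field names are illustrative; putAll(getFieldMap()) and toString() are the calls used in the method above.

// Illustrative merge within the visitor class (field names are made up).
Map<String, String> optionsMap = new HashMap<>();
optionsMap.put(QueryParameters.UNIQUE_FIELDS, "FIELD_A");
UniqueFields newFields = UniqueFields.from("FIELD_B");
updateUniqueFieldsOption(optionsMap, newFields);
// optionsMap now maps UNIQUE_FIELDS to a string covering both FIELD_A and FIELD_B.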