Usage of datawave.query.attributes.Attribute in the project datawave by NationalSecurityAgency: the class LimitFields, method apply().
/**
 * Reduces each limited field in the document down to its configured maximum number of values.
 * Values present in the document's hit-term map are retained first; any remaining capacity is
 * filled with non-hit values. For every field that was actually reduced, a
 * {@code <FIELD>_ORIGINAL_COUNT} entry recording the pre-reduction value count is added to the
 * document.
 *
 * @param entry
 *            the key/document pair; the document is mutated in place
 * @return the same entry, with its document reduced to honor the field limits
 */
@Override
public Entry<Key, Document> apply(Entry<Key, Document> entry) {
    // key is the limited field name with _ORIGINAL_COUNT appended,
    // value will be set to the original count of that field in the document
    Map<String, Integer> limitedFieldCounts = new HashMap<>();
    Document document = entry.getValue();
    Map<String, String> hitTermMap = this.getHitTermMap(document);
    Multimap<String, Attribute<? extends Comparable<?>>> reducedMap = LinkedListMultimap.create();
    Map<String, Integer> countForFieldMap = Maps.newHashMap();
    // maps from the key with NO grouping context to a multimap of
    // key WITH grouping context to attributes:
    // DIRECTION : [DIRECTION.1 : [over,under], DIRECTION.2 : [sideways,down]]
    LoadingCache<String, Multimap<String, Attribute<? extends Comparable<?>>>> hits = CacheBuilder.newBuilder().build(new CacheLoader<String, Multimap<String, Attribute<? extends Comparable<?>>>>() {
        public Multimap<String, Attribute<? extends Comparable<?>>> load(String key) {
            return LinkedListMultimap.create();
        }
    });
    // same structure as 'hits', but holding the values NOT present in the hit-term map
    LoadingCache<String, Multimap<String, Attribute<? extends Comparable<?>>>> misses = CacheBuilder.newBuilder().build(new CacheLoader<String, Multimap<String, Attribute<? extends Comparable<?>>>>() {
        public Multimap<String, Attribute<? extends Comparable<?>>> load(String key) {
            return LinkedListMultimap.create();
        }
    });
    for (Map.Entry<String, Attribute<? extends Comparable<?>>> de : document.entrySet()) {
        String keyWithGrouping = de.getKey();
        String keyNoGrouping = keyWithGrouping;
        // if we have grouping context on, remove the grouping context
        if (keyNoGrouping.indexOf('.') != -1) {
            keyNoGrouping = keyNoGrouping.substring(0, keyNoGrouping.indexOf('.'));
        }
        // _ANYFIELD_ acts as a default limit for any field without an explicit entry.
        // NOTE(review): this mutates limitFieldsMap as a side effect of apply() — confirm the
        // map is not shared across threads before relying on this.
        if (this.limitFieldsMap.containsKey("_ANYFIELD_") && !this.limitFieldsMap.containsKey(keyNoGrouping)) {
            this.limitFieldsMap.put(keyNoGrouping, this.limitFieldsMap.get("_ANYFIELD_"));
            log.trace("added " + keyNoGrouping + " - " + this.limitFieldsMap.get(keyNoGrouping) + " to the limitFieldsMap because of the _ANYFIELD_ entry");
        }
        if (this.limitFieldsMap.containsKey(keyNoGrouping)) {
            // look for the key without the grouping context
            if (log.isTraceEnabled())
                log.trace("limitFieldsMap contains " + keyNoGrouping);
            Attribute<?> attr = de.getValue();
            if (attr instanceof Attributes) {
                // multi-valued field: classify each value independently as a hit or a miss
                Attributes attrs = (Attributes) attr;
                Set<Attribute<? extends Comparable<?>>> attrSet = attrs.getAttributes();
                for (Attribute<? extends Comparable<?>> value : attrSet) {
                    manageHitsAndMisses(keyWithGrouping, keyNoGrouping, value, hitTermMap, hits, misses, countForFieldMap);
                }
            } else {
                manageHitsAndMisses(keyWithGrouping, keyNoGrouping, attr, hitTermMap, hits, misses, countForFieldMap);
            }
        }
    }
    for (String keyNoGrouping : countForFieldMap.keySet()) {
        int limit = this.limitFieldsMap.get(keyNoGrouping);
        // hits are always kept, even when they exceed the limit
        Multimap<String, Attribute<? extends Comparable<?>>> hitMap = hits.getUnchecked(keyNoGrouping);
        for (String keyWithGrouping : hitMap.keySet()) {
            for (Attribute<? extends Comparable<?>> value : hitMap.get(keyWithGrouping)) {
                // if(limit <= 0) break; // uncomment to stop keeping hits once the limit is exceeded
                reducedMap.put(keyWithGrouping, value);
                limit--;
            }
        }
        // misses fill whatever capacity the hits left over
        Multimap<String, Attribute<? extends Comparable<?>>> missMap = misses.getUnchecked(keyNoGrouping);
        for (String keyWithGrouping : missMap.keySet()) {
            for (Attribute<? extends Comparable<?>> value : missMap.get(keyWithGrouping)) {
                if (limit <= 0)
                    break;
                reducedMap.put(keyWithGrouping, value);
                limit--;
            }
        }
        if (log.isTraceEnabled()) {
            log.trace("reducedMap:" + reducedMap);
            log.trace("mapOfHits:" + hits.asMap());
            log.trace("mapOfMisses:" + misses.asMap());
        }
        // only generate an original count if a field was reduced
        if (countForFieldMap.get(keyNoGrouping) > this.limitFieldsMap.get(keyNoGrouping)) {
            limitedFieldCounts.put(keyNoGrouping + ORIGINAL_COUNT_SUFFIX, countForFieldMap.get(keyNoGrouping));
        }
    }
    // mutate the document with the changes collected in the above loop
    applyCounts(document, limitedFieldCounts);
    Map<String, Multimap<String, Attribute<? extends Comparable<?>>>> toRemove = Maps.newLinkedHashMap();
    toRemove.putAll(hits.asMap());
    toRemove.putAll(misses.asMap());
    makeReduction(document, toRemove, reducedMap);
    return entry;
}
Usage of datawave.query.attributes.Attribute in the project datawave by NationalSecurityAgency: the class GroupingTransform, method flatten().
/**
 * <pre>
 * flush used the countingMap:
 * [[MALE, 16],
 * [MALE, 20],
 * [40, MALE],
 * [40, MALE],
 * [MALE, 22] x 2,
 * [FEMALE, 18],
 * [MALE, 24],
 * [20, MALE],
 * [30, MALE],
 * [FEMALE, 18],
 * [34, MALE]]
 *
 * to create documents list: [
 * {AGE=16, COUNT=1, GENDER=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {COUNT=1, ETA=20, GENERE=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {COUNT=1, ETA=40, GENERE=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {AGE=40, COUNT=1, GENDER=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {COUNT=2, ETA=22, GENERE=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {AGE=18, COUNT=1, GENDER=FEMALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {COUNT=1, ETA=24, GENERE=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {AGE=20, COUNT=1, GENDER=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {AGE=30, COUNT=1, GENDER=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {COUNT=1, ETA=18, GENERE=FEMALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false,
 * {AGE=34, COUNT=1, GENDER=MALE}:20130101_0 test%00;-d5uxna.msizfm.-oxy0iu: [ALL] 1356998400000 false]
 *
 * which is then flattened to just one document with the fields and counts correlated with a grouping context suffix:
 *
 * {
 * AGE.0=16, GENDER.0=MALE, COUNT.0=1,
 * ETA.1=20, GENERE.1=MALE, COUNT.1=1,
 * ETA.2=40, GENERE.2=MALE, COUNT.2=1,
 * AGE.3=40, GENDER.3=MALE, COUNT.3=1,
 * ETA.4=22, GENERE.4=MALE, COUNT.4=2,
 * AGE.5=18, GENDER.5=FEMALE, COUNT.5=1,
 * ETA.6=24, GENERE.6=MALE, COUNT.6=1,
 * AGE.7=20, GENDER.7=MALE, COUNT.7=1,
 * AGE.8=30, GENDER.8=MALE, COUNT.8=1,
 * ETA.9=18, GENERE.9=FEMALE, COUNT.9=1,
 * AGE.A=34, GENDER.A=MALE, COUNT.A=1,
 * }
 * </pre>
 *
 * The Attributes, which have had their visibilities merged, are copied into normal TypeAttributes for serialization to the webserver.
 *
 * @param documents
 *            the per-group documents to collapse; the list is cleared and replaced in place with the single flattened document
 */
private void flatten(List<Document> documents) {
    log.trace("flatten {}", documents);
    // the flattened document takes its metadata from the last document in the batch
    // (semantics of the boolean constructor flag not visible here — TODO confirm)
    Document theDocument = new Document(documents.get(documents.size() - 1).getMetadata(), true);
    // grouping-context suffix, rendered in upper-case hex; incremented once per source document
    int context = 0;
    Set<ColumnVisibility> visibilities = new HashSet<>();
    for (Document document : documents) {
        log.trace("document: {}", document);
        for (Entry<String, Attribute<? extends Comparable<?>>> entry : document.entrySet()) {
            String name = entry.getKey();
            // collect every visibility seen so they can be combined for the whole document below
            visibilities.add(entry.getValue().getColumnVisibility());
            Attribute<? extends Comparable<?>> attribute = entry.getValue();
            // NOTE(review): this re-assigns the attribute's own visibility back onto itself and
            // looks like a no-op — possibly a leftover from an earlier merge step; confirm before removing
            attribute.setColumnVisibility(entry.getValue().getColumnVisibility());
            // call copy() on the GroupingTypeAttribute to get a plain TypeAttribute
            // instead of a GroupingTypeAttribute that is package protected and won't serialize
            theDocument.put(name + "." + Integer.toHexString(context).toUpperCase(), (TypeAttribute) attribute.copy(), true, false);
        }
        context++;
    }
    ColumnVisibility combinedVisibility = combine(visibilities);
    log.trace("combined visibilities: {} to {}", visibilities, combinedVisibility);
    theDocument.setColumnVisibility(combinedVisibility);
    // replace the input list's contents with the single flattened document
    documents.clear();
    log.trace("flattened document: {}", theDocument);
    documents.add(theDocument);
}
Usage of datawave.query.attributes.Attribute in the project datawave by NationalSecurityAgency: the class QueryLogicTestHarness, method assertLogicResults().
// =============================================
// assert methods
/**
* Determines if the correct results were obtained for a query.
*
* @param logic
* key/value response data
* @param expected
* list of key values expected within response data
* @param checkers
* list of additional validation methods
*/
/**
 * Determines if the correct results were obtained for a query.
 *
 * @param logic
 *            configured query logic whose iteration yields the key/value response data
 * @param expected
 *            list of key values expected within response data
 * @param checkers
 *            list of additional validation methods
 */
public void assertLogicResults(BaseQueryLogic<Map.Entry<Key, Value>> logic, Collection<String> expected, List<DocumentChecker> checkers) {
    Set<String> actualResults = new HashSet<>();
    if (log.isDebugEnabled()) {
        log.debug(" ====== expected id(s) ======");
        for (String e : expected) {
            log.debug("id(" + e + ")");
        }
    }
    for (Map.Entry<Key, Value> entry : logic) {
        // the final-document marker key carries no result data
        if (FinalDocumentTrackingIterator.isFinalDocumentKey(entry.getKey())) {
            continue;
        }
        final Document document = this.deserializer.apply(entry).getValue();
        // check all of the types to ensure that all are keepers as defined in the
        // AttributeFactory class
        int count = 0;
        for (Attribute<? extends Comparable<?>> attribute : document.getAttributes()) {
            if (attribute instanceof TimingMetadata) {
                // ignore
            } else if (attribute instanceof Attributes) {
                Attributes attrs = (Attributes) attribute;
                Collection<Class<?>> types = new HashSet<>();
                for (Attribute<? extends Comparable<?>> attr : attrs.getAttributes()) {
                    count++;
                    if (attr instanceof TypeAttribute) {
                        Type<? extends Comparable<?>> type = ((TypeAttribute<?>) attr).getType();
                        if (Objects.nonNull(type)) {
                            types.add(type.getClass());
                        }
                    }
                }
                Assert.assertEquals(AttributeFactory.getKeepers(types), types);
            } else {
                count++;
            }
        }
        // ignore empty documents (possible when only passing FinalDocument back)
        if (count == 0) {
            continue;
        }
        // parse the document
        String extractedResult = this.parser.parse(entry.getKey(), document);
        log.debug("result(" + extractedResult + ") key(" + entry.getKey() + ") document(" + document + ")");
        // verify expected results
        Assert.assertNotNull("extracted result", extractedResult);
        // Set.add returns false when the element is already present, so a single call both
        // detects duplicates and records the result
        Assert.assertTrue("duplicate result(" + extractedResult + ") key(" + entry.getKey() + ")", actualResults.add(extractedResult));
        // perform any custom assert checks on document
        for (final DocumentChecker check : checkers) {
            check.assertValid(document);
        }
    }
    log.info("total records found(" + actualResults.size() + ") expected(" + expected.size() + ")");
    // log the delta before asserting so the missing/extra ids are visible in the test output
    if (expected.size() > actualResults.size()) {
        final Set<String> notFound = new HashSet<>(expected);
        notFound.removeAll(actualResults);
        for (final String m : notFound) {
            log.error("missing result(" + m + ")");
        }
    } else if (expected.size() < actualResults.size()) {
        final Set<String> extra = new HashSet<>(actualResults);
        extra.removeAll(expected);
        for (final String r : extra) {
            log.error("unexpected result(" + r + ")");
        }
    }
    Assert.assertEquals("results do not match expected", expected.size(), actualResults.size());
    Assert.assertTrue("expected and actual values do not match", expected.containsAll(actualResults));
    Assert.assertTrue("expected and actual values do not match", actualResults.containsAll(expected));
}
Usage of datawave.query.attributes.Attribute in the project datawave by NationalSecurityAgency: the class ResponseFieldChecker, method assertValid().
/**
* Verifies the query response document contains all of the return fields.
*
* @param doc
* query response document
*/
/**
 * Verifies the query response document contains all of the required return fields (each with
 * metadata) and none of the fields expected to be absent.
 *
 * @param doc
 *            query response document
 */
@Override
public void assertValid(final Document doc) {
    for (final String field : this.fields) {
        // wildcard generic instead of the raw Attribute type; the element type is not needed here
        final Attribute<?> val = doc.get(field);
        Assert.assertNotNull("missing return field(" + field + ")", val);
        if (val instanceof Attributes) {
            // multi-valued field: every individual value must carry metadata
            Attributes multiAttr = (Attributes) val;
            for (Attribute<?> attr : multiAttr.getAttributes()) {
                Assert.assertNotNull("missing metadata for field(" + field + ")", attr.getMetadata());
            }
        } else {
            Assert.assertNotNull("missing metadata for field(" + field + ")", val.getMetadata());
        }
    }
    for (final String field : this.missing) {
        final Attribute<?> val = doc.get(field);
        Assert.assertNull("blacklisted return field(" + field + ")", val);
    }
}
Usage of datawave.query.attributes.Attribute in the project datawave by NationalSecurityAgency: the class EventDataQueryExpressionVisitor, method extractTypes().
/**
 * Extracts the set of normalizer {@link Type}s produced when the given field/value pair is
 * turned into attributes by the supplied factory. Nested attribute bags are walked via a work
 * queue so every contained TypeAttribute contributes its type.
 *
 * @param attrFactory
 *            factory used to build the attribute(s) for the field
 * @param fieldName
 *            name of the field
 * @param fieldValue
 *            value of the field
 * @param key
 *            key supplying metadata for attribute creation
 * @return the set of types discovered (raw {@code Type}, preserved for existing callers)
 */
public static Set<Type> extractTypes(AttributeFactory attrFactory, String fieldName, String fieldValue, Key key) {
    final Set<Type> types = new HashSet<>();
    final Queue<Attribute<?>> attrQueue = new LinkedList<>();
    attrQueue.add(attrFactory.create(fieldName, fieldValue, key, true));
    Attribute<?> attr;
    while ((attr = attrQueue.poll()) != null) {
        // instanceof is the idiomatic (and null-safe) equivalent of Class.isAssignableFrom here
        if (attr instanceof TypeAttribute) {
            // leaf attribute: record its normalizer type
            types.add(((TypeAttribute<?>) attr).getType());
        } else if (attr instanceof AttributeBag) {
            // composite attribute: enqueue its children for traversal
            attrQueue.addAll(((AttributeBag<?>) attr).getAttributes());
        } else {
            log.warn("Unexpected attribute type when extracting type: " + attr.getClass().getCanonicalName());
        }
    }
    return types;
}
Aggregations