Example usage of datawave.query.function.DocumentCountCardinality from the project "datawave" by NationalSecurityAgency.
Shown here: the getDocumentIterator method of the DynamicFacetIterator class.
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Iterator<Entry<Key, Document>> getDocumentIterator(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException, ConfigException, InstantiationException, IllegalAccessException {
    // We must go through the field index: seek the boolean-logic iterator first.
    createAndSeekIndexIterator(range, columnFamilies, inclusive);

    // TODO consider using the new EventDataQueryExpressionFilter
    // Build a whitelist projection only when faceted fields were configured.
    EventDataQueryFieldFilter fieldFilter = null;
    if (!configuration.getFacetedFields().isEmpty()) {
        fieldFilter = new EventDataQueryFieldFilter();
        fieldFilter.initializeWhitelist(configuration.getFacetedFields());
    }

    // Perform the document-data lookup only when there are no field limits or
    // we have an explicit projection to apply during the lookup.
    Function<Entry<Key, Document>, Entry<DocumentData, Document>> keyToDocumentData = null;
    if (!configuration.hasFieldLimits() || fieldFilter != null) {
        // NOTE(review): includeHierarchyFields is intentionally passed twice here,
        // matching the KeyToDocumentData constructor's two boolean flags — confirm.
        keyToDocumentData = new KeyToDocumentData(source.deepCopy(myEnvironment), super.equality, fieldFilter, this.includeHierarchyFields, this.includeHierarchyFields);
    }

    AccumuloTreeIterable<Key, DocumentData> tree;
    if (keyToDocumentData == null) {
        if (log.isTraceEnabled()) {
            log.trace("Skipping document lookup, because we don't need it");
        }
        // No lookup needed: wrap each field-index key in an empty DocumentData shell.
        tree = new AccumuloTreeIterable<>(fieldIndexResults.tree,
                        input -> Maps.immutableEntry(new DocumentData(input.getKey(), Sets.newHashSet(), Lists.newArrayList(), true), input.getValue()));
    } else {
        tree = new AccumuloTreeIterable<>(fieldIndexResults.tree, keyToDocumentData);
    }
    tree.seek(range, columnFamilies, inclusive);

    TypeMetadata typeMetadata = this.getTypeMetadata();
    Iterator<Entry<Key, Document>> results = Iterators.transform(tree.iterator(),
                    new Aggregation(this.getTimeFilter(), typeMetadata, compositeMetadata, this.isIncludeGroupingContext(), this.includeRecordId, false, null));

    switch (configuration.getType()) {
        case SHARD_COUNT:
        case DAY_COUNT:
            SortedKeyValueIterator<Key, Value> evaluationSource = source.deepCopy(myEnvironment);
            results = getEvaluation(evaluationSource, results, compositeMetadata, typeMetadata, columnFamilies, inclusive);
            // Take the document Keys and transform them into Entry<Key,Document>,
            // removing Attributes which do not fall within the expected time range.
            results = Iterators.transform(results, new DocumentCountCardinality(configuration.getType(), !merge));
            break;
        default:
            break;
    }
    return results;
}
Aggregations