Usage example of datawave.query.iterator.profile.EvaluationTrackingNestedIterator in the datawave project by NationalSecurityAgency:
the seek method of the QueryIterator class.
@Override
public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
    // preserve the original range for use with the FinalDocumentTracking iterator because it is
    // placed after the ResultCountingIterator, so the FinalDocumentTracking iterator needs the
    // start key with the count already appended
    originalRange = range;
    getActiveQueryLog().get(getQueryId()).beginCall(this.originalRange, ActiveQuery.CallType.SEEK);
    Span span = Trace.start("QueryIterator.seek");

    // enable the grouping context when the query uses grouping functions, remembering whether we
    // turned it on (groupingContextAddedByMe) so it can be reverted elsewhere
    if (!this.isIncludeGroupingContext() && (this.query.contains("grouping:") || this.query.contains("matchesInGroup")
                    || this.query.contains("MatchesInGroup") || this.query.contains("atomValuesMatch"))) {
        this.setIncludeGroupingContext(true);
        this.groupingContextAddedByMe = true;
    } else {
        this.groupingContextAddedByMe = false;
    }

    try {
        if (log.isDebugEnabled()) {
            log.debug("Seek range: " + range + " " + query);
        }
        this.range = range;

        // determine whether this is a teardown/rebuild range
        long resultCount = 0;
        if (!range.isStartKeyInclusive()) {
            // see if we can fail fast. If we were rebuilt with the FinalDocument key, then we are
            // already completely done
            if (collectTimingDetails && FinalDocumentTrackingIterator.isFinalDocumentKey(range.getStartKey())) {
                this.seekKeySource = new EmptyTreeIterable();
                this.serializedDocuments = EmptyIterator.emptyIterator();
                prepareKeyValue(span);
                return;
            }

            // see if we have a result count encoded in the column family (appended by the
            // ResultCountingIterator before the teardown)
            Key startKey = range.getStartKey();
            String[] parts = StringUtils.split(startKey.getColumnFamily().toString(), '\0');
            if (parts.length == 3) {
                resultCount = NumericalEncoder.decode(parts[0]).longValue();
                // remove the count from the range
                startKey = new Key(startKey.getRow(), new Text(parts[1] + '\0' + parts[2]), startKey.getColumnQualifier(),
                                startKey.getColumnVisibility(), startKey.getTimestamp());
                this.range = range = new Range(startKey, range.isStartKeyInclusive(), range.getEndKey(), range.isEndKeyInclusive());
            }
        }

        // determine whether this is a document specific range
        Range documentRange = isDocumentSpecificRange(range) ? range : null;

        // a non-inclusive document specific range means the single document was already returned,
        // so the key source is empty
        if (documentRange != null && !documentRange.isStartKeyInclusive()) {
            if (log.isTraceEnabled()) {
                log.trace("Received non-inclusive event specific range: " + documentRange);
            }
            if (gatherTimingDetails()) {
                this.seekKeySource = new EvaluationTrackingNestedIterator(QuerySpan.Stage.EmptyTree, trackingSpan, new EmptyTreeIterable(), myEnvironment);
            } else {
                this.seekKeySource = new EmptyTreeIterable();
            }
        } else if (documentRange != null && (!this.isContainsIndexOnlyTerms() && this.getTermFrequencyFields().isEmpty() && !super.mustUseFieldIndex)) {
            // the Range is for a single document and the query doesn't reference any index-only or
            // tokenized fields, so we can take a shortcut directly to the event
            if (log.isTraceEnabled()) {
                log.trace("Received event specific range: " + documentRange);
            }
            Map.Entry<Key, Document> documentKey = Maps.immutableEntry(super.getDocumentKey.apply(documentRange), new Document());
            if (log.isTraceEnabled()) {
                log.trace("Transformed document key: " + documentKey);
            }
            if (gatherTimingDetails()) {
                this.seekKeySource = new EvaluationTrackingNestedIterator(QuerySpan.Stage.DocumentSpecificTree, trackingSpan,
                                new DocumentSpecificNestedIterator(documentKey), myEnvironment);
            } else {
                this.seekKeySource = new DocumentSpecificNestedIterator(documentKey);
            }
        } else {
            this.seekKeySource = buildDocumentIterator(documentRange, range, columnFamilies, inclusive);
        }

        // Create the pipeline iterator for document aggregation and evaluation within a thread pool
        PipelineIterator pipelineIter = PipelineFactory.createIterator(this.seekKeySource, getMaxEvaluationPipelines(), getMaxPipelineCachedResults(),
                        getSerialPipelineRequest(), querySpanCollector, trackingSpan, this, sourceForDeepCopies.deepCopy(myEnvironment), myEnvironment,
                        yield, yieldThresholdMs, columnFamilies, inclusive);
        pipelineIter.setCollectTimingDetails(collectTimingDetails);
        // TODO pipelineIter.setStatsdHostAndPort(statsdHostAndPort);
        pipelineIter.startPipeline();

        // gather Key,Document Entries from the pipelines
        Iterator<Entry<Key, Document>> pipelineDocuments = pipelineIter;
        if (log.isTraceEnabled()) {
            pipelineDocuments = Iterators.filter(pipelineDocuments, keyDocumentEntry -> {
                log.trace("after pipeline, keyDocumentEntry:" + keyDocumentEntry);
                return true;
            });
        }

        // now apply the unique transform if requested
        UniqueTransform uniquify = getUniqueTransform();
        if (uniquify != null) {
            pipelineDocuments = Iterators.filter(pipelineDocuments, uniquify.getUniquePredicate());
        }

        // apply the grouping transform if requested and if the batch size is greater than zero
        // if the batch size is 0, then grouping is computed only on the web server
        GroupingTransform groupify = getGroupingTransform();
        if (groupify != null && this.groupFieldsBatchSize > 0) {
            // use the same instance that was null-checked above rather than re-reading the field
            // (previously this invoked groupingTransform directly, bypassing the null check)
            pipelineDocuments = groupify.getGroupingIterator(pipelineDocuments, this.groupFieldsBatchSize, this.yield);
            if (log.isTraceEnabled()) {
                pipelineDocuments = Iterators.filter(pipelineDocuments, keyDocumentEntry -> {
                    log.trace("after grouping, keyDocumentEntry:" + keyDocumentEntry);
                    return true;
                });
            }
        }

        pipelineDocuments = Iterators.filter(pipelineDocuments, keyDocumentEntry -> {
            // last chance before the documents are serialized
            getActiveQueryLog().get(getQueryId()).recordStats(keyDocumentEntry.getValue(), querySpanCollector.getCombinedQuerySpan(null));
            // Always return true since we just want to record data in the ActiveQueryLog
            return true;
        });

        if (this.getReturnType() == ReturnType.kryo) {
            // Serialize the Document using Kryo
            this.serializedDocuments = Iterators.transform(pipelineDocuments, new KryoDocumentSerializer(isReducedResponse(), isCompressResults()));
        } else if (this.getReturnType() == ReturnType.writable) {
            // Use the Writable interface to serialize the Document
            this.serializedDocuments = Iterators.transform(pipelineDocuments, new WritableDocumentSerializer(isReducedResponse()));
        } else if (this.getReturnType() == ReturnType.tostring) {
            // Just return a toString() representation of the document
            this.serializedDocuments = Iterators.transform(pipelineDocuments, new ToStringDocumentSerializer(isReducedResponse()));
        } else {
            throw new IllegalArgumentException("Unknown return type of: " + this.getReturnType());
        }

        if (log.isTraceEnabled()) {
            KryoDocumentDeserializer dser = new KryoDocumentDeserializer();
            this.serializedDocuments = Iterators.filter(this.serializedDocuments, keyValueEntry -> {
                log.trace("after serializing, keyValueEntry:" + dser.apply(keyValueEntry));
                return true;
            });
        }

        // Cannot do this on document specific ranges as the count would place the keys outside the initial range
        if (!sortedUIDs && documentRange == null) {
            this.serializedDocuments = new ResultCountingIterator(serializedDocuments, resultCount, yield);
        } else if (this.sortedUIDs) {
            // we have sorted UIDs, so we can mask out the cq
            this.serializedDocuments = new KeyAdjudicator<>(serializedDocuments, yield);
        }

        // only add the final document tracking iterator which sends stats back to the client if collectTimingDetails is true
        if (collectTimingDetails) {
            // if there is no document to return, then add an empty document to store the timing metadata
            this.serializedDocuments = new FinalDocumentTrackingIterator(querySpanCollector, trackingSpan, originalRange, this.serializedDocuments,
                            this.getReturnType(), this.isReducedResponse(), this.isCompressResults(), this.yield);
        }

        if (log.isTraceEnabled()) {
            KryoDocumentDeserializer dser = new KryoDocumentDeserializer();
            this.serializedDocuments = Iterators.filter(this.serializedDocuments, keyValueEntry -> {
                // log at trace to match the isTraceEnabled() guard (was mistakenly log.debug)
                log.trace("finally, considering:" + dser.apply(keyValueEntry));
                return true;
            });
        }

        // Determine if we have items to return
        prepareKeyValue(span);
    } catch (Exception e) {
        handleException(e);
    } finally {
        if (gatherTimingDetails() && trackingSpan != null && querySpanCollector != null) {
            querySpanCollector.addQuerySpan(trackingSpan);
        }
        if (null != span) {
            span.stop();
        }
        QueryStatsDClient client = getStatsdClient();
        if (client != null) {
            client.flush();
        }
        getActiveQueryLog().get(getQueryId()).endCall(this.originalRange, ActiveQuery.CallType.SEEK);
        if (this.key == null && this.value == null) {
            // no entries to return
            getActiveQueryLog().remove(getQueryId(), this.originalRange);
        }
    }
}
Usage example of datawave.query.iterator.profile.EvaluationTrackingNestedIterator in the datawave project by NationalSecurityAgency:
the buildDocumentIterator method of the QueryIterator class.
/**
 * Build the nested document iterator over the field index. When batched queries are present
 * (batchedQueries >= 1), one sub-iterator is built, seeked, and evaluated per batched
 * (range, query) pair and wrapped in a {@code NestedQueryIterator}; otherwise a single key
 * source is created (or reset) for the supplied document range.
 *
 * @param documentRange
 *            the document-specific range, or null when the seek range is not document specific
 * @param seekRange
 *            the range passed to seek(); used to skip batched queries whose start key falls
 *            outside of it
 * @param columnFamilies
 *            the column families from seek()
 * @param inclusive
 *            whether the column families are inclusive
 * @return the initialized nested iterator over document keys
 * @throws IOException
 *             if a batched JEXL query cannot be parsed, or an underlying seek fails
 */
protected NestedIterator<Key> buildDocumentIterator(Range documentRange, Range seekRange, Collection<ByteSequence> columnFamilies, boolean inclusive)
                throws IOException, ConfigException, InstantiationException, IllegalAccessException {
    NestedIterator<Key> docIter = null;
    if (log.isTraceEnabled()) {
        log.trace("Batched queries is " + batchedQueries);
    }
    if (batchedQueries >= 1) {
        List<NestedQuery<Key>> nests = Lists.newArrayList();
        for (Entry<Range, String> queries : batchStack) {
            Range myRange = queries.getKey();
            if (log.isTraceEnabled()) {
                log.trace("Adding " + myRange + " from seekrange " + seekRange);
            }
            /*
             * Only perform the following checks if start key is not infinite and document range is specified
             */
            if (null != seekRange && !seekRange.isInfiniteStartKey()) {
                Key seekStartKey = seekRange.getStartKey();
                Key myStartKey = myRange.getStartKey();
                /*
                 * if our seek key is greater than our start key we can skip this batched query. myStartKey.compareTo(seekStartKey) must be <= 0, which
                 * means that startKey must be greater than or equal to seekStartKey
                 */
                if (null != myStartKey && null != seekStartKey && !seekRange.contains(myStartKey)) {
                    if (log.isTraceEnabled()) {
                        log.trace("skipping " + myRange);
                    }
                    continue;
                }
            }
            // stateful arithmetics carry per-evaluation state, so each nested query gets its own
            // clone; otherwise a fresh default arithmetic is used
            JexlArithmetic myArithmetic;
            if (arithmetic instanceof StatefulArithmetic) {
                myArithmetic = ((StatefulArithmetic) arithmetic).clone();
            } else {
                myArithmetic = new DefaultArithmetic();
            }
            // Parse the query
            ASTJexlScript myScript = null;
            JexlEvaluation eval = null;
            try {
                myScript = JexlASTHelper.parseJexlQuery(queries.getValue());
                eval = new JexlEvaluation(queries.getValue(), myArithmetic);
            } catch (Exception e) {
                throw new IOException("Could not parse the JEXL query: '" + this.getQuery() + "'", e);
            }
            // If we had an event-specific range previously, we need to reset it back
            // to the source we created during init
            NestedIterator<Key> subDocIter = getOrSetKeySource(myRange, myScript);
            if (log.isTraceEnabled()) {
                log.trace("Using init()'ialized source: " + subDocIter.getClass().getName());
            }
            if (gatherTimingDetails()) {
                subDocIter = new EvaluationTrackingNestedIterator(QuerySpan.Stage.FieldIndexTree, trackingSpan, subDocIter, myEnvironment);
            }
            // Seek() the boolean logic stuff
            ((SeekableIterator) subDocIter).seek(myRange, columnFamilies, inclusive);
            // bundle the iterator with its script, evaluation, and range for the nested query
            NestedQuery<Key> nestedQueryObj = new NestedQuery<>();
            nestedQueryObj.setQuery(queries.getValue());
            nestedQueryObj.setIterator(subDocIter);
            nestedQueryObj.setQueryScript(myScript);
            nestedQueryObj.setEvaluation(eval);
            nestedQueryObj.setRange(queries.getKey());
            nests.add(nestedQueryObj);
        }
        docIter = new NestedQueryIterator<>(nests);
        // now lets start off the nested iterator
        docIter.initialize();
        initKeySource = docIter;
    } else {
        // If we had an event-specific range previously, we need to reset it back
        // to the source we created during init
        docIter = getOrSetKeySource(documentRange, script);
        initKeySource = docIter;
        if (log.isTraceEnabled()) {
            log.trace("Using init()'ialized source: " + this.initKeySource.getClass().getName());
        }
        if (gatherTimingDetails()) {
            docIter = new EvaluationTrackingNestedIterator(QuerySpan.Stage.FieldIndexTree, trackingSpan, docIter, myEnvironment);
        }
        // Seek() the boolean logic stuff
        // NOTE(review): this seeks with the `range` field rather than the `seekRange` parameter;
        // presumably equivalent because the caller assigns this.range before invoking — confirm
        ((SeekableIterator) docIter).seek(range, columnFamilies, inclusive);
        // now lets start off the nested iterator
        docIter.initialize();
    }
    return docIter;
}
Aggregations