Example 1 with TermFrequencyList

Use of datawave.query.jexl.functions.TermFrequencyList in the project datawave by NationalSecurityAgency.

From the class TermOffsetPopulator, the method getContextMap:

/**
 * Build TermOffset map for use in JexlEvaluation
 *
 * @param docKey
 *            key that maps to a document
 * @param keys
 *            set of keys that map to hits on tf fields
 * @param fields
 *            set of fields to remove from the search space
 * @return a map containing the term offset map, keyed by Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME, for use in the JexlContext
 */
public Map<String, Object> getContextMap(Key docKey, Set<Key> keys, Set<String> fields) {
    document = new Document();
    TermFrequencyIterator tfSource;
    // Do not prune if no fields exist or if the tf fields would prune to nothing. TODO skip tf entirely if this would prune to zero
    if (fields == null || fields.isEmpty() || fields.size() == termFrequencyFieldValues.keySet().size()) {
        tfSource = new TermFrequencyIterator(termFrequencyFieldValues, keys);
    } else {
        // There are fields to remove, reduce the search space and continue
        Multimap<String, String> tfFVs = HashMultimap.create(termFrequencyFieldValues);
        fields.forEach(tfFVs::removeAll);
        tfSource = new TermFrequencyIterator(tfFVs, keys);
        if (tfFVs.isEmpty()) {
            log.error("Created a TFIter with no field values. Orig fields: " + termFrequencyFieldValues.keySet() + " fields to remove: " + fields);
        }
    }
    Range range = getRange(keys);
    try {
        tfSource.init(source, null, null);
        tfSource.seek(range, null, false);
    } catch (IOException e) {
        log.error("Seek to the range failed: " + range, e);
    }
    // set the document context on the filter
    if (evaluationFilter != null) {
        evaluationFilter.startNewDocument(docKey);
    }
    Map<String, TermFrequencyList> termOffsetMap = Maps.newHashMap();
    while (tfSource.hasTop()) {
        Key key = tfSource.getTopKey();
        FieldValue fv = FieldValue.getFieldValue(key);
        // add the zone and term to our internal document
        Content attr = new Content(fv.getValue(), source.getTopKey(), evaluationFilter == null || evaluationFilter.keep(key));
        // No need to apply the evaluation filter here: the TermFrequencyIterator above is already doing more filtering than we can do here,
        // so this filter is simply extraneous. However, if an EventDataQueryFilter implementation gets smarter somehow, it can be added back in
        // here.
        // For example, the AncestorQueryLogic may require this....
        // if (evaluationFilter == null || evaluationFilter.apply(Maps.immutableEntry(key, StringUtils.EMPTY_STRING))) {
        this.document.put(fv.getField(), attr);
        TreeMultimap<TermFrequencyList.Zone, TermWeightPosition> offsets = TreeMultimap.create();
        try {
            TermWeight.Info twInfo = TermWeight.Info.parseFrom(tfSource.getTopValue().get());
            // If no content expansion fields are configured, assume every field is permitted for unfielded content functions
            TermFrequencyList.Zone twZone = new TermFrequencyList.Zone(fv.getField(),
                            (contentExpansionFields == null || contentExpansionFields.isEmpty() || contentExpansionFields.contains(fv.getField())),
                            TermFrequencyList.getEventId(key));
            TermWeightPosition.Builder position = new TermWeightPosition.Builder();
            for (int i = 0; i < twInfo.getTermOffsetCount(); i++) {
                position.setTermWeightOffsetInfo(twInfo, i);
                offsets.put(twZone, position.build());
                position.reset();
            }
        } catch (InvalidProtocolBufferException e) {
            log.error("Could not deserialize TermWeight protocol buffer for: " + source.getTopKey());
            return null;
        }
        // Check whether this is the first time we have seen this term in any field
        TermFrequencyList tfl = termOffsetMap.get(fv.getValue());
        if (null == tfl) {
            termOffsetMap.put(fv.getValue(), new TermFrequencyList(offsets));
        } else {
            // Merge in the offsets for the current field+term with all previous
            // offsets from other fields in the same term
            tfl.addOffsets(offsets);
        }
        try {
            tfSource.next();
        } catch (IOException ioe) {
            log.error("Next failed: " + range, ioe);
            break;
        }
    }
    // Load the term offset map into the map that will be placed into the JexlContext
    Map<String, Object> map = new HashMap<>();
    map.put(Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME, termOffsetMap);
    return map;
}
Also used : HashMap(java.util.HashMap) TermFrequencyIterator(datawave.core.iterators.TermFrequencyIterator) Document(datawave.query.attributes.Document) TermWeightPosition(datawave.ingest.protobuf.TermWeightPosition) TermFrequencyList(datawave.query.jexl.functions.TermFrequencyList) InvalidProtocolBufferException(com.google.protobuf.InvalidProtocolBufferException) IOException(java.io.IOException) Range(org.apache.accumulo.core.data.Range) Content(datawave.query.attributes.Content) TermWeight(datawave.ingest.protobuf.TermWeight) Key(org.apache.accumulo.core.data.Key)
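
The map returned by getContextMap is consumed by loading it into a DatawaveJexlContext, where content functions such as content:phrase resolve the term offset map (see Example 2 below). A minimal sketch of that hand-off, assuming a populator instance and the docKey, keys, and fields arguments are in scope:

Map<String, Object> contextMap = populator.getContextMap(docKey, keys, fields);
// Expose each entry to JEXL; the term offset map lands under
// Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME, which the query in Example 2 references as termOffsetMap.
DatawaveJexlContext context = new DatawaveJexlContext();
contextMap.forEach(context::set);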

Example 2 with TermFrequencyList

Use of datawave.query.jexl.functions.TermFrequencyList in the project datawave by NationalSecurityAgency.

From the class JexlEvaluationTest, the method testContentPhraseFunction:

@Test
public void testContentPhraseFunction() {
    String query = "FOO == 'bar' && TOKFIELD == 'big' && TOKFIELD == 'red' && TOKFIELD == 'dog' && content:phrase(termOffsetMap, 'big', 'red', 'dog')";
    Map<String, TermFrequencyList> map = new HashMap<>();
    map.put("big", buildTfList("TOKFIELD", 1));
    map.put("red", buildTfList("TOKFIELD", 2));
    map.put("dog", buildTfList("TOKFIELD", 3));
    DatawaveJexlContext context = new DatawaveJexlContext();
    context.set(Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME, map);
    Key docKey = new Key("shard", "datatype\0uid");
    Document d = new Document();
    d.put("FOO", new Content("bar", docKey, true));
    d.put("TOKFIELD", new Content("big", docKey, true));
    d.put("TOKFIELD", new Content("red", docKey, true));
    d.put("TOKFIELD", new Content("dog", docKey, true));
    d.visit(Arrays.asList("FOO", "TOKFIELD"), context);
    JexlEvaluation evaluation = new JexlEvaluation(query, new HitListArithmetic());
    Tuple3<Key, Document, DatawaveJexlContext> tuple = new Tuple3<>(docKey, d, context);
    boolean result = evaluation.apply(tuple);
    assertTrue(result);
    // assert that "big red dog" came back in the hit terms
    boolean foundPhrase = false;
    Attributes attrs = (Attributes) d.get("HIT_TERM");
    for (Attribute<?> attr : attrs.getAttributes()) {
        if (attr.getData().equals("TOKFIELD:big red dog")) {
            foundPhrase = true;
        }
    }
    assertEquals(5, attrs.size());
    assertTrue(foundPhrase);
}
Also used : TermFrequencyList(datawave.query.jexl.functions.TermFrequencyList) HitListArithmetic(datawave.query.jexl.HitListArithmetic) HashMap(java.util.HashMap) Attributes(datawave.query.attributes.Attributes) Document(datawave.query.attributes.Document) Content(datawave.query.attributes.Content) Tuple3(datawave.query.util.Tuple3) DatawaveJexlContext(datawave.query.jexl.DatawaveJexlContext) Key(org.apache.accumulo.core.data.Key) Test(org.junit.Test)
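
The buildTfList helper used above is not shown on this page. Below is a minimal sketch of what such a helper could look like, assembled from the TermFrequencyList, Zone, and TermWeightPosition.Builder APIs seen in Example 1; the single-offset signature, the hard-coded event id, and the Builder setOffset call are assumptions, not the project's actual helper.

// Hypothetical test helper: builds a TermFrequencyList holding one offset for the given field.
private TermFrequencyList buildTfList(String field, int offset) {
    TreeMultimap<TermFrequencyList.Zone, TermWeightPosition> offsets = TreeMultimap.create();
    // Assumed event id; TermFrequencyList.getEventId(key) would normally derive this from a key.
    TermFrequencyList.Zone zone = new TermFrequencyList.Zone(field, true, "shard\0datatype\0uid");
    offsets.put(zone, new TermWeightPosition.Builder().setOffset(offset).build());
    return new TermFrequencyList(offsets);
}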

Example 3 with TermFrequencyList

Use of datawave.query.jexl.functions.TermFrequencyList in the project datawave by NationalSecurityAgency.

From the class TermOffsetPopulator, the method mergeContextMap:

// Merge two maps, presuming both came from getContextMap()
@SuppressWarnings("unchecked")
public static Map<String, Object> mergeContextMap(Map<String, Object> map1, Map<String, Object> map2) {
    Map<String, Object> map = new HashMap<>();
    Map<String, TermFrequencyList> termOffsetMap = Maps.newHashMap();
    Map<String, TermFrequencyList> termOffsetMap1 = (Map<String, TermFrequencyList>) (map1.get(Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME));
    Map<String, TermFrequencyList> termOffsetMap2 = (Map<String, TermFrequencyList>) (map2.get(Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME));
    if (termOffsetMap1 == null) {
        if (termOffsetMap2 != null) {
            termOffsetMap.putAll(termOffsetMap2);
        }
    } else {
        termOffsetMap.putAll(termOffsetMap1);
        if (termOffsetMap2 != null) {
            for (Map.Entry<String, TermFrequencyList> entry : termOffsetMap2.entrySet()) {
                String key = entry.getKey();
                TermFrequencyList list1 = termOffsetMap.get(key);
                TermFrequencyList list2 = entry.getValue();
                if (list1 == null) {
                    termOffsetMap.put(key, list2);
                } else if (list2 != null) {
                    termOffsetMap.put(key, TermFrequencyList.merge(list1, list2));
                }
            }
        }
    }
    // Load the merged term offset map into the map that will be placed into the JexlContext
    map.put(Constants.TERM_OFFSET_MAP_JEXL_VARIABLE_NAME, termOffsetMap);
    return map;
}
Also used : TermFrequencyList(datawave.query.jexl.functions.TermFrequencyList) HashMap(java.util.HashMap) Map(java.util.Map)
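
A brief usage sketch for mergeContextMap, combining the context maps produced by two getContextMap calls over different key sets; the populator, docKey, keys1, keys2, and fields names are illustrative:

Map<String, Object> contextMap1 = populator.getContextMap(docKey, keys1, fields);
Map<String, Object> contextMap2 = populator.getContextMap(docKey, keys2, fields);
// Terms present in both maps have their offsets combined via TermFrequencyList.merge.
Map<String, Object> combined = TermOffsetPopulator.mergeContextMap(contextMap1, contextMap2);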

Aggregations

TermFrequencyList (datawave.query.jexl.functions.TermFrequencyList) 3
HashMap (java.util.HashMap) 3
Content (datawave.query.attributes.Content) 2
Document (datawave.query.attributes.Document) 2
Key (org.apache.accumulo.core.data.Key) 2
InvalidProtocolBufferException (com.google.protobuf.InvalidProtocolBufferException) 1
TermFrequencyIterator (datawave.core.iterators.TermFrequencyIterator) 1
TermWeight (datawave.ingest.protobuf.TermWeight) 1
TermWeightPosition (datawave.ingest.protobuf.TermWeightPosition) 1
Attributes (datawave.query.attributes.Attributes) 1
DatawaveJexlContext (datawave.query.jexl.DatawaveJexlContext) 1
HitListArithmetic (datawave.query.jexl.HitListArithmetic) 1
Tuple3 (datawave.query.util.Tuple3) 1
IOException (java.io.IOException) 1
Map (java.util.Map) 1
Range (org.apache.accumulo.core.data.Range) 1
Test (org.junit.Test) 1