Example 1 with TLSHHashMatcher

Use of org.apache.nifi.processors.cybersecurity.matchers.TLSHHashMatcher in project nifi by apache.

From the class CompareFuzzyHash, method onTrigger:

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    String algorithm = context.getProperty(HASH_ALGORITHM).getValue();
    final String attributeName = context.getProperty(ATTRIBUTE_NAME).getValue();
    String inputHash = flowFile.getAttribute(attributeName);
    if (inputHash == null) {
        getLogger().info("FlowFile {} lacks the required '{}' attribute, routing to failure.", new Object[] { flowFile, attributeName });
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    FuzzyHashMatcher fuzzyHashMatcher = null;
    switch(algorithm) {
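        // tlsh and ssdeep are compile-time String constants holding the algorithm names,
        // defined on the processor (or its parent class), which is what allows them as case labels.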
        case tlsh:
            fuzzyHashMatcher = new TLSHHashMatcher(getLogger());
            break;
        case ssdeep:
            fuzzyHashMatcher = new SSDeepHashMatcher(getLogger());
            break;
        default:
            getLogger().error("Seems like the processor is configured to use unsupported algorithm '{}' ? Yielding.", new Object[] { algorithm });
            context.yield();
            return;
    }
    if (!fuzzyHashMatcher.isValidHash(inputHash)) {
        // The attribute value is not a valid hash for the selected algorithm,
        // so log and route the flowfile to failure
        logger.error("Invalid hash provided. Sending to failure");
        session.transfer(flowFile, REL_FAILURE);
        session.commit();
        return;
    }
    double similarity = 0;
    double matchThreshold = context.getProperty(MATCH_THRESHOLD).asDouble();
    try (BufferedReader reader = fuzzyHashMatcher.getReader(context.getProperty(HASH_LIST_FILE).getValue())) {
        Map<String, Double> matched = new ConcurrentHashMap<String, Double>();
        String line = null;
        iterateFile: while ((line = reader.readLine()) != null) {
            if (line != null) {
                similarity = fuzzyHashMatcher.getSimilarity(inputHash, line);
                if (fuzzyHashMatcher.matchExceedsThreshold(similarity, matchThreshold)) {
                    String match = fuzzyHashMatcher.getMatch(line);
                    // A match against a malformed entry would be misleading, so skip it and log instead
                    if (!StringUtils.isEmpty(match)) {
                        matched.put(match, similarity);
                    } else {
                        logger.error("Found a match against a malformed entry '{}'. Please inspect the contents of" + "the {} file and ensure they are properly formatted", new Object[] { line, HASH_LIST_FILE.getDisplayName() });
                    }
                }
            }
            // Check if single match is desired and if a match has been made
            if (context.getProperty(MATCHING_MODE).getValue().equals(singleMatch.getValue()) && (matched.size() > 0)) {
                // and save time by breaking the outer loop
                break iterateFile;
            }
        }
        // Whether or not the loop broke out early, continue processing
        // First by creating a new map to hold attributes
        Map<String, String> attributes = new ConcurrentHashMap<String, String>();
        // Then by iterating over the hashmap of matches
        if (matched.size() > 0) {
            int x = 0;
            for (Map.Entry<String, Double> entry : matched.entrySet()) {
                // defining attributes accordingly
                attributes.put(attributeName + "." + x + ".match", entry.getKey());
                attributes.put(attributeName + "." + x + ".similarity", String.valueOf(entry.getValue()));
                x++;
            }
            // Finally, append the attributes to the flowfile and send it to the match relationship
            flowFile = session.putAllAttributes(flowFile, attributes);
            session.transfer(flowFile, REL_FOUND);
            session.commit();
            return;
        } else {
            // Otherwise send it to non-match
            session.transfer(flowFile, REL_NOT_FOUND);
            session.commit();
            return;
        }
    } catch (IOException e) {
        logger.error("Error while reading the hash input source");
        context.yield();
    }
}
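
For reference, the FuzzyHashMatcher contract implied by the calls above can be sketched as follows. This is a sketch only; the actual interface in org.apache.nifi.processors.cybersecurity.matchers may declare additional methods or different checked exceptions.

import java.io.BufferedReader;
import java.io.IOException;

// Sketch of the contract implied by onTrigger above, not the project's actual declaration.
public interface FuzzyHashMatcher {

    // True if the attribute value is a syntactically valid hash for the selected algorithm.
    boolean isValidHash(String inputHash);

    // Opens the hash list referenced by the HASH_LIST_FILE property value.
    BufferedReader getReader(String source) throws IOException;

    // Score comparing the input hash against one hash list entry
    // (ssdeep treats higher scores as closer, TLSH treats lower distances as closer).
    double getSimilarity(String inputHash, String line);

    // Algorithm-specific check of the score against the configured MATCH_THRESHOLD.
    boolean matchExceedsThreshold(double similarity, double matchThreshold);

    // Extracts the matched identifier (e.g. a filename) from the hash list entry; empty if malformed.
    String getMatch(String line);
}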
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) FuzzyHashMatcher(org.apache.nifi.processors.cybersecurity.matchers.FuzzyHashMatcher) TLSHHashMatcher(org.apache.nifi.processors.cybersecurity.matchers.TLSHHashMatcher) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) BufferedReader(java.io.BufferedReader) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) SSDeepHashMatcher(org.apache.nifi.processors.cybersecurity.matchers.SSDeepHashMatcher) Map(java.util.Map)
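
For flowfiles routed to REL_FOUND, the method writes paired attributes named <attributeName>.<n>.match and <attributeName>.<n>.similarity, with n counting up from 0. A minimal sketch of collecting those pairs back into a map on the consuming side (the class and method names here are hypothetical, and a plain Map stands in for the flowfile's attribute map):

import java.util.HashMap;
import java.util.Map;

public final class FuzzyHashMatchAttributes {

    // Collects the <attributeName>.<n>.match / <attributeName>.<n>.similarity pairs written by
    // CompareFuzzyHash back into a single map of match -> similarity.
    public static Map<String, Double> collectMatches(Map<String, String> flowFileAttributes, String attributeName) {
        Map<String, Double> matches = new HashMap<>();
        int x = 0;
        while (true) {
            String match = flowFileAttributes.get(attributeName + "." + x + ".match");
            String similarity = flowFileAttributes.get(attributeName + "." + x + ".similarity");
            if (match == null || similarity == null) {
                break; // indices are contiguous from 0, so the first missing pair ends the scan
            }
            matches.put(match, Double.parseDouble(similarity));
            x++;
        }
        return matches;
    }
}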

Aggregations

BufferedReader (java.io.BufferedReader) 1
IOException (java.io.IOException) 1
Map (java.util.Map) 1
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap) 1
FlowFile (org.apache.nifi.flowfile.FlowFile) 1
ComponentLog (org.apache.nifi.logging.ComponentLog) 1
FuzzyHashMatcher (org.apache.nifi.processors.cybersecurity.matchers.FuzzyHashMatcher) 1
SSDeepHashMatcher (org.apache.nifi.processors.cybersecurity.matchers.SSDeepHashMatcher) 1
TLSHHashMatcher (org.apache.nifi.processors.cybersecurity.matchers.TLSHHashMatcher) 1