Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class ScanAttribute, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final List<FlowFile> flowFiles = session.get(50);
    if (flowFiles.isEmpty()) {
        return;
    }
    final ComponentLog logger = getLogger();
    try {
        if (fileWatcher.checkAndReset()) {
            this.dictionaryTerms = createDictionary(context);
        }
    } catch (final IOException e) {
        logger.error("Unable to reload dictionary due to {}", e);
    }
    final boolean matchAll = context.getProperty(MATCHING_CRITERIA).getValue().equals(MATCH_CRITERIA_ALL);
    for (final FlowFile flowFile : flowFiles) {
        final boolean matched = matchAll ? allMatch(flowFile, attributePattern, dictionaryTerms) : anyMatch(flowFile, attributePattern, dictionaryTerms);
        final Relationship relationship = matched ? REL_MATCHED : REL_UNMATCHED;
        session.getProvenanceReporter().route(flowFile, relationship);
        session.transfer(flowFile, relationship);
        logger.info("Transferred {} to {}", new Object[] { flowFile, relationship });
    }
}
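The fileWatcher guard above keeps the dictionary from being re-read on every trigger. A minimal sketch of how such a watcher might be set up, assuming nifi-utils' SynchronousFileWatcher and LastModifiedMonitor; the DICTIONARY_FILE property name and 60-second poll interval are illustrative:

import java.io.IOException;
import java.nio.file.Paths;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.util.file.monitor.LastModifiedMonitor;
import org.apache.nifi.util.file.monitor.SynchronousFileWatcher;

@OnScheduled
public void setUpDictionary(final ProcessContext context) throws IOException {
    // Poll the dictionary file's last-modified time at most once per minute;
    // checkAndReset() in onTrigger then returns true only when the file changed.
    final String dictionaryPath = context.getProperty(DICTIONARY_FILE).getValue();
    fileWatcher = new SynchronousFileWatcher(Paths.get(dictionaryPath), new LastModifiedMonitor(), 60000L);
    this.dictionaryTerms = createDictionary(context);
}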
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class SplitJson, method onTrigger:
@Override
public void onTrigger(final ProcessContext processContext, final ProcessSession processSession) {
    FlowFile original = processSession.get();
    if (original == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    DocumentContext documentContext;
    try {
        documentContext = validateAndEstablishJsonContext(processSession, original);
    } catch (InvalidJsonException e) {
        logger.error("FlowFile {} did not have valid JSON content.", new Object[] { original });
        processSession.transfer(original, REL_FAILURE);
        return;
    }
    final JsonPath jsonPath = JSON_PATH_REF.get();
    Object jsonPathResult;
    try {
        jsonPathResult = documentContext.read(jsonPath);
    } catch (PathNotFoundException e) {
        logger.warn("JsonPath {} could not be found for FlowFile {}", new Object[] { jsonPath.getPath(), original });
        processSession.transfer(original, REL_FAILURE);
        return;
    }
    if (!(jsonPathResult instanceof List)) {
        logger.error("The evaluated value {} of {} was not a JSON Array compatible type and cannot be split.", new Object[] { jsonPathResult, jsonPath.getPath() });
        processSession.transfer(original, REL_FAILURE);
        return;
    }
    List resultList = (List) jsonPathResult;
    Map<String, String> attributes = new HashMap<>();
    final String fragmentId = UUID.randomUUID().toString();
    attributes.put(FRAGMENT_ID.key(), fragmentId);
    attributes.put(FRAGMENT_COUNT.key(), Integer.toString(resultList.size()));
    for (int i = 0; i < resultList.size(); i++) {
        Object resultSegment = resultList.get(i);
        FlowFile split = processSession.create(original);
        split = processSession.write(split, (out) -> {
            String resultSegmentContent = getResultRepresentation(resultSegment, nullDefaultValue);
            out.write(resultSegmentContent.getBytes(StandardCharsets.UTF_8));
        });
        attributes.put(SEGMENT_ORIGINAL_FILENAME.key(), split.getAttribute(CoreAttributes.FILENAME.key()));
        attributes.put(FRAGMENT_INDEX.key(), Integer.toString(i));
        processSession.transfer(processSession.putAllAttributes(split, attributes), REL_SPLIT);
    }
    original = copyAttributesToOriginal(processSession, original, fragmentId, resultList.size());
    processSession.transfer(original, REL_ORIGINAL);
    logger.info("Split {} into {} FlowFiles", new Object[] { original, resultList.size() });
}
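The fragment.* attributes written above follow NiFi's standard fragment convention, so downstream processors such as MergeContent can reassemble the splits. A minimal test sketch, assuming the nifi-mock TestRunner API and the processor's ARRAY_JSON_PATH_EXPRESSION property:

import static java.nio.charset.StandardCharsets.UTF_8;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.Test;

@Test
public void testSplitsTopLevelArray() {
    final TestRunner runner = TestRunners.newTestRunner(new SplitJson());
    runner.setProperty(SplitJson.ARRAY_JSON_PATH_EXPRESSION, "$[*]");
    runner.enqueue("[{\"id\":1},{\"id\":2},{\"id\":3}]".getBytes(UTF_8));
    runner.run();
    // one child FlowFile per array element, plus the original
    runner.assertTransferCount(SplitJson.REL_SPLIT, 3);
    runner.assertTransferCount(SplitJson.REL_ORIGINAL, 1);
    runner.assertTransferCount(SplitJson.REL_FAILURE, 0);
}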
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class TransformXml, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final FlowFile original = session.get();
    if (original == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    final StopWatch stopWatch = new StopWatch(true);
    final String xsltFileName = context.getProperty(XSLT_FILE_NAME).evaluateAttributeExpressions(original).getValue();
    final Boolean indentOutput = context.getProperty(INDENT_OUTPUT).asBoolean();
    try {
        FlowFile transformed = session.write(original, new StreamCallback() {
            @Override
            public void process(final InputStream rawIn, final OutputStream out) throws IOException {
                try (final InputStream in = new BufferedInputStream(rawIn)) {
                    final Templates templates;
                    if (cache != null) {
                        templates = cache.get(xsltFileName);
                    } else {
                        templates = newTemplates(context, xsltFileName);
                    }
                    final Transformer transformer = templates.newTransformer();
                    transformer.setOutputProperty(OutputKeys.INDENT, (indentOutput ? "yes" : "no"));
                    // pass all dynamic properties to the transformer
                    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
                        if (entry.getKey().isDynamic()) {
                            String value = context.newPropertyValue(entry.getValue()).evaluateAttributeExpressions(original).getValue();
                            transformer.setParameter(entry.getKey().getName(), value);
                        }
                    }
                    // use a StreamSource with Saxon
                    StreamSource source = new StreamSource(in);
                    StreamResult result = new StreamResult(out);
                    transformer.transform(source, result);
                } catch (final Exception e) {
                    throw new IOException(e);
                }
            }
        });
        session.transfer(transformed, REL_SUCCESS);
        session.getProvenanceReporter().modifyContent(transformed, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        logger.info("Transformed {}", new Object[] { original });
    } catch (ProcessException e) {
        logger.error("Unable to transform {} due to {}", new Object[] { original, e });
        session.transfer(original, REL_FAILURE);
    }
}
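The cache consulted above holds compiled Templates objects so the stylesheet is not re-parsed for every FlowFile. A minimal sketch of how such a cache might be built with Guava; the size and expiry values are illustrative, not NiFi's defaults:

import java.util.concurrent.TimeUnit;
import javax.xml.transform.Templates;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.processor.ProcessContext;

private volatile LoadingCache<String, Templates> cache;

@OnScheduled
public void buildTemplatesCache(final ProcessContext context) {
    // compiling an XSLT is expensive; cache one Templates per stylesheet path
    cache = CacheBuilder.newBuilder()
            .maximumSize(10)
            .expireAfterWrite(60, TimeUnit.SECONDS)
            .build(new CacheLoader<String, Templates>() {
                @Override
                public Templates load(final String xsltFileName) throws Exception {
                    return newTemplates(context, xsltFileName);
                }
            });
}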
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class UnpackContent, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final ComponentLog logger = getLogger();
    PackageFormat packagingFormat = PackageFormat.getFormat(context.getProperty(PACKAGING_FORMAT).getValue().toLowerCase());
    if (packagingFormat == PackageFormat.AUTO_DETECT_FORMAT) {
        packagingFormat = null;
        final String mimeType = flowFile.getAttribute(CoreAttributes.MIME_TYPE.key());
        if (mimeType == null) {
            logger.error("No mime.type attribute set for {}; routing to failure", new Object[] { flowFile });
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
        for (PackageFormat format : PackageFormat.values()) {
            if (mimeType.toLowerCase().equals(format.getMimeType())) {
                packagingFormat = format;
            }
        }
        if (packagingFormat == null) {
            logger.info("Cannot unpack {} because its mime.type attribute is set to '{}', which is not a format that can be unpacked; routing to 'success'", new Object[] { flowFile, mimeType });
            session.transfer(flowFile, REL_SUCCESS);
            return;
        }
    }
    // set the Unpacker to use for this FlowFile. FlowFileUnpackager objects maintain state and are not reusable.
    final Unpacker unpacker;
    final boolean addFragmentAttrs;
    switch (packagingFormat) {
        case TAR_FORMAT:
        case X_TAR_FORMAT:
            unpacker = tarUnpacker;
            addFragmentAttrs = true;
            break;
        case ZIP_FORMAT:
            unpacker = zipUnpacker;
            addFragmentAttrs = true;
            break;
        case FLOWFILE_STREAM_FORMAT_V2:
            unpacker = new FlowFileStreamUnpacker(new FlowFileUnpackagerV2());
            addFragmentAttrs = false;
            break;
        case FLOWFILE_STREAM_FORMAT_V3:
            unpacker = new FlowFileStreamUnpacker(new FlowFileUnpackagerV3());
            addFragmentAttrs = false;
            break;
        case FLOWFILE_TAR_FORMAT:
            unpacker = new FlowFileStreamUnpacker(new FlowFileUnpackagerV1());
            addFragmentAttrs = false;
            break;
        case AUTO_DETECT_FORMAT:
        default:
            // The format of the unpacker should be known before initialization
            throw new ProcessException(packagingFormat + " is not a valid packaging format");
    }
    final List<FlowFile> unpacked = new ArrayList<>();
    try {
        unpacker.unpack(session, flowFile, unpacked);
        if (unpacked.isEmpty()) {
            logger.error("Unable to unpack {} because it does not appear to have any entries; routing to failure", new Object[] { flowFile });
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
        if (addFragmentAttrs) {
            finishFragmentAttributes(session, flowFile, unpacked);
        }
        session.transfer(unpacked, REL_SUCCESS);
        final String fragmentId = unpacked.size() > 0 ? unpacked.get(0).getAttribute(FRAGMENT_ID) : null;
        flowFile = FragmentAttributes.copyAttributesToOriginal(session, flowFile, fragmentId, unpacked.size());
        session.transfer(flowFile, REL_ORIGINAL);
        session.getProvenanceReporter().fork(flowFile, unpacked);
        logger.info("Unpacked {} into {} and transferred to success", new Object[] { flowFile, unpacked });
    } catch (final ProcessException | InvalidPathException e) {
        logger.error("Unable to unpack {} due to {}; routing to failure", new Object[] { flowFile, e });
        session.transfer(flowFile, REL_FAILURE);
        session.remove(unpacked);
    }
}
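The Unpacker chosen above encapsulates per-format extraction, and the comment explains why stream unpackers are constructed fresh on every trigger: FlowFileUnpackager instances are stateful. A condensed, illustrative sketch of a stream unpacker built around the FlowFileUnpackager contract (hasMoreData/unpackageFlowFile), not the verbatim NiFi class:

import java.io.BufferedInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.util.FlowFileUnpackager;

private static class FlowFileStreamUnpacker extends Unpacker {
    private final FlowFileUnpackager unpackager;

    FlowFileStreamUnpacker(final FlowFileUnpackager unpackager) {
        this.unpackager = unpackager;
    }

    @Override
    public void unpack(final ProcessSession session, final FlowFile source, final List<FlowFile> unpacked) {
        session.read(source, rawIn -> {
            try (final InputStream in = new BufferedInputStream(rawIn)) {
                while (unpackager.hasMoreData()) {
                    // each packaged entry becomes one child FlowFile carrying its packaged attributes
                    final Map<String, String> attributes = new HashMap<>();
                    FlowFile child = session.create(source);
                    child = session.write(child, out -> {
                        final Map<String, String> entryAttributes = unpackager.unpackageFlowFile(in, out);
                        if (entryAttributes != null) {
                            attributes.putAll(entryAttributes);
                        }
                    });
                    unpacked.add(session.putAllAttributes(child, attributes));
                }
            }
        });
    }
}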
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class HashAttribute, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final Map<String, Pattern> patterns = regexMapRef.get();
    final ComponentLog logger = getLogger();
    final SortedMap<String, String> attributes = getRelevantAttributes(flowFile, patterns);
    if (attributes.size() != patterns.size()) {
        final Set<String> wantedKeys = patterns.keySet();
        final Set<String> foundKeys = attributes.keySet();
        final StringBuilder missingKeys = new StringBuilder();
        for (final String wantedKey : wantedKeys) {
            if (!foundKeys.contains(wantedKey)) {
                missingKeys.append(wantedKey).append(" ");
            }
        }
        logger.error("routing {} to 'failure' because of missing attributes: {}", new Object[] { flowFile, missingKeys.toString() });
        session.transfer(flowFile, REL_FAILURE);
    } else {
        // create single string of attribute key/value pairs to use for group ID hash
        final StringBuilder hashableValue = new StringBuilder();
        for (final Map.Entry<String, String> entry : attributes.entrySet()) {
            hashableValue.append(entry.getKey());
            if (StringUtils.isBlank(entry.getValue())) {
                hashableValue.append("EMPTY");
            } else {
                hashableValue.append(entry.getValue());
            }
        }
        // create group ID
        final String hashValue = DigestUtils.md5Hex(hashableValue.toString());
        logger.info("adding Hash Value {} to attributes for {} and routing to success", new Object[] { hashValue, flowFile });
        flowFile = session.putAttribute(flowFile, context.getProperty(HASH_VALUE_ATTRIBUTE).getValue(), hashValue);
        session.getProvenanceReporter().modifyAttributes(flowFile);
        session.transfer(flowFile, REL_SUCCESS);
    }
}
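Because the group ID above is simply an MD5 of the concatenated key/value pairs (with blank values replaced by "EMPTY"), FlowFiles whose matched attributes are identical always receive the same hash. An illustrative check; the attribute strings are hypothetical:

import org.apache.commons.codec.digest.DigestUtils;

// two FlowFiles with the same matched attributes hash to the same group ID
final String groupA = DigestUtils.md5Hex("filename" + "log.1.gz" + "site" + "EMPTY");
final String groupB = DigestUtils.md5Hex("filename" + "log.1.gz" + "site" + "EMPTY");
assert groupA.equals(groupB); // identical input => identical group ID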