Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ScrollElasticsearchHttp, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    try {
        if (isQueryFinished(context.getStateManager())) {
            getLogger().trace("Query has been marked finished in the state manager. To run another query, clear the state.");
            return;
        }
    } catch (IOException e) {
        throw new ProcessException("Could not retrieve state", e);
    }
    OkHttpClient okHttpClient = getClient();
    FlowFile flowFile = session.create();
    final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
    final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
    final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
    final int pageSize = context.getProperty(PAGE_SIZE).evaluateAttributeExpressions(flowFile).asInteger().intValue();
    final String fields = context.getProperty(FIELDS).isSet() ? context.getProperty(FIELDS).evaluateAttributeExpressions(flowFile).getValue() : null;
    final String sort = context.getProperty(SORT).isSet() ? context.getProperty(SORT).evaluateAttributeExpressions(flowFile).getValue() : null;
    final String scroll = context.getProperty(SCROLL_DURATION).isSet() ? context.getProperty(SCROLL_DURATION).evaluateAttributeExpressions(flowFile).getValue() : null;
    // Authentication
    final String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
    final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();
    final ComponentLog logger = getLogger();
    try {
        String scrollId = loadScrollId(context.getStateManager());
        // read the url property from the context
        final String urlstr = StringUtils.trimToEmpty(context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());
        if (scrollId != null) {
            // a scroll ID was saved in state, so continue the existing scroll
            final URL scrollurl = buildRequestURL(urlstr, query, index, docType, fields, sort, scrollId, pageSize, scroll, context);
            final long startNanos = System.nanoTime();
            final Response getResponse = sendRequestToElasticsearch(okHttpClient, scrollurl, username, password, "GET", null);
            this.getPage(getResponse, scrollurl, context, session, flowFile, logger, startNanos);
            getResponse.close();
        } else {
            logger.debug("Querying {}/{} from Elasticsearch: {}", new Object[] { index, docType, query });
            // no scroll ID in state, so issue the initial query
            final URL queryUrl = buildRequestURL(urlstr, query, index, docType, fields, sort, scrollId, pageSize, scroll, context);
            final long startNanos = System.nanoTime();
            final Response getResponse = sendRequestToElasticsearch(okHttpClient, queryUrl, username, password, "GET", null);
            this.getPage(getResponse, queryUrl, context, session, flowFile, logger, startNanos);
            getResponse.close();
        }
    } catch (IOException ioe) {
        logger.error("Failed to read from Elasticsearch due to {}, this may indicate an error in configuration (hosts, username/password, etc.).", new Object[] { ioe.getLocalizedMessage() }, ioe);
        session.remove(flowFile);
        context.yield();
    } catch (Exception e) {
        logger.error("Failed to read {} from Elasticsearch due to {}", new Object[] { flowFile, e.getLocalizedMessage() }, e);
        session.transfer(flowFile, REL_FAILURE);
        context.yield();
    }
}
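The paging logic above hinges on two state-manager helpers: isQueryFinished(...) gates re-execution once the scroll is exhausted, and loadScrollId(...) decides between the initial query and a scroll continuation. A minimal sketch of how such helpers might look, assuming hypothetical state keys "finishedQuery" and "scrollId" (the processor's real key names and scope may differ):

import java.io.IOException;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;

// Hypothetical state keys, for illustration only
private static final String FINISHED_QUERY_STATE = "finishedQuery";
private static final String SCROLL_ID_STATE = "scrollId";

private boolean isQueryFinished(final StateManager stateManager) throws IOException {
    final StateMap stateMap = stateManager.getState(Scope.LOCAL);
    // A missing key means a fresh processor, so the query is not finished
    return "true".equals(stateMap.get(FINISHED_QUERY_STATE));
}

private String loadScrollId(final StateManager stateManager) throws IOException {
    final StateMap stateMap = stateManager.getState(Scope.LOCAL);
    // Null signals the first page of a new query; non-null continues the scroll
    return stateMap.get(SCROLL_ID_STATE);
}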
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ExtractEmailAttachments, method onTrigger:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final ComponentLog logger = getLogger();
    final FlowFile originalFlowFile = session.get();
    if (originalFlowFile == null) {
        return;
    }
    final List<FlowFile> attachmentsList = new ArrayList<>();
    final List<FlowFile> invalidFlowFilesList = new ArrayList<>();
    final List<FlowFile> originalFlowFilesList = new ArrayList<>();
    final String requireStrictAddresses = "false";
    session.read(originalFlowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream rawIn) throws IOException {
            try (final InputStream in = new BufferedInputStream(rawIn)) {
                Properties props = new Properties();
                props.put("mail.mime.address.strict", requireStrictAddresses);
                Session mailSession = Session.getInstance(props);
                MimeMessage originalMessage = new MimeMessage(mailSession, in);
                MimeMessageParser parser = new MimeMessageParser(originalMessage).parse();
                // RFC 2822 requires a message to carry a "From:" header;
                // if the message lacks the field, it is flagged as invalid
                Address[] from = originalMessage.getFrom();
                if (from == null) {
                    throw new MessagingException("Message failed RFC 2822 validation: No Sender");
                }
                Date sentDate = originalMessage.getSentDate();
                if (sentDate == null) {
                    // Throws MessagingException due to lack of minimum required headers
                    throw new MessagingException("Message failed RFC 2822 validation: No Sent Date");
                }
                originalFlowFilesList.add(originalFlowFile);
                if (parser.hasAttachments()) {
                    final String originalFlowFileName = originalFlowFile.getAttribute(CoreAttributes.FILENAME.key());
                    try {
                        for (final DataSource data : parser.getAttachmentList()) {
                            FlowFile split = session.create(originalFlowFile);
                            final Map<String, String> attributes = new HashMap<>();
                            if (StringUtils.isNotBlank(data.getName())) {
                                attributes.put(CoreAttributes.FILENAME.key(), data.getName());
                            }
                            if (StringUtils.isNotBlank(data.getContentType())) {
                                attributes.put(CoreAttributes.MIME_TYPE.key(), data.getContentType());
                            }
                            String parentUuid = originalFlowFile.getAttribute(CoreAttributes.UUID.key());
                            attributes.put(ATTACHMENT_ORIGINAL_UUID, parentUuid);
                            attributes.put(ATTACHMENT_ORIGINAL_FILENAME, originalFlowFileName);
                            split = session.append(split, new OutputStreamCallback() {
                                @Override
                                public void process(OutputStream out) throws IOException {
                                    IOUtils.copy(data.getInputStream(), out);
                                }
                            });
                            split = session.putAllAttributes(split, attributes);
                            attachmentsList.add(split);
                        }
                    } catch (FlowFileHandlingException e) {
                        // Something went wrong: remove any splits that may have been created
                        session.remove(attachmentsList);
                        // and remove the original FlowFile from its list
                        originalFlowFilesList.remove(originalFlowFile);
                        logger.error("FlowFile {} triggered error {} while processing message; removing generated FlowFiles from session", new Object[] { originalFlowFile, e });
                        invalidFlowFilesList.add(originalFlowFile);
                    }
                }
            } catch (Exception e) {
                // Message is invalid or triggered an error during parsing,
                // so remove the original FlowFile from its list
                originalFlowFilesList.remove(originalFlowFile);
                logger.error("Could not parse the FlowFile {} as an email, treating as failure", new Object[] { originalFlowFile }, e);
                invalidFlowFilesList.add(originalFlowFile);
            }
        }
    });
    session.transfer(attachmentsList, REL_ATTACHMENTS);
    // As per the code above, the original FlowFile may be routed to failure or
    // original depending on RFC 2822 compliance.
    session.transfer(invalidFlowFilesList, REL_FAILURE);
    session.transfer(originalFlowFilesList, REL_ORIGINAL);
    if (attachmentsList.size() > 10) {
        logger.info("Split {} into {} files", new Object[] { originalFlowFile, attachmentsList.size() });
    } else if (attachmentsList.size() > 1) {
        logger.info("Split {} into {} files: {}", new Object[] { originalFlowFile, attachmentsList.size(), attachmentsList });
    }
}
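Because the processor routes strictly on RFC 2822 compliance, a quick way to exercise it is NiFi's TestRunner. A minimal sketch, assuming the public relationship fields used above; the two-header message below is a hypothetical minimal email with no attachments:

import java.nio.charset.StandardCharsets;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;

@Test
public void testPlainMessageRoutedToOriginal() {
    final TestRunner runner = TestRunners.newTestRunner(new ExtractEmailAttachments());
    // A minimal RFC 2822 message: From and Date satisfy the validation above
    final String email = "From: alice@example.com\r\nDate: Tue, 1 Jan 2019 00:00:00 +0000\r\n\r\nno attachments here";
    runner.enqueue(email.getBytes(StandardCharsets.UTF_8));
    runner.run();
    // With no attachments, the input is routed to original and nothing is split off
    runner.assertTransferCount(ExtractEmailAttachments.REL_ORIGINAL, 1);
    runner.assertTransferCount(ExtractEmailAttachments.REL_ATTACHMENTS, 0);
    runner.assertTransferCount(ExtractEmailAttachments.REL_FAILURE, 0);
}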
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ParseEvtx, method onTrigger:
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    ComponentLog logger = getLogger();
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    String basename = getBasename(flowFile, logger);
    String granularity = context.getProperty(GRANULARITY).getValue();
    if (FILE.equals(granularity)) {
        // File granularity will emit a FlowFile for each input
        FlowFile original = session.clone(flowFile);
        AtomicReference<Exception> exceptionReference = new AtomicReference<>(null);
        FlowFile updated = session.write(flowFile, (in, out) -> {
            processFileGranularity(session, logger, original, basename, exceptionReference, in, out);
        });
        session.transfer(original, REL_ORIGINAL);
        resultProcessor.process(session, logger, updated, exceptionReference.get(), getName(basename, null, null, XML_EXTENSION));
    } else {
        session.read(flowFile, in -> {
            if (RECORD.equals(granularity)) {
                // Record granularity will emit a FlowFile for every record (event)
                processRecordGranularity(session, logger, flowFile, basename, in);
            } else if (CHUNK.equals(granularity)) {
                // Chunk granularity will emit a FlowFile for each chunk of the file
                processChunkGranularity(session, logger, flowFile, basename, in);
            }
        });
        session.transfer(flowFile, REL_ORIGINAL);
    }
}
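All granularity paths name their output via getName(basename, chunkNumber, recordNumber, extension). The exact scheme lives elsewhere in ParseEvtx; the following is a plausible sketch, assuming dash-separated segments (the real separator and ordering may differ):

// Illustrative only: builds names like "security-chunk-3-record-17.xml";
// ParseEvtx's actual naming scheme may differ.
protected String getName(final String basename, final Object chunkNumber, final Object recordNumber, final String extension) {
    final StringBuilder name = new StringBuilder(basename);
    if (chunkNumber != null) {
        name.append("-chunk-").append(chunkNumber);
    }
    if (recordNumber != null) {
        name.append("-record-").append(recordNumber);
    }
    return name.append('.').append(extension).toString();
}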
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class ParseEvtx, method processRecordGranularity:
protected void processRecordGranularity(ProcessSession session, ComponentLog componentLog, FlowFile flowFile, String basename, InputStream in) throws IOException {
    FileHeader fileHeader = fileHeaderFactory.create(in, componentLog);
    while (fileHeader.hasNext()) {
        try {
            ChunkHeader chunkHeader = fileHeader.next();
            while (chunkHeader.hasNext()) {
                FlowFile updated = session.create(flowFile);
                AtomicReference<Exception> exceptionReference = new AtomicReference<>(null);
                try {
                    Record record = chunkHeader.next();
                    updated = session.write(updated, out -> {
                        try (RootNodeHandler rootNodeHandler = rootNodeHandlerFactory.create(out)) {
                            try {
                                rootNodeHandler.handle(record.getRootNode());
                            } catch (IOException e) {
                                exceptionReference.set(e);
                            }
                        } catch (IOException e) {
                            exceptionReference.set(e);
                        }
                    });
                    resultProcessor.process(session, componentLog, updated, exceptionReference.get(), getName(basename, chunkHeader.getChunkNumber(), record.getRecordNum(), XML_EXTENSION));
                } catch (Exception e) {
                    exceptionReference.set(e);
                    session.remove(updated);
                }
                if (exceptionReference.get() != null) {
                    malformedChunkHandler.handle(flowFile, session, getName(basename, chunkHeader.getChunkNumber(), null, EVTX_EXTENSION), chunkHeader.getBinaryReader().getBytes());
                }
            }
        } catch (MalformedChunkException e) {
            malformedChunkHandler.handle(flowFile, session, getName(basename, e.getChunkNum(), null, EVTX_EXTENSION), e.getBadChunk());
        }
    }
}
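When a record fails to convert, or a whole chunk is malformed, the raw chunk bytes are handed to malformedChunkHandler so they are not silently dropped. A minimal sketch of such a handler, assuming a hypothetical REL_BAD_CHUNK relationship (the real handler's routing and attributes may differ):

import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.ProcessSession;

public void handle(final FlowFile original, final ProcessSession session, final String name, final byte[] badChunk) {
    if (badChunk != null && badChunk.length > 0) {
        FlowFile bad = session.create(original);
        // Carry the original bytes so the bad chunk can be inspected or replayed
        bad = session.write(bad, out -> out.write(badChunk));
        bad = session.putAttribute(bad, CoreAttributes.FILENAME.key(), name);
        // REL_BAD_CHUNK is a hypothetical relationship for this sketch
        session.transfer(bad, REL_BAD_CHUNK);
    }
}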
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
The class TestAmbariReportingTask, method testOnTrigger:
@Test
public void testOnTrigger() throws InitializationException, IOException {
    final String metricsUrl = "http://myambari:6188/ws/v1/timeline/metrics";
    final String applicationId = "NIFI";
    final String hostName = "localhost";
    // create the jersey client mocks for handling the post
    final Client client = Mockito.mock(Client.class);
    final WebTarget target = Mockito.mock(WebTarget.class);
    final Invocation.Builder builder = Mockito.mock(Invocation.Builder.class);
    final Response response = Mockito.mock(Response.class);
    Mockito.when(response.getStatus()).thenReturn(200);
    Mockito.when(client.target(metricsUrl)).thenReturn(target);
    Mockito.when(target.request()).thenReturn(builder);
    Mockito.when(builder.post(Matchers.any(Entity.class))).thenReturn(response);
    // mock the ReportingInitializationContext for initialize(...)
    final ComponentLog logger = Mockito.mock(ComponentLog.class);
    final ReportingInitializationContext initContext = Mockito.mock(ReportingInitializationContext.class);
    Mockito.when(initContext.getIdentifier()).thenReturn(UUID.randomUUID().toString());
    Mockito.when(initContext.getLogger()).thenReturn(logger);
    // mock the ConfigurationContext for setup(...)
    final ConfigurationContext configurationContext = Mockito.mock(ConfigurationContext.class);
    // mock the ReportingContext for onTrigger(...)
    final ReportingContext context = Mockito.mock(ReportingContext.class);
    Mockito.when(context.getProperty(AmbariReportingTask.METRICS_COLLECTOR_URL)).thenReturn(new MockPropertyValue(metricsUrl));
    Mockito.when(context.getProperty(AmbariReportingTask.APPLICATION_ID)).thenReturn(new MockPropertyValue(applicationId));
    Mockito.when(context.getProperty(AmbariReportingTask.HOSTNAME)).thenReturn(new MockPropertyValue(hostName));
    Mockito.when(context.getProperty(AmbariReportingTask.PROCESS_GROUP_ID)).thenReturn(new MockPropertyValue("1234"));
    // 'status' is a ProcessGroupStatus fixture initialized in the test class's setup
    final EventAccess eventAccess = Mockito.mock(EventAccess.class);
    Mockito.when(context.getEventAccess()).thenReturn(eventAccess);
    Mockito.when(eventAccess.getControllerStatus()).thenReturn(status);
    // create a testable instance of the reporting task
    final AmbariReportingTask task = new TestableAmbariReportingTask(client);
    task.initialize(initContext);
    task.setup(configurationContext);
    task.onTrigger(context);
}
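The test works because TestableAmbariReportingTask substitutes the mocked Jersey client for the real one. A plausible sketch of that subclass, assuming AmbariReportingTask exposes a protected createClient() hook (the actual extension point may differ):

private static class TestableAmbariReportingTask extends AmbariReportingTask {

    private final Client testClient;

    TestableAmbariReportingTask(final Client testClient) {
        this.testClient = testClient;
    }

    @Override
    protected Client createClient() {
        // Hand back the Mockito mock instead of building a real Jersey client
        return testClient;
    }
}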