Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class TestMonitorDiskUsage, method testGeneratesMessageIfTooFull.
@Test
public void testGeneratesMessageIfTooFull() {
    final AtomicInteger callCounter = new AtomicInteger(0);
    final ComponentLog logger = Mockito.mock(ComponentLog.class);

    Mockito.doAnswer(invocation -> {
        final String message = (String) invocation.getArguments()[0];
        System.out.println(message);
        callCounter.incrementAndGet();
        return null;
    }).when(logger).warn(Mockito.anyString());

    MonitorDiskUsage.checkThreshold("Test Path", Paths.get("."), 0, logger);
    assertEquals(1, callCounter.get());
}
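An alternative way to make the same assertion, sketched below, captures the log message with Mockito's ArgumentCaptor (org.mockito.ArgumentCaptor) instead of counting calls in a doAnswer. It assumes, as the test above does, that checkThreshold invokes the single-argument warn(String) overload exactly once when the threshold is 0; the method name with the _withCaptor suffix is illustrative, not part of the NiFi test class.

@Test
public void testGeneratesMessageIfTooFull_withCaptor() {
    final ComponentLog logger = Mockito.mock(ComponentLog.class);
    MonitorDiskUsage.checkThreshold("Test Path", Paths.get("."), 0, logger);

    // Capture the warning that a 0% threshold should always trigger.
    final ArgumentCaptor<String> messageCaptor = ArgumentCaptor.forClass(String.class);
    Mockito.verify(logger, Mockito.times(1)).warn(messageCaptor.capture());
    assertNotNull(messageCaptor.getValue());
}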
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class JettyWebSocketClient, method maintainSessions.
void maintainSessions() throws Exception {
    if (client == null) {
        return;
    }

    connectionLock.lock();
    final ComponentLog logger = getLogger();
    try {
        // Loop through existing sessions and reconnect any that have dropped.
        for (String clientId : activeSessions.keySet()) {
            final WebSocketMessageRouter router;
            try {
                router = routers.getRouterOrFail(clientId);
            } catch (final WebSocketConfigurationException e) {
                if (logger.isDebugEnabled()) {
                    logger.debug("The clientId {} is no longer active. Discarding the clientId.", new Object[] { clientId });
                }
                activeSessions.remove(clientId);
                continue;
            }

            final String sessionId = activeSessions.get(clientId);
            // If this session is still alive, do nothing.
            if (!router.containsSession(sessionId)) {
                // This session is no longer active, so reconnect it.
                // If reconnection fails, the sessionId remains in activeSessions and is retried later.
                // Reconnect attempts continue until the user explicitly stops the processor or this controller service.
                connect(clientId, sessionId);
            }
        }
    } finally {
        connectionLock.unlock();
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Session maintenance completed. activeSessions={}", new Object[] { activeSessions });
    }
}
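For context, a maintenance method like this is typically driven by a periodic scheduler. Below is a minimal sketch of one plausible wiring using a ScheduledExecutorService; the field and method names (sessionMaintenanceScheduler, startSessionMaintenance) are illustrative assumptions, not the actual JettyWebSocketClient members.

private final ScheduledExecutorService sessionMaintenanceScheduler =
        Executors.newSingleThreadScheduledExecutor();

void startSessionMaintenance(final long intervalMillis) {
    sessionMaintenanceScheduler.scheduleAtFixedRate(() -> {
        try {
            maintainSessions();
        } catch (final Exception e) {
            // Log and swallow: an exception thrown from the task would otherwise
            // cancel all subsequent runs scheduled by scheduleAtFixedRate.
            getLogger().warn("Session maintenance failed", e);
        }
    }, intervalMillis, intervalMillis, TimeUnit.MILLISECONDS);
}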
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class CSVRecordLookupService, method loadCache.
private void loadCache() throws IllegalStateException, IOException {
    if (lock.tryLock()) {
        try {
            final ComponentLog logger = getLogger();
            if (logger.isDebugEnabled()) {
                logger.debug("Loading lookup table from file: " + csvFile);
            }

            // Use try-with-resources so the reader and parser are closed even if parsing fails.
            try (final FileReader reader = new FileReader(csvFile);
                 final CSVParser records = csvFormat.withFirstRecordAsHeader().parse(reader)) {
                ConcurrentHashMap<String, Record> cache = new ConcurrentHashMap<>();
                RecordSchema lookupRecordSchema = null;
                for (final CSVRecord record : records) {
                    final String key = record.get(lookupKeyColumn);
                    if (StringUtils.isBlank(key)) {
                        throw new IllegalStateException("Empty lookup key encountered in: " + csvFile);
                    } else if (!ignoreDuplicates && cache.containsKey(key)) {
                        throw new IllegalStateException("Duplicate lookup key encountered: " + key + " in " + csvFile);
                    } else if (ignoreDuplicates && cache.containsKey(key)) {
                        logger.warn("Duplicate lookup key encountered: {} in {}", new Object[] { key, csvFile });
                    }

                    // Put each key/value pair (except the lookup key itself) into the properties.
                    final Map<String, Object> properties = new HashMap<>();
                    record.toMap().forEach((k, v) -> {
                        if (!lookupKeyColumn.equals(k)) {
                            properties.put(k, v);
                        }
                    });

                    if (lookupRecordSchema == null) {
                        List<RecordField> recordFields = new ArrayList<>(properties.size());
                        properties.forEach((k, v) -> recordFields.add(new RecordField(k, RecordFieldType.STRING.getDataType())));
                        lookupRecordSchema = new SimpleRecordSchema(recordFields);
                    }

                    cache.put(key, new MapRecord(lookupRecordSchema, properties));
                }

                this.cache = cache;
                if (cache.isEmpty()) {
                    logger.warn("Lookup table is empty after reading file: " + csvFile);
                }
            }
        } finally {
            lock.unlock();
        }
    }
}
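Once loaded, the cache backs record lookups. The following is a minimal usage sketch assuming the standard NiFi LookupService contract (an Optional-returning lookup keyed by a coordinate map, throwing LookupFailureException); the coordinate name "key" and the lookupService variable are assumptions here and should be checked against the service's documentation.

// Hypothetical caller; 'lookupService' is assumed to be a configured CSVRecordLookupService.
final Map<String, Object> coordinates = Collections.singletonMap("key", "some-lookup-key");
try {
    final Optional<Record> result = lookupService.lookup(coordinates);
    result.ifPresent(record -> getLogger().debug("Found record: {}", new Object[] { record }));
} catch (final LookupFailureException e) {
    getLogger().error("Lookup failed for coordinates {}", new Object[] { coordinates }, e);
}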
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class TestJsonTreeRowRecordReader, method testPerformanceOnIndividualMessages.
@Test
@Ignore("Intended only for manual testing to determine performance before/after modifications")
public void testPerformanceOnIndividualMessages() throws IOException, MalformedRecordException {
    final RecordSchema schema = new SimpleRecordSchema(Collections.emptyList());
    final File file = new File("/devel/nifi/nifi-assembly/target/nifi-1.2.0-SNAPSHOT-bin/nifi-1.2.0-SNAPSHOT/1.prov.json");
    final byte[] data = Files.readAllBytes(file.toPath());
    final ComponentLog logger = Mockito.mock(ComponentLog.class);

    int recordCount = 0;
    final int iterations = 1_000_000;

    for (int j = 0; j < 5; j++) {
        final long start = System.nanoTime();
        for (int i = 0; i < iterations; i++) {
            try (final InputStream in = new ByteArrayInputStream(data);
                 final JsonTreeRowRecordReader reader = new JsonTreeRowRecordReader(in, logger, schema, dateFormat, timeFormat, timestampFormat)) {
                while (reader.nextRecord() != null) {
                    recordCount++;
                }
            }
        }

        final long nanos = System.nanoTime() - start;
        final long millis = TimeUnit.NANOSECONDS.toMillis(nanos);
        System.out.println("Took " + millis + " millis to read " + recordCount + " records");
    }
}
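The warm-up-and-time pattern above (five outer passes, each timing a fixed number of iterations) can be factored into a small helper. The sketch below is purely illustrative and not part of the NiFi test class; it assumes java.util.concurrent.Callable is imported.

// Hypothetical helper: times 'iterations' executions of 'task' and prints the elapsed millis.
private static void timeIterations(final String label, final int iterations, final Callable<?> task) throws Exception {
    final long start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
        task.call();
    }
    final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
    System.out.println(label + " took " + millis + " millis for " + iterations + " iterations");
}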
Use of org.apache.nifi.logging.ComponentLog in project nifi by apache.
From class UpdateAttribute, method evaluateCriteria.
// Evaluates the specified Criteria on the specified flowfile. Unless the
// USE_ORIGINAL flow file policy applies, clones the flow file for each
// matched rule after the first.
private boolean evaluateCriteria(final ProcessSession session, final ProcessContext context, final Criteria criteria, final FlowFile flowfile, final Map<FlowFile, List<Rule>> matchedRules, final Map<String, String> statefulAttributes) {
    final ComponentLog logger = getLogger();
    final List<Rule> rules = criteria.getRules();

    // consider each rule and hold a copy of the flowfile for each matched rule
    for (final Rule rule : rules) {
        // evaluate the rule
        if (evaluateRule(context, rule, flowfile, statefulAttributes)) {
            final FlowFile flowfileToUse;

            // determine if we should use the original flow file or a clone
            if (FlowFilePolicy.USE_ORIGINAL.equals(criteria.getFlowFilePolicy()) || matchedRules.isEmpty()) {
                flowfileToUse = flowfile;
            } else {
                // clone the original for this rule
                flowfileToUse = session.clone(flowfile);
            }

            // store the flow file to use when executing this rule
            List<Rule> rulesForFlowFile = matchedRules.get(flowfileToUse);
            if (rulesForFlowFile == null) {
                rulesForFlowFile = new ArrayList<>();
                matchedRules.put(flowfileToUse, rulesForFlowFile);
            }
            rulesForFlowFile.add(rule);

            // log if appropriate
            if (debugEnabled) {
                logger.debug(this + " all conditions met for rule '" + rule.getName() + "'. Using flow file - " + flowfileToUse);
            }
        }
    }

    return !matchedRules.isEmpty();
}
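On Java 8+, the explicit null-check bookkeeping for matchedRules can be condensed with Map.computeIfAbsent; the one-liner below is behaviorally equivalent to the get/put sequence above.

// Equivalent to the null-check-then-put sequence in the method above:
matchedRules.computeIfAbsent(flowfileToUse, ff -> new ArrayList<>()).add(rule);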