Example use of org.apache.nifi.annotation.lifecycle.OnScheduled in the Apache NiFi project:
the recoverState method of the TailFile class.
@OnScheduled
public void recoverState(final ProcessContext context) throws IOException {
    // Multi-file mode requires periodically re-scanning the base directory for new files.
    isMultiChanging.set(context.getProperty(MODE).getValue().equals(MODE_MULTIFILE.getValue()));
    // Record the time of this lookup so the next directory scan can be scheduled from it.
    lastLookup.set(System.currentTimeMillis());

    // Files older than MAXIMUM_AGE are excluded from tailing; an absent property means no age limit.
    final long maxAge = context.getProperty(MAXIMUM_AGE).getValue() == null
            ? Long.MAX_VALUE
            : context.getProperty(MAXIMUM_AGE).asTimePeriod(TimeUnit.MILLISECONDS);

    // Build the list of files to tail: scan the base directory in multi-file mode,
    // otherwise use the single configured filename.
    final List<String> filesToTail = new ArrayList<>();
    if (context.getProperty(MODE).getValue().equals(MODE_MULTIFILE.getValue())) {
        filesToTail.addAll(getFilesToTail(
                context.getProperty(BASE_DIRECTORY).evaluateAttributeExpressions().getValue(),
                context.getProperty(FILENAME).evaluateAttributeExpressions().getValue(),
                context.getProperty(RECURSIVE).asBoolean(),
                maxAge));
    } else {
        filesToTail.add(context.getProperty(FILENAME).evaluateAttributeExpressions().getValue());
    }

    final Scope scope = getStateScope(context);
    final StateMap stateMap = context.getStateManager().getState(scope);
    if (stateMap.getVersion() == -1L) {
        // State has been cleared or never stored, so recover as 'empty state'.
        initStates(filesToTail, Collections.emptyMap(), true);
        recoverState(context, filesToTail, Collections.emptyMap());
        return;
    }

    Map<String, String> statesMap = stateMap.toMap();
    if (statesMap.containsKey(TailFileState.StateKeys.FILENAME)
            && statesMap.keySet().stream().noneMatch(key -> key.startsWith(MAP_PREFIX))) {
        // If statesMap contains the "filename" key without the "file.0." prefix, and there is no
        // key with the "file." prefix, the state was created by an earlier version of NiFi.
        // Migrate it by prefixing every key with index 0.
        final Map<String, String> migratedStatesMap = new HashMap<>(statesMap.size());
        for (final Map.Entry<String, String> entry : statesMap.entrySet()) {
            migratedStatesMap.put(MAP_PREFIX + "0." + entry.getKey(), entry.getValue());
        }
        // LENGTH was added in NiFi 1.1.0. Seed it with the last position so the existing state
        // can be reused without sending duplicated log data after updating NiFi.
        migratedStatesMap.put(MAP_PREFIX + "0." + TailFileState.StateKeys.LENGTH,
                statesMap.get(TailFileState.StateKeys.POSITION));
        statesMap = Collections.unmodifiableMap(migratedStatesMap);
        getLogger().info("statesMap has been migrated. {}", new Object[] {migratedStatesMap});
    }

    initStates(filesToTail, statesMap, false);
    recoverState(context, filesToTail, statesMap);
}
Example use of org.apache.nifi.annotation.lifecycle.OnScheduled in the Apache NiFi project:
the onScheduled method of the TransformXml class.
@OnScheduled
public void onScheduled(final ProcessContext context) {
    final ComponentLog logger = getLogger();
    final Integer cacheSize = context.getProperty(CACHE_SIZE).asInteger();
    final Long cacheTTL = context.getProperty(CACHE_TTL_AFTER_LAST_ACCESS).asTimePeriod(TimeUnit.SECONDS);

    if (cacheSize > 0) {
        // Parameterized builder type avoids the raw-type warning of the original code.
        CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder().maximumSize(cacheSize);
        if (cacheTTL > 0) {
            // Expire entries that have not been accessed within the configured TTL.
            cacheBuilder = cacheBuilder.expireAfterAccess(cacheTTL, TimeUnit.SECONDS);
        }

        // Lazily compile and cache a Templates object per stylesheet path.
        cache = cacheBuilder.build(new CacheLoader<String, Templates>() {
            @Override
            public Templates load(final String path) throws TransformerConfigurationException {
                return newTemplates(context, path);
            }
        });
    } else {
        // A cache size of 0 disables caching entirely; newTemplates is then invoked per use.
        cache = null;
        logger.warn("Stylesheet cache disabled because cache size is set to 0");
    }
}
Example use of org.apache.nifi.annotation.lifecycle.OnScheduled in the Apache NiFi project:
the parseSchema method of the ValidateXml class.
@OnScheduled
public void parseSchema(final ProcessContext context) throws IOException, SAXException {
    // Compile the configured schema file once at schedule time. A SAXException (invalid schema)
    // propagates directly from newSchema; the original try/catch that merely rethrew the same
    // exception was a no-op and has been removed.
    final File file = new File(context.getProperty(SCHEMA_FILE).getValue());
    final SchemaFactory schemaFactory = SchemaFactory.newInstance(SCHEMA_LANGUAGE);
    final Schema schema = schemaFactory.newSchema(file);
    this.schemaRef.set(schema);
}
Example use of org.apache.nifi.annotation.lifecycle.OnScheduled in the Apache NiFi project:
the onScheduled method of the ListenBeats class.
@Override
@OnScheduled
public void onScheduled(ProcessContext context) throws IOException {
// Delegate to the superclass first so its scheduling-time setup runs before ours.
super.onScheduled(context);
// Create the encoder only after super.onScheduled(context) — the original author noted this
// ordering ensures the charset has already been populated by the superclass.
beatsEncoder = new BeatsEncoder();
}
Example use of org.apache.nifi.annotation.lifecycle.OnScheduled in the Apache NiFi project:
the onScheduled method of the GetTCP class.
@OnScheduled
public void onScheduled(final ProcessContext context) throws ProcessException {
    // Copy the static configuration properties into fields.
    this.receiveBufferSize = context.getProperty(RECEIVE_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
    this.originalServerAddressList = context.getProperty(ENDPOINT_LIST).getValue();
    this.endOfMessageByte = ((byte) context.getProperty(END_OF_MESSAGE_BYTE).asInteger().intValue());
    this.connectionAttemptCount = context.getProperty(CONNECTION_ATTEMPT_COUNT).asInteger();
    this.reconnectInterval = context.getProperty(RECONNECT_INTERVAL).asTimePeriod(TimeUnit.MILLISECONDS);

    // Size the scheduler with one thread per configured endpoint plus one spare;
    // idle core threads are allowed to time out after 10 seconds.
    final int endpointCount = this.originalServerAddressList.split(",").length;
    this.clientScheduler = new ScheduledThreadPoolExecutor(endpointCount + 1);
    this.clientScheduler.setKeepAliveTime(10, TimeUnit.SECONDS);
    this.clientScheduler.allowCoreThreadTimeOut(true);

    // Collect the values of all dynamic properties, keyed by property name.
    context.getProperties().forEach((descriptor, value) -> {
        if (descriptor.isDynamic()) {
            this.dynamicAttributes.put(descriptor.getName(), value);
        }
    });
}
Aggregations