Use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
The class PutRiemann, method onScheduled:
@OnScheduled
public void onScheduled(ProcessContext context) throws ProcessException {
    if (batchSize == -1) {
        batchSize = context.getProperty(BATCH_SIZE).asInteger();
    }
    if (riemannClient == null || !riemannClient.isConnected()) {
        transport = Transport.valueOf(context.getProperty(TRANSPORT_PROTOCOL).getValue());
        String host = context.getProperty(RIEMANN_HOST).getValue().trim();
        int port = context.getProperty(RIEMANN_PORT).asInteger();
        writeTimeout = context.getProperty(TIMEOUT).asLong();
        RiemannClient client = null;
        try {
            switch (transport) {
                case TCP:
                    client = RiemannClient.tcp(host, port);
                    break;
                case UDP:
                    client = RiemannClient.udp(host, port);
                    break;
            }
            client.connect();
            riemannClient = client;
        } catch (IOException e) {
            if (client != null) {
                client.close();
            }
            context.yield();
            throw new ProcessException(String.format("Unable to connect to Riemann [%s:%d] (%s)\n%s", host, port, transport, e.getMessage()));
        }
    }
    if (customAttributes.size() == 0) {
        for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
            // only custom defined properties
            if (!getSupportedPropertyDescriptors().contains(property.getKey())) {
                customAttributes.add(property.getKey());
            }
        }
    }
}
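Because onScheduled only (re)connects when the cached client is missing or disconnected, the connection is normally released in a matching teardown method so that the next schedule picks up the current host, port, and transport properties. The following is a minimal sketch of that companion, assuming riemannClient is the only resource to release; the method name cleanup is illustrative and not quoted from PutRiemann.

import org.apache.nifi.annotation.lifecycle.OnStopped;

// Illustrative counterpart to onScheduled (assumed, not quoted from PutRiemann):
// drop the connection when the processor stops so onScheduled reconnects next time.
@OnStopped
public void cleanup() {
    if (riemannClient != null) {
        riemannClient.close();
        riemannClient = null;
    }
}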
Use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
The class ExecuteScript, method setup:
/**
 * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
 * properties, as well as reloading the script (from file or the "Script Body" property)
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ProcessContext context) {
    scriptingComponentHelper.setupVariables(context);
    // Create a script engine for each possible task
    int maxTasks = context.getMaxConcurrentTasks();
    scriptingComponentHelper.setup(maxTasks, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();
    try {
        if (scriptToRun == null && scriptingComponentHelper.getScriptPath() != null) {
            try (final FileInputStream scriptStream = new FileInputStream(scriptingComponentHelper.getScriptPath())) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (IOException ioe) {
        throw new ProcessException(ioe);
    }
}
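The setup method above follows the most common @OnScheduled pattern: resolve configuration that is expensive or static (here, the script body or script file) once, before onTrigger starts running, and keep it in a field. Below is a self-contained sketch of that pattern; the class, property, and relationship names are illustrative and not part of ExecuteScript or the NiFi code base.

import java.util.Collections;
import java.util.List;
import java.util.Set;

import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

// Illustrative skeleton: read a property once in @OnScheduled and cache it in a field,
// so onTrigger performs no per-FlowFile property resolution.
public class CacheOnScheduleProcessor extends AbstractProcessor {

    static final PropertyDescriptor GREETING = new PropertyDescriptor.Builder()
            .name("Greeting")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .required(true)
            .build();

    static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();

    private volatile String greeting;

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return Collections.singletonList(GREETING);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return Collections.singleton(REL_SUCCESS);
    }

    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        // Resolved once per schedule; visible to all onTrigger threads via the volatile field.
        greeting = context.getProperty(GREETING).getValue();
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        flowFile = session.putAttribute(flowFile, "greeting", greeting);
        session.transfer(flowFile, REL_SUCCESS);
    }
}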
Use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
The class SiteToSiteProvenanceReportingTask, method onScheduled:
@OnScheduled
public void onScheduled(final ConfigurationContext context) throws IOException {
    consumer = new ProvenanceEventConsumer();
    consumer.setStartPositionValue(context.getProperty(START_POSITION).getValue());
    consumer.setBatchSize(context.getProperty(BATCH_SIZE).asInteger());
    consumer.setLogger(getLogger());

    // initialize component type filtering
    consumer.setComponentTypeRegex(context.getProperty(FILTER_COMPONENT_TYPE).getValue());
    consumer.setComponentTypeRegexExclude(context.getProperty(FILTER_COMPONENT_TYPE_EXCLUDE).getValue());

    final String[] targetEventTypes = StringUtils.stripAll(StringUtils.split(context.getProperty(FILTER_EVENT_TYPE).getValue(), ','));
    if (targetEventTypes != null) {
        for (String type : targetEventTypes) {
            try {
                consumer.addTargetEventType(ProvenanceEventType.valueOf(type));
            } catch (Exception e) {
                getLogger().warn(type + " is not a correct event type, removed from the filtering.");
            }
        }
    }

    final String[] targetEventTypesExclude = StringUtils.stripAll(StringUtils.split(context.getProperty(FILTER_EVENT_TYPE_EXCLUDE).getValue(), ','));
    if (targetEventTypesExclude != null) {
        for (String type : targetEventTypesExclude) {
            try {
                consumer.addTargetEventTypeExclude(ProvenanceEventType.valueOf(type));
            } catch (Exception e) {
                getLogger().warn(type + " is not a correct event type, removed from the exclude filtering.");
            }
        }
    }

    // initialize component ID filtering
    final String[] targetComponentIds = StringUtils.stripAll(StringUtils.split(context.getProperty(FILTER_COMPONENT_ID).getValue(), ','));
    if (targetComponentIds != null) {
        consumer.addTargetComponentId(targetComponentIds);
    }

    final String[] targetComponentIdsExclude = StringUtils.stripAll(StringUtils.split(context.getProperty(FILTER_COMPONENT_ID_EXCLUDE).getValue(), ','));
    if (targetComponentIdsExclude != null) {
        consumer.addTargetComponentIdExclude(targetComponentIdsExclude);
    }

    consumer.setScheduled(true);
}
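The two event-type loops share the same parse-and-tolerate logic: split the property value on commas, strip whitespace, and skip any name that is not a ProvenanceEventType constant, logging a warning instead of failing. A hypothetical helper that captures this behavior is sketched below; it is not part of the reporting task itself.

import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: turn "RECEIVE, SEND, BOGUS" into [RECEIVE, SEND], ignoring unknown names.
private List<ProvenanceEventType> parseEventTypes(final String commaSeparated) {
    final List<ProvenanceEventType> types = new ArrayList<>();
    final String[] names = StringUtils.stripAll(StringUtils.split(commaSeparated, ','));
    if (names != null) {
        for (String name : names) {
            try {
                types.add(ProvenanceEventType.valueOf(name));
            } catch (Exception e) {
                getLogger().warn(name + " is not a valid event type and is ignored");
            }
        }
    }
    return types;
}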
Use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
The class UpdateRecord, method createRecordPaths:
@OnScheduled
public void createRecordPaths(final ProcessContext context) {
    recordPathCache = new RecordPathCache(context.getProperties().size() * 2);

    final List<String> recordPaths = new ArrayList<>(context.getProperties().size() - 2);
    for (final PropertyDescriptor property : context.getProperties().keySet()) {
        if (property.isDynamic()) {
            recordPaths.add(property.getName());
        }
    }

    this.recordPaths = recordPaths;
}
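The names collected here are RecordPath expressions supplied as dynamic properties; at trigger time the processor evaluates the compiled paths obtained from recordPathCache. The fragment below is a much-simplified sketch of that lookup, assuming record and newValue are already in scope; UpdateRecord's real onTrigger additionally handles replacement strategies and relative paths.

// Simplified sketch (not UpdateRecord's actual onTrigger): compile each configured path
// via the cache and evaluate it against a record; 'record' and 'newValue' are assumed.
for (final String recordPathText : recordPaths) {
    final RecordPath recordPath = recordPathCache.getCompiled(recordPathText);
    final RecordPathResult result = recordPath.evaluate(record);
    result.getSelectedFields().forEach(fieldValue -> fieldValue.updateValue(newValue));
}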
Use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
The class DistributeLoad, method createWeightedList:
@OnScheduled
public void createWeightedList(final ProcessContext context) {
    final Map<Integer, Integer> weightings = new LinkedHashMap<>();

    String distStrat = context.getProperty(DISTRIBUTION_STRATEGY).getValue();
    if (distStrat.equals(STRATEGY_LOAD_DISTRIBUTION_SERVICE)) {
        String hostNamesValue = context.getProperty(HOSTNAMES).getValue();
        String[] hostNames = hostNamesValue.split("(?:,+|;+|\\s+)");
        Set<String> hostNameSet = new HashSet<>();
        for (String hostName : hostNames) {
            if (StringUtils.isNotBlank(hostName)) {
                hostNameSet.add(hostName);
            }
        }
        LoadDistributionService svc = context.getProperty(LOAD_DISTRIBUTION_SERVICE_TEMPLATE).asControllerService(LoadDistributionService.class);
        myListener = new LoadDistributionListener() {
            @Override
            public void update(Map<String, Integer> loadInfo) {
                for (Relationship rel : relationshipsRef.get()) {
                    String hostname = rel.getDescription();
                    Integer weight = 1;
                    if (loadInfo.containsKey(hostname)) {
                        weight = loadInfo.get(hostname);
                    }
                    weightings.put(Integer.decode(rel.getName()), weight);
                }
                updateWeightedRelationships(weightings);
            }
        };

        Map<String, Integer> loadInfo = svc.getLoadDistribution(hostNameSet, myListener);
        for (Relationship rel : relationshipsRef.get()) {
            String hostname = rel.getDescription();
            Integer weight = 1;
            if (loadInfo.containsKey(hostname)) {
                weight = loadInfo.get(hostname);
            }
            weightings.put(Integer.decode(rel.getName()), weight);
        }
    } else {
        final int numRelationships = context.getProperty(NUM_RELATIONSHIPS).asInteger();
        for (int i = 1; i <= numRelationships; i++) {
            weightings.put(i, 1);
        }
        for (final PropertyDescriptor propDesc : context.getProperties().keySet()) {
            if (!this.properties.contains(propDesc)) {
                final int relationship = Integer.parseInt(propDesc.getName());
                final int weighting = context.getProperty(propDesc).asInteger();
                weightings.put(relationship, weighting);
            }
        }
    }
    updateWeightedRelationships(weightings);
}
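For the non-service strategy, the weightings map ends up with one entry per numbered relationship: every relationship defaults to a weight of 1, and any dynamic property whose name is a relationship number overrides that default. For example, with NUM_RELATIONSHIPS set to 3 and a dynamic property named "2" set to 5, the else-branch above produces the equivalent of the following (values shown for illustration only):

// Equivalent result of the else-branch for NUM_RELATIONSHIPS = 3 and dynamic property "2" = 5.
Map<Integer, Integer> weightings = new LinkedHashMap<>();
weightings.put(1, 1);
weightings.put(2, 5);  // relationship "2" receives roughly five times the default share
weightings.put(3, 1);
updateWeightedRelationships(weightings);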