use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
the class ScriptedReportingTask method setup.
/**
 * Performs setup operations when the reporting task is scheduled to run. This includes evaluating
 * the task's properties, as well as reloading the script (from a file or the "Script Body" property).
 *
 * @param context the context in which to perform the setup operations
 */
@OnScheduled
public void setup(final ConfigurationContext context) {
    scriptingComponentHelper.setupVariables(context);
    // Create a script engine for each possible task
    scriptingComponentHelper.setup(1, getLogger());
    scriptToRun = scriptingComponentHelper.getScriptBody();
    try {
        final String scriptPath = scriptingComponentHelper.getScriptPath();
        if (scriptToRun == null && scriptPath != null) {
            // The "Script Body" property takes precedence; fall back to reading the script file
            try (final FileInputStream scriptStream = new FileInputStream(scriptPath)) {
                scriptToRun = IOUtils.toString(scriptStream, Charset.defaultCharset());
            }
        }
    } catch (final IOException ioe) {
        throw new ProcessException(ioe);
    }
    vmMetrics = VirtualMachineMetrics.getInstance();
}
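The pattern shown here, resolving configuration once at schedule time and caching the result in a field, generalizes to any reporting task. Below is a minimal, self-contained sketch of the same idea, assuming only standard NiFi APIs; the class name ScriptCachingReportingTask and its SCRIPT_FILE property are hypothetical, and java.nio.file.Files stands in for the IOUtils call used above.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;

import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.AbstractReportingTask;
import org.apache.nifi.reporting.ReportingContext;

public class ScriptCachingReportingTask extends AbstractReportingTask {

    // Hypothetical property, for illustration only
    static final PropertyDescriptor SCRIPT_FILE = new PropertyDescriptor.Builder()
            .name("Script File")
            .description("Path of the script file to load when the task is scheduled")
            .required(true)
            .addValidator(StandardValidators.FILE_EXISTS_VALIDATOR)
            .build();

    private volatile String scriptToRun;

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return Collections.singletonList(SCRIPT_FILE);
    }

    @OnScheduled
    public void setup(final ConfigurationContext context) {
        final String scriptPath = context.getProperty(SCRIPT_FILE).getValue();
        try {
            // Load the script once per schedule instead of on every trigger
            scriptToRun = new String(Files.readAllBytes(Paths.get(scriptPath)), StandardCharsets.UTF_8);
        } catch (final IOException ioe) {
            throw new ProcessException("Failed to load script from " + scriptPath, ioe);
        }
    }

    @Override
    public void onTrigger(final ReportingContext context) {
        // evaluate scriptToRun against the reporting context here
    }
}

Because the framework runs @OnScheduled methods before any trigger, a volatile field is enough here; no further synchronization is needed for visibility.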
use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
the class AbstractSiteToSiteReportingTask method setup.
@OnScheduled
public void setup(final ConfigurationContext context) throws IOException {
    final SSLContextService sslContextService = context.getProperty(SSL_CONTEXT).asControllerService(SSLContextService.class);
    final SSLContext sslContext = sslContextService == null ? null : sslContextService.createSSLContext(SSLContextService.ClientAuth.REQUIRED);
    final ComponentLog logger = getLogger();
    final EventReporter eventReporter = new EventReporter() {
        @Override
        public void reportEvent(final Severity severity, final String category, final String message) {
            switch (severity) {
                case WARNING:
                    logger.warn(message);
                    break;
                case ERROR:
                    logger.error(message);
                    break;
                default:
                    break;
            }
        }
    };
    final String destinationUrl = context.getProperty(DESTINATION_URL).evaluateAttributeExpressions().getValue();
    final SiteToSiteTransportProtocol mode = SiteToSiteTransportProtocol.valueOf(context.getProperty(TRANSPORT_PROTOCOL).getValue());
    // A proxy only applies to the HTTP transport protocol, and only when a proxy hostname is configured
    final HttpProxy httpProxy = mode.equals(SiteToSiteTransportProtocol.RAW) || StringUtils.isEmpty(context.getProperty(HTTP_PROXY_HOSTNAME).getValue())
            ? null
            : new HttpProxy(context.getProperty(HTTP_PROXY_HOSTNAME).getValue(), context.getProperty(HTTP_PROXY_PORT).asInteger(),
                    context.getProperty(HTTP_PROXY_USERNAME).getValue(), context.getProperty(HTTP_PROXY_PASSWORD).getValue());
    siteToSiteClient = new SiteToSiteClient.Builder()
            .urls(SiteToSiteRestApiClient.parseClusterUrls(destinationUrl))
            .portName(context.getProperty(PORT_NAME).getValue())
            .useCompression(context.getProperty(COMPRESS).asBoolean())
            .eventReporter(eventReporter)
            .sslContext(sslContext)
            .timeout(context.getProperty(TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS)
            .transportProtocol(mode)
            .httpProxy(httpProxy)
            .build();
}
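The method above only builds and caches the client; the actual transfer happens later, at trigger time. As a hedged sketch of how a cached SiteToSiteClient is typically driven (create a SEND transaction, write the payload, confirm, complete), with sendReport being an illustrative name rather than a method of the real task:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.nifi.remote.Transaction;
import org.apache.nifi.remote.TransferDirection;
import org.apache.nifi.remote.client.SiteToSiteClient;

public class SiteToSiteSendExample {

    // Illustrative helper showing the transaction lifecycle around a cached client
    static void sendReport(final SiteToSiteClient client, final String payload) throws IOException {
        final Transaction transaction = client.createTransaction(TransferDirection.SEND);
        if (transaction == null) {
            // The client returns null when no destination node can currently accept data
            throw new IOException("Unable to create a SEND transaction; all nodes may be penalized");
        }
        transaction.send(payload.getBytes(StandardCharsets.UTF_8), Collections.emptyMap());
        transaction.confirm();  // verify checksums with the remote instance
        transaction.complete(); // commit the transfer
    }
}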
use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
the class ListenLumberjack method onScheduled.
@Override
@OnScheduled
public void onScheduled(ProcessContext context) throws IOException {
    // Call super first so that the charset is already populated before the encoder is created
    super.onScheduled(context);
    lumberjackEncoder = new LumberjackEncoder();
}
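The one subtlety here is ordering: the override invokes super.onScheduled(context) before constructing the encoder, so any state the parent populates (such as the charset the comment refers to) is already available. The sketch below illustrates that ordering with hypothetical classes; it is not the real ListenLumberjack hierarchy.

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;

// Both classes are stand-ins for the real listener hierarchy
abstract class BaseListeningProcessor extends AbstractProcessor {
    protected volatile Charset charset;

    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IOException {
        // Stand-in for reading a "Character Set" property from the context
        charset = StandardCharsets.UTF_8;
    }
}

public class CharsetDependentProcessor extends BaseListeningProcessor {
    private volatile String encoderState; // stand-in for a protocol-specific encoder

    @Override
    @OnScheduled
    public void onScheduled(final ProcessContext context) throws IOException {
        super.onScheduled(context); // parent populates the charset first
        encoderState = "encoder for " + charset.name(); // safe: charset is now set
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        // event handling would go here
    }
}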
use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
the class PutHiveStreaming method setup.
@OnScheduled
public void setup(final ProcessContext context) {
    ComponentLog log = getLogger();
    final Integer heartbeatInterval = context.getProperty(HEARTBEAT_INTERVAL).evaluateAttributeExpressions().asInteger();
    final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).getValue();
    hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);
    // If more than one concurrent task, force 'hcatalog.hive.client.cache.disabled' to true
    if (context.getMaxConcurrentTasks() > 1) {
        hiveConfig.setBoolean(CLIENT_CACHE_DISABLED_PROPERTY, true);
    }
    // Add any dynamic properties to the Hive configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hiveConfig.set(descriptor.getName(), entry.getValue());
        }
    }
    hiveConfigurator.preload(hiveConfig);
    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        // A configured Kerberos Credentials Service takes precedence over the explicit principal/keytab properties
        final String resolvedPrincipal;
        final String resolvedKeytab;
        if (credentialsService == null) {
            resolvedPrincipal = explicitPrincipal;
            resolvedKeytab = explicitKeytab;
        } else {
            resolvedPrincipal = credentialsService.getPrincipal();
            resolvedKeytab = credentialsService.getKeytab();
        }
        log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, resolvedPrincipal, resolvedKeytab);
        } catch (AuthenticationFailedException ae) {
            throw new ProcessException("Kerberos authentication failed for Hive Streaming", ae);
        }
        log.info("Successfully logged in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
    } else {
        ugi = null;
    }
    // CALL_TIMEOUT is configured in seconds; convert to milliseconds
    callTimeout = context.getProperty(CALL_TIMEOUT).evaluateAttributeExpressions().asInteger() * 1000;
    String timeoutName = "put-hive-streaming-%d";
    this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat(timeoutName).build());
    sendHeartBeat.set(true);
    heartBeatTimer = new Timer();
    setupHeartBeatTimer(heartbeatInterval);
}
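The method ends by arming a java.util.Timer and delegating to setupHeartBeatTimer(heartbeatInterval), whose body is not shown in this snippet. Purely as a sketch of how such a recurring heartbeat could be scheduled, under the assumption that the interval is given in seconds; every name below is illustrative rather than PutHiveStreaming's actual code.

import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class HeartbeatExample {

    private final Timer heartBeatTimer = new Timer(true); // daemon thread, so it never blocks shutdown
    private final AtomicBoolean sendHeartBeat = new AtomicBoolean(true);

    public void setupHeartBeatTimer(final int intervalSeconds) {
        if (intervalSeconds <= 0) {
            return; // a non-positive interval disables heartbeats
        }
        final long periodMillis = TimeUnit.SECONDS.toMillis(intervalSeconds);
        heartBeatTimer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                if (sendHeartBeat.get()) {
                    // send heartbeats to any open Hive transactions here
                }
            }
        }, periodMillis, periodMillis);
    }

    public void shutdown() {
        sendHeartBeat.set(false);
        heartBeatTimer.cancel(); // mirrors the cleanup an @OnStopped method would perform
    }
}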
use of org.apache.nifi.annotation.lifecycle.OnScheduled in project nifi by apache.
the class ExtractMediaMetadata method onScheduled.
@SuppressWarnings("unused")
@OnScheduled
public void onScheduled(ProcessContext context) {
    // Compile the metadata key filter once per schedule so onTrigger can reuse the Pattern
    final String metadataKeyFilterInput = context.getProperty(METADATA_KEY_FILTER).getValue();
    if (metadataKeyFilterInput != null && metadataKeyFilterInput.length() > 0) {
        metadataKeyFilterRef.set(Pattern.compile(metadataKeyFilterInput));
    } else {
        metadataKeyFilterRef.set(null);
    }
    autoDetectParser = new AutoDetectParser();
}
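Compiling the filter once per schedule means each onTrigger invocation can reuse the cached Pattern instead of recompiling it for every FlowFile. A hedged sketch of how the AtomicReference<Pattern> might be consumed; filterMetadata is an illustrative helper, not the processor's real method.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;

public class MetadataFilterExample {

    // Keep only the metadata entries whose keys match the cached filter pattern
    static Map<String, String> filterMetadata(final AtomicReference<Pattern> filterRef, final Map<String, String> metadata) {
        final Pattern filter = filterRef.get();
        if (filter == null) {
            return metadata; // no filter configured: keep every key
        }
        final Map<String, String> kept = new HashMap<>();
        for (final Map.Entry<String, String> entry : metadata.entrySet()) {
            if (filter.matcher(entry.getKey()).matches()) {
                kept.put(entry.getKey(), entry.getValue());
            }
        }
        return kept;
    }
}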