Search in sources :

Example 1 with AuthenticationFailedException

Use of org.apache.nifi.util.hive.AuthenticationFailedException in the apache/nifi project.

From the class HiveConnectionPool, method onConfigured.

/**
 * Configures the connection pool by creating an instance of the
 * {@link BasicDataSource} based on configuration provided with
 * {@link ConfigurationContext}.
 * <p>
 * This operation makes no guarantees that the actual connection could be
 * made since the underlying system may still go off-line during normal
 * operation of the connection pool.
 * <p>
 * As of Apache NiFi 1.5.0, due to changes made to
 * {@link SecurityUtil#loginKerberos(Configuration, String, String)}, which is used by this class invoking
 * {@link HiveConfigurator#authenticate(Configuration, String, String)}
 * to authenticate a principal with Kerberos, Hive controller services no longer
 * attempt relogins explicitly.  For more information, please read the documentation for
 * {@link SecurityUtil#loginKerberos(Configuration, String, String)}.
 * <p>
 * In previous versions of NiFi, a {@link org.apache.nifi.hadoop.KerberosTicketRenewer} was started by
 * {@link HiveConfigurator#authenticate(Configuration, String, String, long)} when the Hive
 * controller service was enabled.  The use of a separate thread to explicitly relogin could cause race conditions
 * with the implicit relogin attempts made by hadoop/Hive code on a thread that references the same
 * {@link UserGroupInformation} instance.  One of these threads could leave the
 * {@link javax.security.auth.Subject} in {@link UserGroupInformation} to be cleared or in an unexpected state
 * while the other thread is attempting to use the {@link javax.security.auth.Subject}, resulting in failed
 * authentication attempts that would leave the Hive controller service in an unrecoverable state.
 *
 * @see SecurityUtil#loginKerberos(Configuration, String, String)
 * @see HiveConfigurator#authenticate(Configuration, String, String)
 * @see HiveConfigurator#authenticate(Configuration, String, String, long)
 * @param context the configuration context
 * @throws InitializationException if Kerberos authentication fails or the
 *         database connection pool cannot be configured
 */
@OnEnabled
public void onConfigured(final ConfigurationContext context) throws InitializationException {
    ComponentLog log = getLogger();
    final String configFiles = context.getProperty(HIVE_CONFIGURATION_RESOURCES).evaluateAttributeExpressions().getValue();
    final Configuration hiveConfig = hiveConfigurator.getConfigurationFromFiles(configFiles);
    final String validationQuery = context.getProperty(VALIDATION_QUERY).evaluateAttributeExpressions().getValue();
    // add any dynamic properties to the Hive configuration
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        final PropertyDescriptor descriptor = entry.getKey();
        if (descriptor.isDynamic()) {
            hiveConfig.set(descriptor.getName(), context.getProperty(descriptor).evaluateAttributeExpressions().getValue());
        }
    }
    final String drv = HiveDriver.class.getName();
    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        // A configured credentials service takes precedence over the explicit principal/keytab properties.
        final String resolvedPrincipal;
        final String resolvedKeytab;
        if (credentialsService == null) {
            resolvedPrincipal = explicitPrincipal;
            resolvedKeytab = explicitKeytab;
        } else {
            resolvedPrincipal = credentialsService.getPrincipal();
            resolvedKeytab = credentialsService.getKeytab();
        }
        log.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, resolvedPrincipal, resolvedKeytab);
        } catch (AuthenticationFailedException ae) {
            log.error(ae.getMessage(), ae);
            // Fail enablement instead of continuing unauthenticated; previously the
            // exception was swallowed and the service was enabled in a broken state
            // while still logging a "Successfully logged in" message.
            throw new InitializationException(ae);
        }
        log.info("Successfully logged in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
    }
    final String user = context.getProperty(DB_USER).evaluateAttributeExpressions().getValue();
    final String passw = context.getProperty(DB_PASSWORD).evaluateAttributeExpressions().getValue();
    final Long maxWaitMillis = context.getProperty(MAX_WAIT_TIME).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
    final Integer maxTotal = context.getProperty(MAX_TOTAL_CONNECTIONS).evaluateAttributeExpressions().asInteger();
    dataSource = new BasicDataSource();
    dataSource.setDriverClassName(drv);
    connectionUrl = context.getProperty(DATABASE_URL).evaluateAttributeExpressions().getValue();
    dataSource.setMaxWait(maxWaitMillis);
    dataSource.setMaxActive(maxTotal);
    // Only enable borrow-time validation when a validation query was supplied.
    if (validationQuery != null && !validationQuery.isEmpty()) {
        dataSource.setValidationQuery(validationQuery);
        dataSource.setTestOnBorrow(true);
    }
    dataSource.setUrl(connectionUrl);
    dataSource.setUsername(user);
    dataSource.setPassword(passw);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) AuthenticationFailedException(org.apache.nifi.util.hive.AuthenticationFailedException) KerberosCredentialsService(org.apache.nifi.kerberos.KerberosCredentialsService) ComponentLog(org.apache.nifi.logging.ComponentLog) Map(java.util.Map) BasicDataSource(org.apache.commons.dbcp.BasicDataSource) OnEnabled(org.apache.nifi.annotation.lifecycle.OnEnabled)

Example 2 with AuthenticationFailedException

Use of org.apache.nifi.util.hive.AuthenticationFailedException in the apache/nifi project.

From the class PutHiveStreaming, method setup.

/**
 * Prepares the processor for scheduling: loads the Hive configuration from the
 * configured resource files, performs a Kerberos login when security is enabled,
 * and starts the call-timeout executor and the heartbeat timer.
 *
 * @param context the process context supplying property values
 * @throws ProcessException if Kerberos authentication fails
 */
@OnScheduled
public void setup(final ProcessContext context) {
    final ComponentLog logger = getLogger();
    final Integer heartbeatIntervalSecs = context.getProperty(HEARTBEAT_INTERVAL).evaluateAttributeExpressions().asInteger();
    final String configResources = context.getProperty(HIVE_CONFIGURATION_RESOURCES).getValue();
    hiveConfig = hiveConfigurator.getConfigurationFromFiles(configResources);
    // With more than one concurrent task, force 'hcatalog.hive.client.cache.disabled' to true
    if (context.getMaxConcurrentTasks() > 1) {
        hiveConfig.setBoolean(CLIENT_CACHE_DISABLED_PROPERTY, true);
    }
    // Propagate any user-defined (dynamic) properties into the Hive configuration
    context.getProperties().entrySet().stream()
            .filter(entry -> entry.getKey().isDynamic())
            .forEach(entry -> hiveConfig.set(entry.getKey().getName(), entry.getValue()));
    hiveConfigurator.preload(hiveConfig);
    if (SecurityUtil.isSecurityEnabled(hiveConfig)) {
        final String explicitPrincipal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
        final String explicitKeytab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
        // A configured credentials service takes precedence over the explicit principal/keytab properties
        final String resolvedPrincipal = credentialsService != null ? credentialsService.getPrincipal() : explicitPrincipal;
        final String resolvedKeytab = credentialsService != null ? credentialsService.getKeytab() : explicitKeytab;
        logger.info("Hive Security Enabled, logging in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
        try {
            ugi = hiveConfigurator.authenticate(hiveConfig, resolvedPrincipal, resolvedKeytab);
        } catch (AuthenticationFailedException ae) {
            // Fail scheduling outright; the processor cannot stream without a valid login
            throw new ProcessException("Kerberos authentication failed for Hive Streaming", ae);
        }
        logger.info("Successfully logged in as principal {} with keytab {}", new Object[] { resolvedPrincipal, resolvedKeytab });
    } else {
        ugi = null;
    }
    // CALL_TIMEOUT is configured in seconds; convert to milliseconds
    callTimeout = context.getProperty(CALL_TIMEOUT).evaluateAttributeExpressions().asInteger() * 1000;
    this.callTimeoutPool = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setNameFormat("put-hive-streaming-%d").build());
    sendHeartBeat.set(true);
    heartBeatTimer = new Timer();
    setupHeartBeatTimer(heartbeatIntervalSecs);
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ProcessException(org.apache.nifi.processor.exception.ProcessException) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) Timer(java.util.Timer) AuthenticationFailedException(org.apache.nifi.util.hive.AuthenticationFailedException) KerberosCredentialsService(org.apache.nifi.kerberos.KerberosCredentialsService) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) ComponentLog(org.apache.nifi.logging.ComponentLog) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) OnScheduled(org.apache.nifi.annotation.lifecycle.OnScheduled)

Aggregations

Map (java.util.Map)2 PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor)2 KerberosCredentialsService (org.apache.nifi.kerberos.KerberosCredentialsService)2 ComponentLog (org.apache.nifi.logging.ComponentLog)2 AuthenticationFailedException (org.apache.nifi.util.hive.AuthenticationFailedException)2 ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder)1 HashMap (java.util.HashMap)1 Timer (java.util.Timer)1 ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap)1 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)1 BasicDataSource (org.apache.commons.dbcp.BasicDataSource)1 Configuration (org.apache.hadoop.conf.Configuration)1 OnEnabled (org.apache.nifi.annotation.lifecycle.OnEnabled)1 OnScheduled (org.apache.nifi.annotation.lifecycle.OnScheduled)1 ProcessException (org.apache.nifi.processor.exception.ProcessException)1