
Example 6 with NonNull

Use of lombok.NonNull in project cas by apereo.

From the class SamlIdPObjectSigner, method prepareSecurityParametersContext:

/**
 * Prepare security parameters context.
 *
 * @param <T>             the type parameter
 * @param adaptor         the adaptor
 * @param outboundContext the outbound context
 * @param service         the service
 * @throws SAMLException the saml exception
 */
protected <T extends SAMLObject> void prepareSecurityParametersContext(final SamlRegisteredServiceServiceProviderMetadataFacade adaptor, final MessageContext<T> outboundContext, final SamlRegisteredService service) throws SAMLException {
    @NonNull final SecurityParametersContext secParametersContext = outboundContext.getSubcontext(SecurityParametersContext.class, true);
    final RoleDescriptor roleDesc = adaptor.getSsoDescriptor();
    final SignatureSigningParameters signingParameters = buildSignatureSigningParameters(roleDesc, service);
    secParametersContext.setSignatureSigningParameters(signingParameters);
}
Also used: SignatureSigningParameters (org.opensaml.xmlsec.SignatureSigningParameters), NonNull (lombok.NonNull), SecurityParametersContext (org.opensaml.xmlsec.context.SecurityParametersContext), RoleDescriptor (org.opensaml.saml.saml2.metadata.RoleDescriptor)
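
For context, Lombok's documented behavior for @NonNull on a method or constructor parameter is to insert a null check at the top of the body; annotating a local variable, as the CAS code above does, expresses the same fail-fast intent at the assignment site (whether a check is actually woven in can depend on the Lombok version). A minimal, self-contained sketch of the parameter form (NonNullDemo is an illustrative name; the exact exception message varies by Lombok version and configuration):

import lombok.NonNull;

public class NonNullDemo {

    // Lombok inserts, at the top of this method, roughly:
    //   if (ctx == null) { throw new NullPointerException("ctx is marked non-null but is null"); }
    static String describe(@NonNull final Object ctx) {
        return ctx.toString();
    }

    public static void main(final String[] args) {
        System.out.println(describe("ok")); // prints "ok"
        describe(null);                     // fails fast with a NullPointerException
    }
}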

Example 7 with NonNull

Use of lombok.NonNull in project cas by apereo.

From the class AbstractWrapperAuthenticationHandler, method doAuthentication:

@Override
protected AuthenticationHandlerExecutionResult doAuthentication(final Credential credential) throws GeneralSecurityException {
    final C credentials = convertToPac4jCredentials((I) credential);
    LOGGER.debug("credentials: [{}]", credentials);
    try {
        @NonNull final Authenticator authenticator = getAuthenticator(credential);
        if (authenticator instanceof InitializableObject) {
            ((InitializableObject) authenticator).init();
        }
        authenticator.validate(credentials, getWebContext());
        final UserProfile profile = this.profileCreator.create(credentials, getWebContext());
        LOGGER.debug("profile: [{}]", profile);
        return createResult(new ClientCredential(credentials, authenticator.getClass().getSimpleName()), profile);
    } catch (final Exception e) {
        LOGGER.error("Failed to validate credentials", e);
        throw new FailedLoginException("Failed to validate credentials: " + e.getMessage());
    }
}
Also used: ClientCredential (org.apereo.cas.authentication.principal.ClientCredential), FailedLoginException (javax.security.auth.login.FailedLoginException), UserProfile (org.pac4j.core.profile.UserProfile), NonNull (lombok.NonNull), InitializableObject (org.pac4j.core.util.InitializableObject), Authenticator (org.pac4j.core.credentials.authenticator.Authenticator), GeneralSecurityException (java.security.GeneralSecurityException)
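
Here @NonNull guards the result of getAuthenticator(credential); because the check sits inside the try block, a null authenticator is reported through the same catch-and-rethrow path as any other validation failure. Written out by hand, the guard amounts to this sketch (the message wording is illustrative, not Lombok's exact output):

final Authenticator authenticator = getAuthenticator(credential);
if (authenticator == null) {
    // fail fast before validate() dereferences it
    throw new NullPointerException("authenticator is marked @NonNull but is null");
}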

Example 8 with NonNull

Use of lombok.NonNull in project cas by apereo.

From the class PolicyBasedAuthenticationManager, method authenticateInternal:

/**
 * Authenticate internal authentication builder.
 *
 * @param transaction the transaction
 * @return the authentication builder
 * @throws AuthenticationException the authentication exception
 */
protected AuthenticationBuilder authenticateInternal(final AuthenticationTransaction transaction) throws AuthenticationException {
    final Collection<Credential> credentials = transaction.getCredentials();
    LOGGER.debug("Authentication credentials provided for this transaction are [{}]", credentials);
    if (credentials.isEmpty()) {
        LOGGER.error("Resolved credentials for this transaction are empty");
        throw new AuthenticationException("Resolved credentials for this transaction are empty");
    }
    final AuthenticationBuilder builder = new DefaultAuthenticationBuilder(NullPrincipal.getInstance());
    credentials.forEach(cred -> builder.addCredential(new BasicCredentialMetaData(cred)));
    @NonNull final Set<AuthenticationHandler> handlerSet = getAuthenticationHandlersForThisTransaction(transaction);
    LOGGER.debug("Candidate resolved authentication handlers for this transaction are [{}]", handlerSet);
    if (handlerSet.isEmpty()) {
        LOGGER.error("Resolved authentication handlers for this transaction are empty");
        throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
    }
    try {
        final Iterator<Credential> it = credentials.iterator();
        AuthenticationCredentialsThreadLocalBinder.clearInProgressAuthentication();
        while (it.hasNext()) {
            final Credential credential = it.next();
            LOGGER.debug("Attempting to authenticate credential [{}]", credential);
            final Iterator<AuthenticationHandler> itHandlers = handlerSet.iterator();
            boolean proceedWithNextHandler = true;
            while (proceedWithNextHandler && itHandlers.hasNext()) {
                final AuthenticationHandler handler = itHandlers.next();
                if (handler.supports(credential)) {
                    try {
                        final PrincipalResolver resolver = getPrincipalResolverLinkedToHandlerIfAny(handler, transaction);
                        LOGGER.debug("Attempting authentication of [{}] using [{}]", credential.getId(), handler.getName());
                        authenticateAndResolvePrincipal(builder, credential, resolver, handler);
                        AuthenticationCredentialsThreadLocalBinder.bindInProgress(builder.build());
                        final Pair<Boolean, Set<Throwable>> failures = evaluateAuthenticationPolicies(builder.build(), transaction);
                        proceedWithNextHandler = !failures.getKey();
                    } catch (final Exception e) {
                        LOGGER.error("Authentication has failed. Credentials may be incorrect or CAS cannot " + "find authentication handler that supports [{}] of type [{}]. Examine the configuration to " + "ensure a method of authentication is defined and analyze CAS logs at DEBUG level to trace " + "the authentication event.", credential, credential.getClass().getSimpleName());
                        handleAuthenticationException(e, handler.getName(), builder);
                        proceedWithNextHandler = true;
                    }
                } else {
                    LOGGER.debug("Authentication handler [{}] does not support the credential type [{}]. Trying next...", handler.getName(), credential);
                }
            }
        }
        evaluateFinalAuthentication(builder, transaction);
        return builder;
    } finally {
        AuthenticationCredentialsThreadLocalBinder.clearInProgressAuthentication();
    }
}
Also used: LinkedHashSet (java.util.LinkedHashSet), Set (java.util.Set), UnresolvedPrincipalException (org.apereo.cas.authentication.exceptions.UnresolvedPrincipalException), GeneralSecurityException (java.security.GeneralSecurityException), UndeclaredThrowableException (java.lang.reflect.UndeclaredThrowableException), PrincipalResolver (org.apereo.cas.authentication.principal.PrincipalResolver), NonNull (lombok.NonNull)
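
Stripped of CAS types, the nested loops above implement a simple short-circuit pattern: for each credential, try the handlers that support it in order and stop as soon as the authentication policies are satisfied. A self-contained sketch of that pattern (Handler and policySatisfied are illustrative names, not CAS APIs):

import java.util.List;
import java.util.function.Predicate;

final class FirstSatisfyingHandler {

    interface Handler {
        boolean supports(String credential);
        void authenticate(String credential) throws Exception;
    }

    // Tries each supporting handler in order; returns true once the policy predicate passes.
    static boolean authenticate(final String credential,
                                final List<Handler> handlers,
                                final Predicate<String> policySatisfied) {
        for (final Handler handler : handlers) {
            if (!handler.supports(credential)) {
                continue; // mirrors the "Trying next..." branch above
            }
            try {
                handler.authenticate(credential);
                if (policySatisfied.test(credential)) {
                    return true; // proceedWithNextHandler = false
                }
            } catch (final Exception e) {
                // record the failure and move on to the remaining handlers
            }
        }
        return false;
    }
}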

Example 9 with NonNull

Use of lombok.NonNull in project metacat by Netflix.

From the class HiveConnectorPartitionService, method savePartitions:

/**
 * {@inheritDoc}.
 */
@Override
public PartitionsSaveResponse savePartitions(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName tableName, @Nonnull @NonNull final PartitionsSaveRequest partitionsSaveRequest) {
    final String databasename = tableName.getDatabaseName();
    final String tablename = tableName.getTableName();
    // New partitions
    final List<Partition> hivePartitions = Lists.newArrayList();
    try {
        final Table table = metacatHiveClient.getTableByName(databasename, tablename);
        final List<PartitionInfo> partitionInfos = partitionsSaveRequest.getPartitions();
        // New partition ids
        final List<String> addedPartitionIds = Lists.newArrayList();
        // Updated partition ids
        final List<String> existingPartitionIds = Lists.newArrayList();
        // Existing partitions
        final List<Partition> existingHivePartitions = Lists.newArrayList();
        // Existing partition map
        Map<String, Partition> existingPartitionMap = Collections.emptyMap();
        if (partitionsSaveRequest.getCheckIfExists()) {
            final List<String> partitionNames = partitionInfos.stream().map(partition -> {
                final String partitionName = partition.getName().getPartitionName();
                PartitionUtil.validatePartitionName(partitionName, getPartitionKeys(table.getPartitionKeys()));
                return partitionName;
            }).collect(Collectors.toList());
            existingPartitionMap = getPartitionsByNames(table, partitionNames);
        }
        final TableInfo tableInfo = hiveMetacatConverters.toTableInfo(tableName, table);
        for (PartitionInfo partitionInfo : partitionInfos) {
            final String partitionName = partitionInfo.getName().getPartitionName();
            final Partition hivePartition = existingPartitionMap.get(partitionName);
            if (hivePartition == null) {
                addedPartitionIds.add(partitionName);
                hivePartitions.add(hiveMetacatConverters.fromPartitionInfo(tableInfo, partitionInfo));
            } else {
                // if alterIfExists is set, update the existing partition in place
                if (partitionsSaveRequest.getAlterIfExists()) {
                    final Partition existingPartition = hiveMetacatConverters.fromPartitionInfo(tableInfo, partitionInfo);
                    existingPartitionIds.add(partitionName);
                    existingPartition.setParameters(hivePartition.getParameters());
                    existingPartition.setCreateTime(hivePartition.getCreateTime());
                    existingPartition.setLastAccessTime(hivePartition.getLastAccessTime());
                    existingHivePartitions.add(existingPartition);
                }
            }
        }
        final Set<String> deletePartitionIds = Sets.newHashSet();
        if (!partitionsSaveRequest.getAlterIfExists()) {
            deletePartitionIds.addAll(existingPartitionIds);
        }
        if (partitionsSaveRequest.getPartitionIdsForDeletes() != null) {
            deletePartitionIds.addAll(partitionsSaveRequest.getPartitionIdsForDeletes());
        }
        if (partitionsSaveRequest.getAlterIfExists() && !existingHivePartitions.isEmpty()) {
            copyTableSdToPartitionSd(existingHivePartitions, table);
            metacatHiveClient.alterPartitions(databasename, tablename, existingHivePartitions);
        }
        copyTableSdToPartitionSd(hivePartitions, table);
        metacatHiveClient.addDropPartitions(databasename, tablename, hivePartitions, Lists.newArrayList(deletePartitionIds));
        final PartitionsSaveResponse result = new PartitionsSaveResponse();
        result.setAdded(addedPartitionIds);
        result.setUpdated(existingPartitionIds);
        return result;
    } catch (NoSuchObjectException exception) {
        if (exception.getMessage() != null && exception.getMessage().startsWith("Partition doesn't exist")) {
            throw new PartitionNotFoundException(tableName, "", exception);
        } else {
            throw new TableNotFoundException(tableName, exception);
        }
    } catch (MetaException | InvalidObjectException exception) {
        throw new InvalidMetaException("One or more partitions are invalid.", exception);
    } catch (AlreadyExistsException e) {
        final List<String> ids = getFakePartitionName(hivePartitions);
        throw new PartitionAlreadyExistsException(tableName, ids, e);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed savePartitions hive table %s", tableName), exception);
    }
}
Also used: MetaException (org.apache.hadoop.hive.metastore.api.MetaException), SortOrder (com.netflix.metacat.common.dto.SortOrder), HashMap (java.util.HashMap), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), Partition (org.apache.hadoop.hive.metastore.api.Partition), Function (java.util.function.Function), Warehouse (org.apache.hadoop.hive.metastore.Warehouse), ArrayList (java.util.ArrayList), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), Inject (javax.inject.Inject), LinkedHashMap (java.util.LinkedHashMap), Strings (com.google.common.base.Strings), ConnectorPartitionService (com.netflix.metacat.common.server.connectors.ConnectorPartitionService), InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException), Lists (com.google.common.collect.Lists), ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException), PartitionInfo (com.netflix.metacat.common.server.connectors.model.PartitionInfo), Map (java.util.Map), ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext), Named (javax.inject.Named), HiveConnectorInfoConverter (com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter), PartitionUtil (com.netflix.metacat.common.server.partition.util.PartitionUtil), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), Nonnull (javax.annotation.Nonnull), Nullable (javax.annotation.Nullable), NonNull (lombok.NonNull), Pageable (com.netflix.metacat.common.dto.Pageable), TException (org.apache.thrift.TException), Set (java.util.Set), QualifiedName (com.netflix.metacat.common.QualifiedName), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), TableNotFoundException (com.netflix.metacat.common.server.connectors.exception.TableNotFoundException), Collectors (java.util.stream.Collectors), Sets (com.google.common.collect.Sets), Table (org.apache.hadoop.hive.metastore.api.Table), PartitionsSaveResponse (com.netflix.metacat.common.server.connectors.model.PartitionsSaveResponse), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), List (java.util.List), TableInfo (com.netflix.metacat.common.server.connectors.model.TableInfo), PartitionAlreadyExistsException (com.netflix.metacat.common.server.connectors.exception.PartitionAlreadyExistsException), PartitionsSaveRequest (com.netflix.metacat.common.server.connectors.model.PartitionsSaveRequest), PartitionListRequest (com.netflix.metacat.common.server.connectors.model.PartitionListRequest), ConnectorUtils (com.netflix.metacat.common.server.connectors.ConnectorUtils), PartitionNotFoundException (com.netflix.metacat.common.server.connectors.exception.PartitionNotFoundException), Collections (java.util.Collections), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Sort (com.netflix.metacat.common.dto.Sort)
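
Setting aside the Hive metastore types, the core bookkeeping in savePartitions is a split of the requested partitions against the existing ones: new names are added, and existing names are updated in place only when alterIfExists is set (explicit deletes and the drop/re-add path are layered on afterwards). A simplified, self-contained sketch of that split (PartitionDiff is an illustrative name, not a metacat type):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;

final class PartitionDiff {

    final List<String> added = new ArrayList<>();
    final List<String> updated = new ArrayList<>();

    static PartitionDiff compute(final Collection<String> requested,
                                 final Set<String> existing,
                                 final boolean alterIfExists) {
        final PartitionDiff diff = new PartitionDiff();
        for (final String name : requested) {
            if (!existing.contains(name)) {
                diff.added.add(name);      // brand-new partition
            } else if (alterIfExists) {
                diff.updated.add(name);    // altered in place via alterPartitions
            }
            // otherwise the existing partition is left as-is by this pass
        }
        return diff;
    }
}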

Example 10 with NonNull

Use of lombok.NonNull in project metacat by Netflix.

From the class HiveConnectorDatabaseService, method list:

/**
 * {@inheritDoc}.
 */
@Override
public List<DatabaseInfo> list(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName name, @Nullable final QualifiedName prefix, @Nullable final Sort sort, @Nullable final Pageable pageable) {
    try {
        final List<DatabaseInfo> databaseInfos = Lists.newArrayList();
        for (String databaseName : metacatHiveClient.getAllDatabases()) {
            final QualifiedName qualifiedName = QualifiedName.ofDatabase(name.getCatalogName(), databaseName);
            if (prefix != null && !qualifiedName.toString().startsWith(prefix.toString())) {
                continue;
            }
            databaseInfos.add(DatabaseInfo.builder().name(qualifiedName).build());
        }
        // only sorting by name is supported
        if (sort != null) {
            ConnectorUtils.sort(databaseInfos, sort, Comparator.comparing(p -> p.getName().getDatabaseName()));
        }
        return ConnectorUtils.paginate(databaseInfos, pageable);
    } catch (MetaException exception) {
        throw new InvalidMetaException(name, exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed list hive database %s", name), exception);
    }
}
Also used: MetaException (org.apache.hadoop.hive.metastore.api.MetaException), DatabaseAlreadyExistsException (com.netflix.metacat.common.server.connectors.exception.DatabaseAlreadyExistsException), DatabaseNotFoundException (com.netflix.metacat.common.server.connectors.exception.DatabaseNotFoundException), AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException), Inject (javax.inject.Inject), InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException), DatabaseInfo (com.netflix.metacat.common.server.connectors.model.DatabaseInfo), Lists (com.google.common.collect.Lists), ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException), MetacatNotSupportedException (com.netflix.metacat.common.exception.MetacatNotSupportedException), ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext), Named (javax.inject.Named), HiveConnectorInfoConverter (com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter), Nonnull (javax.annotation.Nonnull), Nullable (javax.annotation.Nullable), ConnectorDatabaseService (com.netflix.metacat.common.server.connectors.ConnectorDatabaseService), NonNull (lombok.NonNull), Pageable (com.netflix.metacat.common.dto.Pageable), TException (org.apache.thrift.TException), QualifiedName (com.netflix.metacat.common.QualifiedName), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), List (java.util.List), ConnectorUtils (com.netflix.metacat.common.server.connectors.ConnectorUtils), Comparator (java.util.Comparator), Database (org.apache.hadoop.hive.metastore.api.Database), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Sort (com.netflix.metacat.common.dto.Sort)
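
The filter, sort, paginate shape of list is a recurring connector pattern. A self-contained stream-based sketch of the same shape (skip/limit stands in for ConnectorUtils.paginate; a null prefix means no filtering, matching the null guard in the code above):

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class ListDatabases {

    static List<String> list(final Stream<String> allDatabases,
                             final String prefix,   // nullable: no filtering when absent
                             final boolean sortByName,
                             final long offset,
                             final long limit) {
        Stream<String> s = allDatabases;
        if (prefix != null) {
            s = s.filter(name -> name.startsWith(prefix));
        }
        if (sortByName) {
            s = s.sorted(Comparator.naturalOrder());
        }
        return s.skip(offset).limit(limit).collect(Collectors.toList());
    }
}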

Aggregations

NonNull (lombok.NonNull): 19
Lists (com.google.common.collect.Lists): 8
QualifiedName (com.netflix.metacat.common.QualifiedName): 8
ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext): 8
HiveConnectorInfoConverter (com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter): 8
List (java.util.List): 8
Nonnull (javax.annotation.Nonnull): 8
Inject (javax.inject.Inject): 8
Named (javax.inject.Named): 8
Pageable (com.netflix.metacat.common.dto.Pageable): 7
Sort (com.netflix.metacat.common.dto.Sort): 7
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException): 7
Map (java.util.Map): 7
Nullable (javax.annotation.Nullable): 7
Strings (com.google.common.base.Strings): 6
Maps (com.google.common.collect.Maps): 6
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 6
Throwables (com.google.common.base.Throwables): 5
PartitionInfo (com.netflix.metacat.common.server.connectors.model.PartitionInfo): 5
PartitionListRequest (com.netflix.metacat.common.server.connectors.model.PartitionListRequest): 5