Use of lombok.NonNull in project cas by apereo.
The class SamlIdPObjectSigner, method prepareSecurityParametersContext:
/**
 * Prepare security parameters context.
 *
 * @param <T>             the type parameter
 * @param adaptor         the adaptor
 * @param outboundContext the outbound context
 * @param service         the service
 * @throws SAMLException the saml exception
 */
protected <T extends SAMLObject> void prepareSecurityParametersContext(
        final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
        final MessageContext<T> outboundContext,
        final SamlRegisteredService service) throws SAMLException {
    @NonNull
    final SecurityParametersContext secParametersContext =
        outboundContext.getSubcontext(SecurityParametersContext.class, true);
    final RoleDescriptor roleDesc = adaptor.getSsoDescriptor();
    final SignatureSigningParameters signingParameters = buildSignatureSigningParameters(roleDesc, service);
    secParametersContext.setSignatureSigningParameters(signingParameters);
}
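With Lombok, @NonNull on a local variable declaration generates a null check immediately after the assignment, so a missing SecurityParametersContext fails fast here rather than as a later NullPointerException on setSignatureSigningParameters. A rough sketch of the desugared code (assuming current Lombok semantics; the exact message text varies by Lombok version):

final SecurityParametersContext secParametersContext =
    outboundContext.getSubcontext(SecurityParametersContext.class, true);
// Check generated by Lombok for the @NonNull local variable:
if (secParametersContext == null) {
    throw new NullPointerException("secParametersContext is marked non-null but is null");
}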
Use of lombok.NonNull in project cas by apereo.
The class AbstractWrapperAuthenticationHandler, method doAuthentication:
@Override
protected AuthenticationHandlerExecutionResult doAuthentication(final Credential credential) throws GeneralSecurityException {
    final C credentials = convertToPac4jCredentials((I) credential);
    LOGGER.debug("credentials: [{}]", credentials);
    try {
        @NonNull
        final Authenticator authenticator = getAuthenticator(credential);
        if (authenticator instanceof InitializableObject) {
            ((InitializableObject) authenticator).init();
        }
        authenticator.validate(credentials, getWebContext());
        final UserProfile profile = this.profileCreator.create(credentials, getWebContext());
        LOGGER.debug("profile: [{}]", profile);
        return createResult(new ClientCredential(credentials, authenticator.getClass().getSimpleName()), profile);
    } catch (final Exception e) {
        LOGGER.error("Failed to validate credentials", e);
        throw new FailedLoginException("Failed to validate credentials: " + e.getMessage());
    }
}
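The @NonNull here fails fast if getAuthenticator returns no Authenticator for the credential. For context, authenticator.validate is the single-method pac4j contract being exercised above; a minimal hypothetical implementation (the class and its password check are illustrative, not part of CAS):

// Hypothetical pac4j Authenticator: validate() throws CredentialsException
// to reject the credentials and simply returns on success.
public class StaticPasswordAuthenticator implements Authenticator<UsernamePasswordCredentials> {
    @Override
    public void validate(final UsernamePasswordCredentials credentials, final WebContext context) {
        if (credentials == null || !"letmein".equals(credentials.getPassword())) {
            throw new CredentialsException("Bad credentials");
        }
    }
}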
Use of lombok.NonNull in project cas by apereo.
The class PolicyBasedAuthenticationManager, method authenticateInternal:
/**
 * Authenticate internal authentication builder.
 *
 * @param transaction the transaction
 * @return the authentication builder
 * @throws AuthenticationException the authentication exception
 */
protected AuthenticationBuilder authenticateInternal(final AuthenticationTransaction transaction) throws AuthenticationException {
    final Collection<Credential> credentials = transaction.getCredentials();
    LOGGER.debug("Authentication credentials provided for this transaction are [{}]", credentials);
    if (credentials.isEmpty()) {
        LOGGER.error("Resolved credentials for this transaction are empty");
        throw new AuthenticationException("Resolved credentials for this transaction are empty");
    }
    final AuthenticationBuilder builder = new DefaultAuthenticationBuilder(NullPrincipal.getInstance());
    credentials.forEach(cred -> builder.addCredential(new BasicCredentialMetaData(cred)));
    @NonNull
    final Set<AuthenticationHandler> handlerSet = getAuthenticationHandlersForThisTransaction(transaction);
    LOGGER.debug("Candidate resolved authentication handlers for this transaction are [{}]", handlerSet);
    if (handlerSet.isEmpty()) {
        LOGGER.error("Resolved authentication handlers for this transaction are empty");
        throw new AuthenticationException(builder.getFailures(), builder.getSuccesses());
    }
    try {
        final Iterator<Credential> it = credentials.iterator();
        AuthenticationCredentialsThreadLocalBinder.clearInProgressAuthentication();
        while (it.hasNext()) {
            final Credential credential = it.next();
            LOGGER.debug("Attempting to authenticate credential [{}]", credential);
            final Iterator<AuthenticationHandler> itHandlers = handlerSet.iterator();
            boolean proceedWithNextHandler = true;
            while (proceedWithNextHandler && itHandlers.hasNext()) {
                final AuthenticationHandler handler = itHandlers.next();
                if (handler.supports(credential)) {
                    try {
                        final PrincipalResolver resolver = getPrincipalResolverLinkedToHandlerIfAny(handler, transaction);
                        LOGGER.debug("Attempting authentication of [{}] using [{}]", credential.getId(), handler.getName());
                        authenticateAndResolvePrincipal(builder, credential, resolver, handler);
                        AuthenticationCredentialsThreadLocalBinder.bindInProgress(builder.build());
                        final Pair<Boolean, Set<Throwable>> failures = evaluateAuthenticationPolicies(builder.build(), transaction);
                        proceedWithNextHandler = !failures.getKey();
                    } catch (final Exception e) {
                        LOGGER.error("Authentication has failed. Credentials may be incorrect or CAS cannot "
                            + "find an authentication handler that supports [{}] of type [{}]. Examine the configuration to "
                            + "ensure a method of authentication is defined and analyze CAS logs at DEBUG level to trace "
                            + "the authentication event.", credential, credential.getClass().getSimpleName());
                        handleAuthenticationException(e, handler.getName(), builder);
                        proceedWithNextHandler = true;
                    }
                } else {
                    LOGGER.debug("Authentication handler [{}] does not support the credential type [{}]. Trying next...",
                        handler.getName(), credential);
                }
            }
        }
        evaluateFinalAuthentication(builder, transaction);
        return builder;
    } finally {
        AuthenticationCredentialsThreadLocalBinder.clearInProgressAuthentication();
    }
}
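The inner loop stops consulting further handlers as soon as evaluateAuthenticationPolicies reports success: the Pair's key is true once every configured policy is satisfied. A hypothetical policy illustrating that contract (the class is illustrative, and the AuthenticationPolicy signature varies across CAS versions):

// Hypothetical: the transaction is satisfied once any handler has succeeded.
public class AnyHandlerSuccessPolicy implements AuthenticationPolicy {
    @Override
    public boolean isSatisfiedBy(final Authentication authentication) {
        return !authentication.getSuccesses().isEmpty();
    }
}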
Use of lombok.NonNull in project metacat by Netflix.
The class HiveConnectorPartitionService, method savePartitions:
/**
 * {@inheritDoc}.
 */
@Override
public PartitionsSaveResponse savePartitions(@Nonnull @NonNull final ConnectorContext requestContext,
                                             @Nonnull @NonNull final QualifiedName tableName,
                                             @Nonnull @NonNull final PartitionsSaveRequest partitionsSaveRequest) {
    final String databasename = tableName.getDatabaseName();
    final String tablename = tableName.getTableName();
    // New partitions
    final List<Partition> hivePartitions = Lists.newArrayList();
    try {
        final Table table = metacatHiveClient.getTableByName(databasename, tablename);
        final List<PartitionInfo> partitionInfos = partitionsSaveRequest.getPartitions();
        // New partition ids
        final List<String> addedPartitionIds = Lists.newArrayList();
        // Updated partition ids
        final List<String> existingPartitionIds = Lists.newArrayList();
        // Existing partitions
        final List<Partition> existingHivePartitions = Lists.newArrayList();
        // Existing partition map
        Map<String, Partition> existingPartitionMap = Collections.emptyMap();
        if (partitionsSaveRequest.getCheckIfExists()) {
            final List<String> partitionNames = partitionInfos.stream().map(partition -> {
                final String partitionName = partition.getName().getPartitionName();
                PartitionUtil.validatePartitionName(partitionName, getPartitionKeys(table.getPartitionKeys()));
                return partitionName;
            }).collect(Collectors.toList());
            existingPartitionMap = getPartitionsByNames(table, partitionNames);
        }
        final TableInfo tableInfo = hiveMetacatConverters.toTableInfo(tableName, table);
        for (final PartitionInfo partitionInfo : partitionInfos) {
            final String partitionName = partitionInfo.getName().getPartitionName();
            final Partition hivePartition = existingPartitionMap.get(partitionName);
            if (hivePartition == null) {
                addedPartitionIds.add(partitionName);
                hivePartitions.add(hiveMetacatConverters.fromPartitionInfo(tableInfo, partitionInfo));
            } else {
                // Update the existing partition in place only when alterIfExists is set
                if (partitionsSaveRequest.getAlterIfExists()) {
                    final Partition existingPartition = hiveMetacatConverters.fromPartitionInfo(tableInfo, partitionInfo);
                    existingPartitionIds.add(partitionName);
                    existingPartition.setParameters(hivePartition.getParameters());
                    existingPartition.setCreateTime(hivePartition.getCreateTime());
                    existingPartition.setLastAccessTime(hivePartition.getLastAccessTime());
                    existingHivePartitions.add(existingPartition);
                }
            }
        }
        final Set<String> deletePartitionIds = Sets.newHashSet();
        if (!partitionsSaveRequest.getAlterIfExists()) {
            deletePartitionIds.addAll(existingPartitionIds);
        }
        if (partitionsSaveRequest.getPartitionIdsForDeletes() != null) {
            deletePartitionIds.addAll(partitionsSaveRequest.getPartitionIdsForDeletes());
        }
        if (partitionsSaveRequest.getAlterIfExists() && !existingHivePartitions.isEmpty()) {
            copyTableSdToPartitionSd(existingHivePartitions, table);
            metacatHiveClient.alterPartitions(databasename, tablename, existingHivePartitions);
        }
        copyTableSdToPartitionSd(hivePartitions, table);
        metacatHiveClient.addDropPartitions(databasename, tablename, hivePartitions, Lists.newArrayList(deletePartitionIds));
        final PartitionsSaveResponse result = new PartitionsSaveResponse();
        result.setAdded(addedPartitionIds);
        result.setUpdated(existingPartitionIds);
        return result;
    } catch (NoSuchObjectException exception) {
        if (exception.getMessage() != null && exception.getMessage().startsWith("Partition doesn't exist")) {
            throw new PartitionNotFoundException(tableName, "", exception);
        } else {
            throw new TableNotFoundException(tableName, exception);
        }
    } catch (MetaException | InvalidObjectException exception) {
        throw new InvalidMetaException("One or more partitions are invalid.", exception);
    } catch (AlreadyExistsException e) {
        final List<String> ids = getFakePartitionName(hivePartitions);
        throw new PartitionAlreadyExistsException(tableName, ids, e);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed to save partitions for hive table %s", tableName), exception);
    }
}
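Taken together: checkIfExists controls whether existing partitions are looked up at all, and alterIfExists decides whether matches are altered in place or dropped and re-created by addDropPartitions. A sketch of a caller exercising the alter path (assuming the request bean exposes setters matching the getters used above):

// Hypothetical caller; setter names are assumed from the getters above.
final PartitionsSaveRequest request = new PartitionsSaveRequest();
request.setPartitions(partitionInfos); // partitions to add or update
request.setCheckIfExists(true);        // look up which ones already exist
request.setAlterIfExists(true);        // alter existing ones instead of dropping them
final PartitionsSaveResponse response = partitionService.savePartitions(requestContext, tableName, request);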
Use of lombok.NonNull in project metacat by Netflix.
The class HiveConnectorDatabaseService, method list:
/**
 * {@inheritDoc}.
 */
@Override
public List<DatabaseInfo> list(@Nonnull @NonNull final ConnectorContext requestContext,
                               @Nonnull @NonNull final QualifiedName name,
                               @Nullable final QualifiedName prefix,
                               @Nullable final Sort sort,
                               @Nullable final Pageable pageable) {
    try {
        final List<DatabaseInfo> databaseInfos = Lists.newArrayList();
        for (final String databaseName : metacatHiveClient.getAllDatabases()) {
            final QualifiedName qualifiedName = QualifiedName.ofDatabase(name.getCatalogName(), databaseName);
            // prefix is @Nullable, so guard before dereferencing it
            if (prefix != null && !qualifiedName.toString().startsWith(prefix.toString())) {
                continue;
            }
            databaseInfos.add(DatabaseInfo.builder().name(qualifiedName).build());
        }
        // Sorting is supported by name only
        if (sort != null) {
            ConnectorUtils.sort(databaseInfos, sort, Comparator.comparing(p -> p.getName().getDatabaseName()));
        }
        return ConnectorUtils.paginate(databaseInfos, pageable);
    } catch (MetaException exception) {
        throw new InvalidMetaException(name, exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed to list hive databases for %s", name), exception);
    }
}
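Note that the prefix filter is a plain textual startsWith over the rendered qualified name, not a per-component match; a small sketch (database names hypothetical, assuming the usual catalog/database rendering of QualifiedName.toString()):

// "prod/et" textually prefixes both "prod/etl" and "prod/etc",
// so both databases would pass the filter above.
final QualifiedName candidate = QualifiedName.ofDatabase("prod", "etl");
final QualifiedName prefix = QualifiedName.ofDatabase("prod", "et");
final boolean matches = candidate.toString().startsWith(prefix.toString()); // true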