Example 11 with NonNull

Use of lombok.NonNull in project metacat by Netflix.

The class HiveConnectorFastPartitionService, method getPartitionKeys.

/**
 * {@inheritDoc}.
 */
@Override
public List<String> getPartitionKeys(@Nonnull @NonNull final ConnectorContext requestContext,
                                     @Nonnull @NonNull final QualifiedName tableName,
                                     @Nonnull @NonNull final PartitionListRequest partitionsRequest) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", "getPartitionKeys");
    final List<String> result;
    final List<String> partitionNames = partitionsRequest.getPartitionNames();
    final Sort sort = partitionsRequest.getSort();
    final Pageable pageable = partitionsRequest.getPageable();
    final String filterExpression = partitionsRequest.getFilter();
    if (filterExpression != null) {
        final FilterPartition filter = new FilterPartition();
        // batch exists
        final boolean isBatched = !Strings.isNullOrEmpty(filterExpression) && filterExpression.contains(FIELD_BATCHID);
        final boolean hasDateCreated = !Strings.isNullOrEmpty(filterExpression) && filterExpression.contains(FIELD_DATE_CREATED);
        // Handler for reading the result set
        final ResultSetHandler<List<String>> handler = rs -> {
            final List<String> names = Lists.newArrayList();
            while (rs.next()) {
                final String name = rs.getString("name");
                final String uri = rs.getString("uri");
                final long createdDate = rs.getLong(FIELD_DATE_CREATED);
                Map<String, String> values = null;
                if (hasDateCreated) {
                    values = Maps.newHashMap();
                    values.put(FIELD_DATE_CREATED, createdDate + "");
                }
                if (Strings.isNullOrEmpty(filterExpression) || filter.evaluatePartitionExpression(filterExpression, name, uri, isBatched, values)) {
                    names.add(name);
                }
            }
            return names;
        };
        result = getHandlerResults(tableName.getDatabaseName(), tableName.getTableName(), filterExpression,
            partitionNames, SQL_GET_PARTITIONS_WITH_KEY_URI, handler, sort, pageable);
    } else {
        // Handler for reading the result set
        final ResultSetHandler<List<String>> handler = rs -> {
            final List<String> names = Lists.newArrayList();
            while (rs.next()) {
                names.add(rs.getString("name"));
            }
            return names;
        };
        result = getHandlerResults(tableName.getDatabaseName(), tableName.getTableName(), null,
            partitionNames, SQL_GET_PARTITIONS_WITH_KEY, handler, sort, pageable);
    }
    final long duration = registry.clock().monotonicTime() - start;
    log.debug("### Time taken to complete getPartitionKeys is {} ms", duration);
    this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    return result;
}
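
Worth noting in the signature above: each parameter carries both javax.annotation.Nonnull and lombok.NonNull. The former is advisory metadata for static analysis tools; the latter causes Lombok to weave a runtime null check into the compiled method. A hand-written sketch of the effective behavior (the exact exception message varies by Lombok version):

public List<String> getPartitionKeys(final ConnectorContext requestContext,
                                     final QualifiedName tableName,
                                     final PartitionListRequest partitionsRequest) {
    // Lombok inserts guards like these for each @NonNull parameter.
    if (requestContext == null) {
        throw new NullPointerException("requestContext is marked non-null but is null");
    }
    if (tableName == null) {
        throw new NullPointerException("tableName is marked non-null but is null");
    }
    if (partitionsRequest == null) {
        throw new NullPointerException("partitionsRequest is marked non-null but is null");
    }
    // ... method body as shown above ...
}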

Example 12 with NonNull

Use of lombok.NonNull in project metacat by Netflix.

The class HiveConnectorFastPartitionService, method getPartitionNames.

/**
 * getPartitionNames.
 *
 * @param context      connector context
 * @param uris         locations to match
 * @param prefixSearch when true, each uri is treated as a location prefix
 * @return partition names keyed by uri
 */
@Override
public Map<String, List<QualifiedName>> getPartitionNames(@Nonnull final ConnectorContext context,
                                                          @Nonnull final List<String> uris,
                                                          final boolean prefixSearch) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getPartitionNames.name());
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL_GET_PARTITION_NAMES_BY_URI);
    final List<String> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" 1=2");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(uri + "%");
        });
    } else {
        queryBuilder.append(" location in (");
        Joiner.on(',').appendTo(queryBuilder, uris.stream().map(uri -> "?").collect(Collectors.toList()));
        queryBuilder.append(")");
        params.addAll(uris);
    }
    // Handler for reading the result set
    final ResultSetHandler<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String partitionName = rs.getString("partition_name");
            final String uri = rs.getString("location");
            final List<QualifiedName> partitionNames = result.get(uri);
            final QualifiedName qualifiedName = QualifiedName.ofPartition(catalogName, schemaName, tableName, partitionName);
            if (partitionNames == null) {
                result.put(uri, Lists.newArrayList(qualifiedName));
            } else {
                partitionNames.add(qualifiedName);
            }
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        new QueryRunner().query(conn, queryBuilder.toString(), handler, params.toArray());
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getPartitionNames is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
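
The query construction here is the interesting part: for a prefix search each uri becomes a LIKE term ORed onto an always-false seed predicate, while an exact search turns the uris into a parameterized IN list. A self-contained sketch of the two shapes (the base SELECT string is a stand-in, not the real SQL_GET_PARTITION_NAMES_BY_URI constant):

import com.google.common.base.Joiner;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public final class UriPredicateSketch {
    public static void main(final String[] args) {
        final List<String> uris = Arrays.asList("s3://bucket/a", "s3://bucket/b");
        // Stand-in for the real SQL constant, assumed to end where the uri predicate begins.
        final StringBuilder queryBuilder = new StringBuilder("select ... where");
        final List<String> params = new ArrayList<>();
        final boolean prefixSearch = true;
        if (prefixSearch) {
            // always-false seed so every uri clause can be appended with 'or'
            queryBuilder.append(" 1=2");
            uris.forEach(uri -> {
                queryBuilder.append(" or location like ?");
                params.add(uri + "%");
            });
        } else {
            // one '?' placeholder per uri, joined into an IN list
            queryBuilder.append(" location in (");
            Joiner.on(',').appendTo(queryBuilder, uris.stream().map(uri -> "?").collect(Collectors.toList()));
            queryBuilder.append(")");
            params.addAll(uris);
        }
        // prints: select ... where 1=2 or location like ? or location like ?
        System.out.println(queryBuilder);
        // prints: [s3://bucket/a%, s3://bucket/b%]
        System.out.println(params);
    }
}

Because the values travel through params rather than being concatenated into the SQL, the query stays safe from injection regardless of what the uris contain.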

Example 13 with NonNull

Use of lombok.NonNull in project metacat by Netflix.

The class HiveConnectorFastTableService, method getTableNames.

@Override
public Map<String, List<QualifiedName>> getTableNames(@Nonnull final ConnectorContext context,
                                                      @Nonnull final List<String> uris,
                                                      final boolean prefixSearch) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getTableNames.name());
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Get data source
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL_GET_TABLE_NAMES_BY_URI);
    final List<String> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" and (1=0");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(uri + "%");
        });
        queryBuilder.append(" )");
    } else {
        queryBuilder.append(" and location in (");
        uris.forEach(uri -> {
            queryBuilder.append("?,");
            params.add(uri);
        });
        // drop the trailing comma before closing the IN list
        queryBuilder.deleteCharAt(queryBuilder.length() - 1).append(")");
    }
    // Handler for reading the result set
    final ResultSetHandler<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String uri = rs.getString("location");
            List<QualifiedName> names = result.get(uri);
            if (names == null) {
                names = Lists.newArrayList();
                result.put(uri, names);
            }
            names.add(QualifiedName.ofTable(catalogName, schemaName, tableName));
        }
        return result;
    };
    try (Connection conn = dataSource.getConnection()) {
        new QueryRunner().query(conn, queryBuilder.toString(), handler, params.toArray());
    } catch (SQLException e) {
        throw Throwables.propagate(e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getTableNames is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
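
Both connector services lean on the same commons-dbutils pattern: a ResultSetHandler encapsulates the row-to-object mapping, and QueryRunner owns statement preparation, parameter binding, and execution. A minimal standalone sketch of that pattern (the table and column names are hypothetical, purely for illustration):

import com.google.common.collect.Lists;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;

public final class DbUtilsSketch {
    // Hypothetical table and column names, for illustration only.
    private static final String SQL = "select name from example_table where owner = ?";

    public static List<String> namesOwnedBy(final DataSource dataSource, final String owner) throws SQLException {
        // The handler sees an open ResultSet and returns the mapped value.
        final ResultSetHandler<List<String>> handler = rs -> {
            final List<String> names = Lists.newArrayList();
            while (rs.next()) {
                names.add(rs.getString("name"));
            }
            return names;
        };
        try (Connection conn = dataSource.getConnection()) {
            // QueryRunner prepares the statement, binds the parameters,
            // executes, and hands the ResultSet to the handler.
            return new QueryRunner().query(conn, SQL, handler, owner);
        }
    }
}

The try-with-resources closes the connection; the handler never deals with statement or result-set cleanup, which is exactly what keeps the metacat handlers above so small.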

Example 14 with NonNull

Use of lombok.NonNull in project cas by apereo.

The class SamlIdPObjectSigner, method buildSignatureSigningParameters.

/**
 * Build the signature signing parameters.
 *
 * @param descriptor the descriptor
 * @param service    the service
 * @return the signature signing parameters
 * @throws SAMLException the saml exception
 */
@SneakyThrows
protected SignatureSigningParameters buildSignatureSigningParameters(final RoleDescriptor descriptor,
                                                                     final SamlRegisteredService service) throws SAMLException {
    final CriteriaSet criteria = new CriteriaSet();
    final SignatureSigningConfiguration signatureSigningConfiguration = getSignatureSigningConfiguration(descriptor, service);
    criteria.add(new SignatureSigningConfigurationCriterion(signatureSigningConfiguration));
    criteria.add(new RoleDescriptorCriterion(descriptor));
    final SAMLMetadataSignatureSigningParametersResolver resolver = new SAMLMetadataSignatureSigningParametersResolver();
    LOGGER.debug("Resolving signature signing parameters for [{}]", descriptor.getElementQName().getLocalPart());
    @NonNull final SignatureSigningParameters params = resolver.resolveSingle(criteria);
    LOGGER.debug("Created signature signing parameters." + "\nSignature algorithm: [{}]" + "\nSignature canonicalization algorithm: [{}]" + "\nSignature reference digest methods: [{}]", params.getSignatureAlgorithm(), params.getSignatureCanonicalizationAlgorithm(), params.getSignatureReferenceDigestMethod());
    return params;
}
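
The @SneakyThrows on this method is what lets the body call checked-exception APIs with no try/catch (which makes the explicit throws SAMLException largely redundant). The annotation rethrows checked exceptions unchecked, without wrapping them. A toy but runnable illustration of the annotation and its hand-written equivalent (the file-reading method is mine, not from CAS):

import lombok.Lombok;
import lombok.SneakyThrows;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public final class SneakyThrowsSketch {

    // With Lombok: no try/catch and no 'throws IOException' needed.
    @SneakyThrows
    static List<String> readLines(final Path path) {
        return Files.readAllLines(path);
    }

    // Roughly what Lombok generates for the method above.
    static List<String> readLinesDesugared(final Path path) {
        try {
            return Files.readAllLines(path);
        } catch (final Throwable t) {
            // Rethrows the original checked exception unchecked; no wrapper exception.
            throw Lombok.sneakyThrow(t);
        }
    }
}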

Example 15 with NonNull

Use of lombok.NonNull in project cas by apereo.

The class SamlObjectSignatureValidator, method validateSignatureOnProfileRequest.

private void validateSignatureOnProfileRequest(final RequestAbstractType profileRequest, final Signature signature, final RoleDescriptorResolver roleDescriptorResolver) throws Exception {
    final SAMLSignatureProfileValidator validator = new SAMLSignatureProfileValidator();
    LOGGER.debug("Validating profile signature for [{}] via [{}]...", profileRequest.getIssuer(), validator.getClass().getSimpleName());
    validator.validate(signature);
    LOGGER.debug("Successfully validated profile signature for [{}].", profileRequest.getIssuer());
    @NonNull final Set<Credential> credentials = getSigningCredential(roleDescriptorResolver, profileRequest);
    if (credentials.isEmpty()) {
        throw new SamlException("Signing credentials for validation could not be resolved based on the provided signature");
    }
    boolean foundValidCredential = false;
    final Iterator<Credential> it = credentials.iterator();
    while (!foundValidCredential && it.hasNext()) {
        try {
            final Credential c = it.next();
            LOGGER.debug("Validating signature using credentials for [{}]", c.getEntityId());
            SignatureValidator.validate(signature, c);
            LOGGER.info("Successfully validated the request signature.");
            foundValidCredential = true;
        } catch (final Exception e) {
            LOGGER.debug(e.getMessage(), e);
        }
    }
    if (!foundValidCredential) {
        LOGGER.error("No valid credentials could be found to verify the signature for [{}]", profileRequest.getIssuer());
        throw new SamlException("No valid signing credentials for validation could not be resolved");
    }
}
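
The credential loop above is a "first success wins" pattern: each per-candidate failure is logged and swallowed, and the operation fails hard only if no candidate succeeds. Extracted into a generic helper, the shape looks like this (the helper and its names are illustrative, not part of the CAS codebase):

import java.util.Collection;

public final class FirstSuccess {

    @FunctionalInterface
    public interface ThrowingConsumer<T> {
        void accept(T t) throws Exception;
    }

    /** Tries the action on each candidate until one succeeds; returns whether any did. */
    public static <T> boolean anySucceeds(final Collection<T> candidates, final ThrowingConsumer<T> action) {
        for (final T candidate : candidates) {
            try {
                action.accept(candidate);
                return true; // first success wins
            } catch (final Exception e) {
                // a single candidate failing is expected; move on to the next
            }
        }
        return false;
    }
}

With such a helper, the loop would reduce to anySucceeds(credentials, c -> SignatureValidator.validate(signature, c)), with per-candidate logging added as needed.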

Aggregations

NonNull (lombok.NonNull): 19 usages
Lists (com.google.common.collect.Lists): 8 usages
QualifiedName (com.netflix.metacat.common.QualifiedName): 8 usages
ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext): 8 usages
HiveConnectorInfoConverter (com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter): 8 usages
List (java.util.List): 8 usages
Nonnull (javax.annotation.Nonnull): 8 usages
Inject (javax.inject.Inject): 8 usages
Named (javax.inject.Named): 8 usages
Pageable (com.netflix.metacat.common.dto.Pageable): 7 usages
Sort (com.netflix.metacat.common.dto.Sort): 7 usages
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException): 7 usages
Map (java.util.Map): 7 usages
Nullable (javax.annotation.Nullable): 7 usages
Strings (com.google.common.base.Strings): 6 usages
Maps (com.google.common.collect.Maps): 6 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 6 usages
Throwables (com.google.common.base.Throwables): 5 usages
PartitionInfo (com.netflix.metacat.common.server.connectors.model.PartitionInfo): 5 usages
PartitionListRequest (com.netflix.metacat.common.server.connectors.model.PartitionListRequest): 5 usages