Search in sources:

Example 11 with ResultSetExtractor

Use of org.springframework.jdbc.core.ResultSetExtractor in project metacat by Netflix.

From the class DirectSqlGetPartition, method getPartitionCount.

/**
 * Number of partitions for the given table.
 *
 * @param requestContext request context
 * @param tableName      qualified name of the table
 * @return Number of partitions
 */
@Transactional(readOnly = true)
public int getPartitionCount(final ConnectorRequestContext requestContext, final QualifiedName tableName) {
    final long start = registry.clock().wallTime();
    // Handler for reading the result set
    final ResultSetExtractor<Integer> handler = rs -> {
        int count = 0;
        while (rs.next()) {
            count = rs.getInt("count");
        }
        return count;
    };
    try {
        final Optional<QualifiedName> sourceTable = getSourceTableName(tableName.getDatabaseName(), tableName.getTableName(), false);
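        // If this is an audit table with a known source table, count with the audit-table
        // query bound to both the audit and source table names; otherwise count directly
        // with the plain partition-count query.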
        return sourceTable.map(qualifiedName -> jdbcTemplate.query(
                SQL.SQL_GET_AUDIT_TABLE_PARTITION_COUNT,
                new String[] {
                    tableName.getDatabaseName(), tableName.getTableName(),
                    qualifiedName.getDatabaseName(), qualifiedName.getTableName()
                },
                new int[] {Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, Types.VARCHAR},
                handler))
            .orElseGet(() -> jdbcTemplate.query(
                SQL.SQL_GET_PARTITION_COUNT,
                new String[] {tableName.getDatabaseName(), tableName.getTableName()},
                new int[] {Types.VARCHAR, Types.VARCHAR},
                handler));
    } catch (Exception e) {
        throw new ConnectorException("Failed getting the partition count", e);
    } finally {
        this.fastServiceMetric.recordTimer(HiveMetrics.TagGetPartitionCount.getMetricName(), registry.clock().wallTime() - start);
    }
}
Also used : HiveConfigConstants(com.netflix.metacat.connector.hive.util.HiveConfigConstants) HiveConnectorFastServiceMetric(com.netflix.metacat.connector.hive.util.HiveConnectorFastServiceMetric) PartitionKeyParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionKeyParserEval) Date(java.util.Date) TimeoutException(java.util.concurrent.TimeoutException) PartitionFilterGenerator(com.netflix.metacat.connector.hive.util.PartitionFilterGenerator) PartitionParamParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionParamParserEval) Matcher(java.util.regex.Matcher) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) PartitionInfo(com.netflix.metacat.common.server.connectors.model.PartitionInfo) Map(java.util.Map) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) ConnectorRequestContext(com.netflix.metacat.common.server.connectors.ConnectorRequestContext) Collection(java.util.Collection) Pageable(com.netflix.metacat.common.dto.Pageable) QualifiedName(com.netflix.metacat.common.QualifiedName) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) HiveMetrics(com.netflix.metacat.connector.hive.monitoring.HiveMetrics) Slf4j(lombok.extern.slf4j.Slf4j) List(java.util.List) Optional(java.util.Optional) Pattern(java.util.regex.Pattern) Joiner(com.google.common.base.Joiner) Sort(com.netflix.metacat.common.dto.Sort) Types(java.sql.Types) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) AuditInfo(com.netflix.metacat.common.server.connectors.model.AuditInfo) JdbcTemplate(org.springframework.jdbc.core.JdbcTemplate) Strings(com.google.common.base.Strings) Lists(com.google.common.collect.Lists) ThreadServiceManager(com.netflix.metacat.common.server.util.ThreadServiceManager) ImmutableList(com.google.common.collect.ImmutableList) Qualifier(org.springframework.beans.factory.annotation.Qualifier) PartitionParser(com.netflix.metacat.common.server.partition.parser.PartitionParser) Config(com.netflix.metacat.common.server.properties.Config) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) Throwables(com.google.common.base.Throwables) Maps(com.google.common.collect.Maps) Table(org.apache.hadoop.hive.metastore.api.Table) SqlParameterValue(org.springframework.jdbc.core.SqlParameterValue) FilterPartition(com.netflix.metacat.common.server.partition.util.FilterPartition) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Futures(com.google.common.util.concurrent.Futures) StringReader(java.io.StringReader) Registry(com.netflix.spectator.api.Registry) PartitionListRequest(com.netflix.metacat.common.server.connectors.model.PartitionListRequest) HiveFilterPartition(com.netflix.metacat.connector.hive.util.HiveFilterPartition) VisibleForTesting(com.google.common.annotations.VisibleForTesting) HivePartitionKeyParserEval(com.netflix.metacat.connector.hive.util.HivePartitionKeyParserEval) ResultSetExtractor(org.springframework.jdbc.core.ResultSetExtractor) Transactional(org.springframework.transaction.annotation.Transactional)
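
For comparison, here is a minimal standalone sketch of the same counting pattern, assuming a configured JdbcTemplate and the standard Hive metastore tables (DBS, TBLS, PARTITIONS); the class name and SQL are illustrative and not taken from metacat:

import java.sql.Types;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;

public class PartitionCountSketch {

    private final JdbcTemplate jdbcTemplate;

    public PartitionCountSketch(final JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** Counts partitions of a Hive table; the SQL is a hypothetical direct metastore query. */
    public int getPartitionCount(final String databaseName, final String tableName) {
        // Extractor that reads the single aggregate column aliased as "count"
        final ResultSetExtractor<Integer> handler = rs -> rs.next() ? rs.getInt("count") : 0;
        return jdbcTemplate.query(
            "select count(*) as count from PARTITIONS p"
                + " join TBLS t on p.TBL_ID = t.TBL_ID"
                + " join DBS d on t.DB_ID = d.DB_ID"
                + " where d.NAME = ? and t.TBL_NAME = ?",
            new Object[] {databaseName, tableName},
            new int[] {Types.VARCHAR, Types.VARCHAR},
            handler);
    }
}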

Example 12 with ResultSetExtractor

Use of org.springframework.jdbc.core.ResultSetExtractor in project metacat by Netflix.

From the class DirectSqlGetPartition, method getPartitionNames.

/**
 * Returns the partition names stored at the given uris, keyed by uri.
 *
 * @param context      request context
 * @param uris         partition location uris to look up
 * @param prefixSearch whether to match locations by uri prefix instead of exact equality
 * @return map from uri to the qualified names of the partitions found at that location
 */
@Transactional(readOnly = true)
public Map<String, List<QualifiedName>> getPartitionNames(@Nonnull final ConnectorRequestContext context, @Nonnull final List<String> uris, final boolean prefixSearch) {
    final long start = registry.clock().wallTime();
    final Map<String, List<QualifiedName>> result = Maps.newHashMap();
    // Create the sql
    final StringBuilder queryBuilder = new StringBuilder(SQL.SQL_GET_PARTITION_NAMES_BY_URI);
    final List<SqlParameterValue> params = Lists.newArrayList();
    if (prefixSearch) {
        queryBuilder.append(" 1=2");
        uris.forEach(uri -> {
            queryBuilder.append(" or location like ?");
            params.add(new SqlParameterValue(Types.VARCHAR, uri + "%"));
        });
    } else {
        queryBuilder.append(" location in (");
        Joiner.on(',').appendTo(queryBuilder, uris.stream().map(uri -> "?").collect(Collectors.toList()));
        queryBuilder.append(")");
        params.addAll(uris.stream().map(uri -> new SqlParameterValue(Types.VARCHAR, uri)).collect(Collectors.toList()));
    }
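    // queryBuilder now ends either with a chain of "or location like ?" predicates seeded by
    // "1=2" (prefix search) or with a single "location in (?, ..., ?)" clause, with one bind
    // value per uri collected in params.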
    final ResultSetExtractor<Map<String, List<QualifiedName>>> handler = rs -> {
        while (rs.next()) {
            final String schemaName = rs.getString("schema_name");
            final String tableName = rs.getString("table_name");
            final String partitionName = rs.getString("partition_name");
            final String uri = rs.getString("location");
            final List<QualifiedName> partitionNames = result.get(uri);
            final QualifiedName qualifiedName = QualifiedName.ofPartition(catalogName, schemaName, tableName, partitionName);
            if (partitionNames == null) {
                result.put(uri, Lists.newArrayList(qualifiedName));
            } else {
                partitionNames.add(qualifiedName);
            }
        }
        return result;
    };
    try {
        jdbcTemplate.query(queryBuilder.toString(), params.toArray(), handler);
    } finally {
        this.fastServiceMetric.recordTimer(HiveMetrics.TagGetPartitionNames.getMetricName(), registry.clock().wallTime() - start);
    }
    return result;
}
Also used : HiveConfigConstants(com.netflix.metacat.connector.hive.util.HiveConfigConstants) HiveConnectorFastServiceMetric(com.netflix.metacat.connector.hive.util.HiveConnectorFastServiceMetric) PartitionKeyParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionKeyParserEval) Date(java.util.Date) TimeoutException(java.util.concurrent.TimeoutException) PartitionFilterGenerator(com.netflix.metacat.connector.hive.util.PartitionFilterGenerator) PartitionParamParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionParamParserEval) Matcher(java.util.regex.Matcher) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) PartitionInfo(com.netflix.metacat.common.server.connectors.model.PartitionInfo) Map(java.util.Map) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) ConnectorRequestContext(com.netflix.metacat.common.server.connectors.ConnectorRequestContext) Collection(java.util.Collection) Pageable(com.netflix.metacat.common.dto.Pageable) QualifiedName(com.netflix.metacat.common.QualifiedName) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) HiveMetrics(com.netflix.metacat.connector.hive.monitoring.HiveMetrics) Slf4j(lombok.extern.slf4j.Slf4j) List(java.util.List) Optional(java.util.Optional) Pattern(java.util.regex.Pattern) Joiner(com.google.common.base.Joiner) Sort(com.netflix.metacat.common.dto.Sort) Types(java.sql.Types) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) AuditInfo(com.netflix.metacat.common.server.connectors.model.AuditInfo) JdbcTemplate(org.springframework.jdbc.core.JdbcTemplate) Strings(com.google.common.base.Strings) Lists(com.google.common.collect.Lists) ThreadServiceManager(com.netflix.metacat.common.server.util.ThreadServiceManager) ImmutableList(com.google.common.collect.ImmutableList) Qualifier(org.springframework.beans.factory.annotation.Qualifier) PartitionParser(com.netflix.metacat.common.server.partition.parser.PartitionParser) Config(com.netflix.metacat.common.server.properties.Config) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) Throwables(com.google.common.base.Throwables) Maps(com.google.common.collect.Maps) Table(org.apache.hadoop.hive.metastore.api.Table) SqlParameterValue(org.springframework.jdbc.core.SqlParameterValue) FilterPartition(com.netflix.metacat.common.server.partition.util.FilterPartition) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Futures(com.google.common.util.concurrent.Futures) StringReader(java.io.StringReader) Registry(com.netflix.spectator.api.Registry) PartitionListRequest(com.netflix.metacat.common.server.connectors.model.PartitionListRequest) HiveFilterPartition(com.netflix.metacat.connector.hive.util.HiveFilterPartition) VisibleForTesting(com.google.common.annotations.VisibleForTesting) HivePartitionKeyParserEval(com.netflix.metacat.connector.hive.util.HivePartitionKeyParserEval) ResultSetExtractor(org.springframework.jdbc.core.ResultSetExtractor) Transactional(org.springframework.transaction.annotation.Transactional)
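
A companion sketch of the same map-building extractor pattern on its own, again assuming a configured JdbcTemplate and illustrative Hive metastore tables (DBS, TBLS, SDS); the class name and query are hypothetical:

import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;

public class TablesByLocationSketch {

    private final JdbcTemplate jdbcTemplate;

    public TablesByLocationSketch(final JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** Groups "db.table" names by storage location matching the given prefix; SQL is illustrative. */
    public Map<String, List<String>> tablesByLocation(final String locationPrefix) {
        // Extractor that folds the entire result set into a map keyed by location
        final ResultSetExtractor<Map<String, List<String>>> handler = rs -> {
            final Map<String, List<String>> result = Maps.newHashMap();
            while (rs.next()) {
                final String location = rs.getString("location");
                final String name = rs.getString("schema_name") + "." + rs.getString("table_name");
                result.computeIfAbsent(location, key -> Lists.newArrayList()).add(name);
            }
            return result;
        };
        return jdbcTemplate.query(
            "select d.NAME as schema_name, t.TBL_NAME as table_name, s.LOCATION as location"
                + " from TBLS t"
                + " join DBS d on t.DB_ID = d.DB_ID"
                + " join SDS s on t.SD_ID = s.SD_ID"
                + " where s.LOCATION like ?",
            new Object[] {locationPrefix + "%"},
            handler);
    }
}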

Aggregations

ResultSetExtractor (org.springframework.jdbc.core.ResultSetExtractor) 12
Strings (com.google.common.base.Strings) 10
Lists (com.google.common.collect.Lists) 10
Maps (com.google.common.collect.Maps) 10
QualifiedName (com.netflix.metacat.common.QualifiedName) 10
Config (com.netflix.metacat.common.server.properties.Config) 10
Types (java.sql.Types) 10
List (java.util.List) 10
Map (java.util.Map) 10
Collectors (java.util.stream.Collectors) 10
Slf4j (lombok.extern.slf4j.Slf4j) 10
JdbcTemplate (org.springframework.jdbc.core.JdbcTemplate) 10
SqlParameterValue (org.springframework.jdbc.core.SqlParameterValue) 10
Transactional (org.springframework.transaction.annotation.Transactional) 10
Joiner (com.google.common.base.Joiner) 9
VisibleForTesting (com.google.common.annotations.VisibleForTesting) 7
ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext) 5
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException) 5
HiveMetrics (com.netflix.metacat.connector.hive.monitoring.HiveMetrics) 5
HiveConnectorFastServiceMetric (com.netflix.metacat.connector.hive.util.HiveConnectorFastServiceMetric) 5