
Example 1 with ConnectorContext

Use of com.netflix.metacat.common.server.connectors.ConnectorContext in project metacat by Netflix.

From class PartitionServiceImpl, method getPartitionKeys.

@Override
public List<String> getPartitionKeys(final QualifiedName name, final String filter, final List<String> partitionNames, final Sort sort, final Pageable pageable) {
    List<String> result = Lists.newArrayList();
    if (tableService.exists(name)) {
        final MetacatRequestContext metacatRequestContext = MetacatContextManager.getContext();
        final ConnectorPartitionService service = connectorManager.getPartitionService(name.getCatalogName());
        final GetPartitionsRequestDto requestDto = new GetPartitionsRequestDto();
        requestDto.setFilter(filter);
        requestDto.setPartitionNames(partitionNames);
        final ConnectorContext connectorContext = converterUtil.toConnectorContext(metacatRequestContext);
        try {
            result = service.getPartitionKeys(connectorContext, name, converterUtil.toPartitionListRequest(requestDto, pageable, sort));
        } catch (final UnsupportedOperationException uoe) {
            log.debug("Catalog {} doesn't support getPartitionKeys. Ignoring.", name.getCatalogName());
        }
    }
    return result;
}
Also used : MetacatRequestContext(com.netflix.metacat.common.MetacatRequestContext) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) ConnectorPartitionService(com.netflix.metacat.common.server.connectors.ConnectorPartitionService) GetPartitionsRequestDto(com.netflix.metacat.common.dto.GetPartitionsRequestDto)
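
For context, a minimal caller sketch for this method. It assumes an injected PartitionService implementation (the interface PartitionServiceImpl overrides); the import path for that interface, the Sort and Pageable constructors, and the catalog/database/table names are assumptions, not taken from the snippet above.

import com.netflix.metacat.common.QualifiedName;
import com.netflix.metacat.common.dto.Pageable;
import com.netflix.metacat.common.dto.Sort;
import com.netflix.metacat.common.dto.SortOrder;
import com.netflix.metacat.main.services.PartitionService;

import java.util.Collections;
import java.util.List;

public class PartitionKeysCallerSketch {
    /**
     * Returns the first page of partition keys for a table, sorted by name.
     */
    public List<String> firstPageOfKeys(final PartitionService partitionService) {
        final QualifiedName table = QualifiedName.ofTable("prodhive", "mydb", "mytable");
        // No filter expression and no explicit partition names: page through all keys.
        return partitionService.getPartitionKeys(table, null, Collections.emptyList(),
            new Sort("name", SortOrder.ASC), new Pageable(100, 0));
    }
}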

Example 2 with ConnectorContext

Use of com.netflix.metacat.common.server.connectors.ConnectorContext in project metacat by Netflix.

From class HiveConnectorFastPartitionService, method getPartitionCount.

/**
 * Returns the number of partitions for the given table.
 *
 * @param requestContext the connector request context
 * @param tableName      the qualified table name
 * @return the number of partitions
 */
@Override
public int getPartitionCount(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName tableName) {
    final long start = registry.clock().monotonicTime();
    final Map<String, String> tags = new HashMap<String, String>();
    tags.put("request", HiveMetrics.getPartitionCount.name());
    final Integer result;
    final DataSource dataSource = DataSourceManager.get().get(catalogName);
    try (Connection conn = dataSource.getConnection()) {
        // Handler for reading the result set
        final ResultSetHandler<Integer> handler = rs -> {
            int count = 0;
            while (rs.next()) {
                count = rs.getInt("count");
            }
            return count;
        };
        result = new QueryRunner().query(conn, SQL_GET_PARTITION_COUNT, handler, tableName.getDatabaseName(), tableName.getTableName());
    } catch (SQLException e) {
        throw new ConnectorException("getPartitionCount", e);
    } finally {
        final long duration = registry.clock().monotonicTime() - start;
        log.debug("### Time taken to complete getPartitionCount is {} ms", duration);
        this.registry.timer(requestTimerId.withTags(tags)).record(duration, TimeUnit.MILLISECONDS);
    }
    return result;
}
Also used : Connection(java.sql.Connection) PartitionKeyParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionKeyParserEval) Date(java.util.Date) PartitionFilterGenerator(com.netflix.metacat.connector.hive.util.PartitionFilterGenerator) PartitionParamParserEval(com.netflix.metacat.common.server.partition.visitor.PartitionParamParserEval) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) PartitionInfo(com.netflix.metacat.common.server.connectors.model.PartitionInfo) Map(java.util.Map) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) QueryRunner(org.apache.commons.dbutils.QueryRunner) NonNull(lombok.NonNull) Collection(java.util.Collection) Pageable(com.netflix.metacat.common.dto.Pageable) QualifiedName(com.netflix.metacat.common.QualifiedName) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) HiveMetrics(com.netflix.metacat.connector.hive.monitoring.HiveMetrics) Slf4j(lombok.extern.slf4j.Slf4j) List(java.util.List) ResultSetHandler(org.apache.commons.dbutils.ResultSetHandler) Joiner(com.google.common.base.Joiner) Sort(com.netflix.metacat.common.dto.Sort) ListenableFuture(com.google.common.util.concurrent.ListenableFuture) AuditInfo(com.netflix.metacat.common.server.connectors.model.AuditInfo) HashMap(java.util.HashMap) Id(com.netflix.spectator.api.Id) ArrayList(java.util.ArrayList) Inject(javax.inject.Inject) Strings(com.google.common.base.Strings) SQLException(java.sql.SQLException) Lists(com.google.common.collect.Lists) ThreadServiceManager(com.netflix.metacat.common.server.util.ThreadServiceManager) DataSource(javax.sql.DataSource) PartitionParser(com.netflix.metacat.common.server.partition.parser.PartitionParser) Named(javax.inject.Named) HiveConnectorInfoConverter(com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) PartitionDetail(com.netflix.metacat.connector.hive.util.PartitionDetail) Functions(com.google.common.base.Functions) DataSourceManager(com.netflix.metacat.common.server.util.DataSourceManager) Throwables(com.google.common.base.Throwables) Maps(com.google.common.collect.Maps) FilterPartition(com.netflix.metacat.common.server.partition.util.FilterPartition) TimeUnit(java.util.concurrent.TimeUnit) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) Futures(com.google.common.util.concurrent.Futures) StringReader(java.io.StringReader) Registry(com.netflix.spectator.api.Registry) PartitionListRequest(com.netflix.metacat.common.server.connectors.model.PartitionListRequest)
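
Because the SQL_GET_PARTITION_COUNT constant is not shown above, here is a self-contained sketch of the same commons-dbutils pattern (a QueryRunner plus a ResultSetHandler that reads a single count column). The SQL text against the Hive metastore schema is a guess for illustration, not the constant metacat actually uses.

import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;

public class PartitionCountQuerySketch {
    // Hypothetical query against the metastore tables; column alias matches rs.getInt("count").
    private static final String SQL =
        "SELECT COUNT(*) AS count "
            + "FROM PARTITIONS P "
            + "JOIN TBLS T ON P.TBL_ID = T.TBL_ID "
            + "JOIN DBS D ON T.DB_ID = D.DB_ID "
            + "WHERE D.NAME = ? AND T.TBL_NAME = ?";

    public int count(final DataSource dataSource, final String db, final String table) throws SQLException {
        // Map the single-row result set to an int; 0 if no row comes back.
        final ResultSetHandler<Integer> handler = rs -> rs.next() ? rs.getInt("count") : 0;
        try (Connection conn = dataSource.getConnection()) {
            return new QueryRunner().query(conn, SQL, handler, db, table);
        }
    }
}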

Example 3 with ConnectorContext

Use of com.netflix.metacat.common.server.connectors.ConnectorContext in project metacat by Netflix.

From class HiveConnectorTableService, method list.

/**
 * {@inheritDoc}.
 */
@Override
public List<TableInfo> list(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final QualifiedName name, @Nullable final QualifiedName prefix, @Nullable final Sort sort, @Nullable final Pageable pageable) {
    try {
        final List<TableInfo> tableInfos = Lists.newArrayList();
        for (String tableName : metacatHiveClient.getAllTables(name.getDatabaseName())) {
            final QualifiedName qualifiedName = QualifiedName.ofDatabase(name.getCatalogName(), tableName);
            // Skip tables outside the optional prefix; prefix is @Nullable, so guard against NPE.
            if (prefix != null && !qualifiedName.toString().startsWith(prefix.toString())) {
                continue;
            }
            final Table table = metacatHiveClient.getTableByName(name.getDatabaseName(), tableName);
            tableInfos.add(hiveMetacatConverters.toTableInfo(name, table));
        }
        //supporting sort by name only
        if (sort != null) {
            ConnectorUtils.sort(tableInfos, sort, Comparator.comparing(p -> p.getName().getTableName()));
        }
        return ConnectorUtils.paginate(tableInfos, pageable);
    } catch (MetaException exception) {
        throw new DatabaseNotFoundException(name, exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed list hive table %s", name), exception);
    }
}
Also used : MetaException(org.apache.hadoop.hive.metastore.api.MetaException) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) DatabaseNotFoundException(com.netflix.metacat.common.server.connectors.exception.DatabaseNotFoundException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) Inject(javax.inject.Inject) Strings(com.google.common.base.Strings) ConnectorTableService(com.netflix.metacat.common.server.connectors.ConnectorTableService) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo) InvalidMetaException(com.netflix.metacat.common.server.connectors.exception.InvalidMetaException) Lists(com.google.common.collect.Lists) ImmutableList(com.google.common.collect.ImmutableList) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) Map(java.util.Map) Path(org.apache.hadoop.fs.Path) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) Named(javax.inject.Named) HiveConnectorInfoConverter(com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable) ImmutableMap(com.google.common.collect.ImmutableMap) NonNull(lombok.NonNull) Pageable(com.netflix.metacat.common.dto.Pageable) TException(org.apache.thrift.TException) QualifiedName(com.netflix.metacat.common.QualifiedName) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) TableNotFoundException(com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) Maps(com.google.common.collect.Maps) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) List(java.util.List) TableInfo(com.netflix.metacat.common.server.connectors.model.TableInfo) TableAlreadyExistsException(com.netflix.metacat.common.server.connectors.exception.TableAlreadyExistsException) TableType(org.apache.hadoop.hive.metastore.TableType) ConnectorUtils(com.netflix.metacat.common.server.connectors.ConnectorUtils) Comparator(java.util.Comparator) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Sort(com.netflix.metacat.common.dto.Sort)
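
A possible caller sketch for list(), assuming an already-constructed HiveConnectorTableService and ConnectorContext. The import path for HiveConnectorTableService, the catalog and database names, and the "sales" prefix are illustrative assumptions; prefix matching follows the QualifiedName.toString() comparison used in list() above.

import com.netflix.metacat.common.QualifiedName;
import com.netflix.metacat.common.dto.Pageable;
import com.netflix.metacat.common.dto.Sort;
import com.netflix.metacat.common.dto.SortOrder;
import com.netflix.metacat.common.server.connectors.ConnectorContext;
import com.netflix.metacat.common.server.connectors.model.TableInfo;
import com.netflix.metacat.connector.hive.HiveConnectorTableService;

import java.util.List;

public class ListTablesCallerSketch {
    public List<TableInfo> listSalesTables(final HiveConnectorTableService tableService,
                                           final ConnectorContext context) {
        final QualifiedName database = QualifiedName.ofDatabase("prodhive", "mydb");
        // Only tables whose qualified name starts with the prefix are returned.
        final QualifiedName prefix = QualifiedName.ofTable("prodhive", "mydb", "sales");
        return tableService.list(context, database, prefix,
            new Sort("name", SortOrder.ASC), new Pageable(50, 0));
    }
}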

Example 4 with ConnectorContext

Use of com.netflix.metacat.common.server.connectors.ConnectorContext in project metacat by Netflix.

From class CatalogManager, method loadCatalog.

private void loadCatalog(final File file) throws Exception {
    log.info("-- Loading catalog {} --", file);
    final Map<String, String> properties = new HashMap<>(this.loadProperties(file));
    final String connectorType = properties.remove(MetacatCatalogConfig.Keys.CONNECTOR_NAME);
    Preconditions.checkState(connectorType != null, "Catalog configuration %s does not contain connector.name", file.getAbsoluteFile());
    // Catalog shard name should be unique. Usually the catalog name is same as the catalog shard name.
    // If multiple catalog property files refer the same catalog name, then there will be multiple shard names
    // with the same catalog name.
    final String catalogShardName = Files.getNameWithoutExtension(file.getName());
    // If catalog name is not specified, then use the catalog shard name.
    final String catalogName = properties.getOrDefault(MetacatCatalogConfig.Keys.CATALOG_NAME, catalogShardName);
    final ConnectorContext connectorContext = new ConnectorContext(catalogName, catalogShardName, connectorType, config, registry, properties);
    this.connectorManager.createConnection(connectorContext);
    log.info("-- Added catalog {} shard {} using connector {} --", catalogName, catalogShardName, connectorType);
}
Also used : HashMap(java.util.HashMap) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext)
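
To make the wiring concrete, here is a hedged sketch of what loadCatalog effectively builds for a hypothetical prodhive.properties catalog file. The connector-specific property, the catalog names, and the Config import path are assumptions for illustration; the six-argument ConnectorContext constructor matches the call shown above.

import com.netflix.metacat.common.server.connectors.ConnectorContext;
import com.netflix.metacat.common.server.properties.Config;
import com.netflix.spectator.api.Registry;

import java.util.HashMap;
import java.util.Map;

public class CatalogContextSketch {
    public ConnectorContext build(final Config config, final Registry registry) {
        // Remaining catalog properties after connector.name has been removed by loadCatalog.
        final Map<String, String> properties = new HashMap<>();
        properties.put("hive.metastore.uris", "thrift://metastore.example.com:9083"); // connector-specific, illustrative
        final String connectorType = "hive";        // value of connector.name in the file
        final String catalogShardName = "prodhive"; // file name without the .properties extension
        final String catalogName = "prodhive";      // defaults to the shard name when not configured
        // Same six-argument constructor used in loadCatalog above.
        return new ConnectorContext(catalogName, catalogShardName, connectorType, config, registry, properties);
    }
}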

Example 5 with ConnectorContext

Use of com.netflix.metacat.common.server.connectors.ConnectorContext in project metacat by Netflix.

From class PolarisConnectorTableServiceTest, method init.

/**
 * Initialization.
 */
@BeforeEach
public void init() {
    connectorContext = new ConnectorContext(CATALOG_NAME, CATALOG_NAME, "polaris", new DefaultConfigImpl(new MetacatProperties()), new NoopRegistry(), null, Maps.newHashMap());
    polarisDBService = new PolarisConnectorDatabaseService(polarisStoreService, connectorContext);
    polarisTableService = new PolarisConnectorTableService(polarisStoreService, CATALOG_NAME, polarisDBService, new HiveConnectorInfoConverter(new HiveTypeConverter()), new IcebergTableHandler(connectorContext, new IcebergTableCriteriaImpl(connectorContext), new IcebergTableOpWrapper(connectorContext, serviceManager), new IcebergTableOpsProxy()), new PolarisTableMapper(CATALOG_NAME), connectorContext);
}
Also used : HiveTypeConverter(com.netflix.metacat.connector.hive.converters.HiveTypeConverter) IcebergTableOpsProxy(com.netflix.metacat.connector.hive.iceberg.IcebergTableOpsProxy) PolarisTableMapper(com.netflix.metacat.connector.polaris.mappers.PolarisTableMapper) IcebergTableHandler(com.netflix.metacat.connector.hive.iceberg.IcebergTableHandler) IcebergTableCriteriaImpl(com.netflix.metacat.connector.hive.iceberg.IcebergTableCriteriaImpl) DefaultConfigImpl(com.netflix.metacat.common.server.properties.DefaultConfigImpl) MetacatProperties(com.netflix.metacat.common.server.properties.MetacatProperties) NoopRegistry(com.netflix.spectator.api.NoopRegistry) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) IcebergTableOpWrapper(com.netflix.metacat.connector.hive.iceberg.IcebergTableOpWrapper) HiveConnectorInfoConverter(com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter) BeforeEach(org.junit.jupiter.api.BeforeEach)
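
As a follow-up, a small assertion sketch that could sit in the same test class (using org.junit.jupiter.api.Assertions and org.junit.jupiter.api.Test). It assumes ConnectorContext exposes Lombok-generated getters such as getCatalogName(), getConfig(), and getRegistry(); those accessor names are assumptions, not shown in the snippet above.

@Test
public void contextCarriesCatalogMetadata() {
    // Getter names assumed from ConnectorContext's Lombok-generated accessors.
    Assertions.assertEquals(CATALOG_NAME, connectorContext.getCatalogName());
    Assertions.assertNotNull(connectorContext.getConfig());
    Assertions.assertNotNull(connectorContext.getRegistry());
}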

Aggregations

ConnectorContext (com.netflix.metacat.common.server.connectors.ConnectorContext): 17 usages
QualifiedName (com.netflix.metacat.common.QualifiedName): 10 usages
Lists (com.google.common.collect.Lists): 8 usages
HiveConnectorInfoConverter (com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter): 8 usages
HashMap (java.util.HashMap): 8 usages
List (java.util.List): 8 usages
Map (java.util.Map): 8 usages
Nonnull (javax.annotation.Nonnull): 8 usages
Inject (javax.inject.Inject): 8 usages
Named (javax.inject.Named): 8 usages
Pageable (com.netflix.metacat.common.dto.Pageable): 7 usages
Sort (com.netflix.metacat.common.dto.Sort): 7 usages
Nullable (javax.annotation.Nullable): 7 usages
NonNull (lombok.NonNull): 7 usages
Strings (com.google.common.base.Strings): 6 usages
Maps (com.google.common.collect.Maps): 6 usages
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException): 6 usages
MetacatRequestContext (com.netflix.metacat.common.MetacatRequestContext): 5 usages
ConnectorPartitionService (com.netflix.metacat.common.server.connectors.ConnectorPartitionService): 5 usages
TableNotFoundException (com.netflix.metacat.common.server.connectors.exception.TableNotFoundException): 5 usages