Example 16 with TableInfo

Use of com.netflix.metacat.common.server.connectors.model.TableInfo in project metacat by Netflix.

In the class JdbcConnectorTableService, method get:

/**
 * {@inheritDoc}
 */
@Override
public TableInfo get(@Nonnull final ConnectorRequestContext context, @Nonnull final QualifiedName name) {
    log.debug("Beginning to get table metadata for qualified name {} for request {}", name, context);
    try (Connection connection = this.dataSource.getConnection()) {
        final String database = name.getDatabaseName();
        connection.setSchema(database);
        final ImmutableList.Builder<FieldInfo> fields = ImmutableList.builder();
        try (ResultSet columns = this.getColumns(connection, name)) {
            while (columns.next()) {
                final String type = columns.getString("TYPE_NAME");
                final String size = columns.getString("COLUMN_SIZE");
                final String precision = columns.getString("DECIMAL_DIGITS");
                final String sourceType = this.buildSourceType(type, size, precision);
                final FieldInfo.FieldInfoBuilder fieldInfo = FieldInfo.builder()
                    .name(columns.getString("COLUMN_NAME"))
                    .sourceType(sourceType)
                    .type(this.typeConverter.toMetacatType(sourceType))
                    .comment(columns.getString("REMARKS"))
                    .isNullable(columns.getString("IS_NULLABLE").equals("YES"))
                    .defaultValue(columns.getString("COLUMN_DEF"));
                if (size != null) {
                    fieldInfo.size(Integer.parseInt(size));
                }
                fields.add(fieldInfo.build());
            }
        }
        final List<FieldInfo> fieldInfos = fields.build();
        // If table does not exist, throw TableNotFoundException.
        if (fieldInfos.isEmpty() && !exists(context, name)) {
            throw new TableNotFoundException(name);
        }
        // Set table details
        final TableInfo result = TableInfo.builder().name(name).fields(fieldInfos).build();
        setTableInfoDetails(connection, result);
        log.debug("Finished getting table metadata for qualified name {} for request {}", name, context);
        return result;
    } catch (final SQLException se) {
        throw new ConnectorException(se.getMessage(), se);
    }
}
Also used : TableNotFoundException(com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) SQLException(java.sql.SQLException) ImmutableList(com.google.common.collect.ImmutableList) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) TableInfo(com.netflix.metacat.common.server.connectors.model.TableInfo) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)
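
The helper this.getColumns(connection, name) is not shown in this snippet. A minimal sketch of what it might look like, assuming it simply delegates to the standard java.sql.DatabaseMetaData#getColumns call (the actual metacat implementation may resolve catalog and schema names differently):

// Hypothetical sketch only; not the actual JdbcConnectorTableService implementation.
// The column labels read in the loop above (TYPE_NAME, COLUMN_SIZE, DECIMAL_DIGITS,
// REMARKS, IS_NULLABLE, COLUMN_DEF) are standard JDBC DatabaseMetaData columns.
private ResultSet getColumns(final Connection connection, final QualifiedName name) throws SQLException {
    final DatabaseMetaData metaData = connection.getMetaData();
    // null column pattern: return metadata for every column of the table
    return metaData.getColumns(connection.getCatalog(), name.getDatabaseName(), name.getTableName(), null);
}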

Example 17 with TableInfo

Use of com.netflix.metacat.common.server.connectors.model.TableInfo in project metacat by Netflix.

In the class CassandraConnectorTableService, method list:

/**
 * {@inheritDoc}
 */
@Override
public List<TableInfo> list(
        @Nonnull @NonNull final ConnectorRequestContext context,
        @Nonnull @NonNull final QualifiedName name,
        @Nullable final QualifiedName prefix,
        @Nullable final Sort sort,
        @Nullable final Pageable pageable) {
    final String keyspace = name.getDatabaseName();
    log.debug("Attempting to list tables in Cassandra keyspace {} for request {}", keyspace, context);
    try {
        final KeyspaceMetadata keyspaceMetadata = this.getCluster().getMetadata().getKeyspace(keyspace);
        if (keyspaceMetadata == null) {
            throw new DatabaseNotFoundException(name);
        }
        // TODO: Should we include views?
        final List<TableInfo> tables = Lists.newArrayList();
        for (final TableMetadata tableMetadata : keyspaceMetadata.getTables()) {
            if (prefix != null && !tableMetadata.getName().startsWith(prefix.getTableName())) {
                continue;
            }
            tables.add(this.getTableInfo(name, tableMetadata));
        }
        // Sort
        if (sort != null) {
            final Comparator<TableInfo> tableComparator = Comparator.comparing((t) -> t.getName().getTableName());
            ConnectorUtils.sort(tables, sort, tableComparator);
        }
        // Paging
        final List<TableInfo> pagedTables = ConnectorUtils.paginate(tables, pageable);
        log.debug("Listed {} tables in Cassandra keyspace {} for request {}", pagedTables.size(), keyspace, context);
        return pagedTables;
    } catch (final DriverException de) {
        log.error(de.getMessage(), de);
        throw this.getExceptionMapper().toConnectorException(de, name);
    }
}
Also used : TableMetadata(com.datastax.driver.core.TableMetadata) DatabaseNotFoundException(com.netflix.metacat.common.server.connectors.exception.DatabaseNotFoundException) TableInfo(com.netflix.metacat.common.server.connectors.model.TableInfo) DriverException(com.datastax.driver.core.exceptions.DriverException) KeyspaceMetadata(com.datastax.driver.core.KeyspaceMetadata)
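
The prefix filter, sort, and paging above lean on ConnectorUtils. As a rough illustration of the paging step, here is a hedged sketch of what a paginate helper could look like, assuming Pageable exposes Integer getOffset() and getLimit() accessors (the real ConnectorUtils.paginate in metacat may be implemented differently):

// Hypothetical sketch only; not the actual ConnectorUtils.paginate implementation.
static <T> List<T> paginate(final List<T> items, @Nullable final Pageable pageable) {
    if (pageable == null || pageable.getLimit() == null) {
        // No paging requested: return the list unchanged.
        return items;
    }
    final int offset = pageable.getOffset() == null ? 0 : pageable.getOffset();
    final int from = Math.min(offset, items.size());
    final int to = Math.min(from + pageable.getLimit(), items.size());
    return Lists.newArrayList(items.subList(from, to));
}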

Example 18 with TableInfo

Use of com.netflix.metacat.common.server.connectors.model.TableInfo in project metacat by Netflix.

In the class HiveConnectorPartitionService, method addUpdateDropPartitions:

protected void addUpdateDropPartitions(
        final QualifiedName tableQName,
        final Table table,
        final List<String> partitionNames,
        final List<PartitionInfo> addedPartitionInfos,
        final List<PartitionHolder> existingPartitionInfos,
        final Set<String> deletePartitionNames) {
    final String databaseName = table.getDbName();
    final String tableName = table.getTableName();
    final TableInfo tableInfo = hiveMetacatConverters.toTableInfo(tableQName, table);
    try {
        final List<Partition> existingPartitions = existingPartitionInfos.stream()
            .map(p -> hiveMetacatConverters.fromPartitionInfo(tableInfo, p.getPartitionInfo()))
            .collect(Collectors.toList());
        final List<Partition> addedPartitions = addedPartitionInfos.stream()
            .map(p -> hiveMetacatConverters.fromPartitionInfo(tableInfo, p))
            .collect(Collectors.toList());
        // If alterIfExists=true, alter the partitions that already exist.
        if (!existingPartitionInfos.isEmpty()) {
            copyTableSdToPartitionSd(existingPartitions, table);
            metacatHiveClient.alterPartitions(databaseName, tableName, existingPartitions);
        }
        // Copy the storage details from the table if the partition does not contain the details.
        copyTableSdToPartitionSd(addedPartitions, table);
        // Drop partitions with ids in 'deletePartitionNames' and add 'addedPartitionInfos' partitions
        metacatHiveClient.addDropPartitions(databaseName, tableName, addedPartitions, Lists.newArrayList(deletePartitionNames));
    } catch (NoSuchObjectException exception) {
        if (exception.getMessage() != null && exception.getMessage().startsWith("Partition doesn't exist")) {
            throw new PartitionNotFoundException(tableQName, "", exception);
        } else {
            throw new TableNotFoundException(tableQName, exception);
        }
    } catch (MetaException | InvalidObjectException exception) {
        throw new InvalidMetaException("One or more partitions are invalid.", exception);
    } catch (AlreadyExistsException e) {
        throw new PartitionAlreadyExistsException(tableQName, partitionNames, e);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed savePartitions hive table %s", tableName), exception);
    }
}
Also used : StringUtils(org.apache.commons.lang.StringUtils) Getter(lombok.Getter) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) SortOrder(com.netflix.metacat.common.dto.SortOrder) AuditInfo(com.netflix.metacat.common.server.connectors.model.AuditInfo) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) Partition(org.apache.hadoop.hive.metastore.api.Partition) Function(java.util.function.Function) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) ArrayList(java.util.ArrayList) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) Strings(com.google.common.base.Strings) ConnectorPartitionService(com.netflix.metacat.common.server.connectors.ConnectorPartitionService) InvalidMetaException(com.netflix.metacat.common.server.connectors.exception.InvalidMetaException) Lists(com.google.common.collect.Lists) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) PartitionInfo(com.netflix.metacat.common.server.connectors.model.PartitionInfo) Map(java.util.Map) ConnectorContext(com.netflix.metacat.common.server.connectors.ConnectorContext) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) HiveConnectorInfoConverter(com.netflix.metacat.connector.hive.converters.HiveConnectorInfoConverter) PartitionUtil(com.netflix.metacat.common.server.partition.util.PartitionUtil) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) ConnectorRequestContext(com.netflix.metacat.common.server.connectors.ConnectorRequestContext) Nullable(javax.annotation.Nullable) PartitionHolder(com.netflix.metacat.connector.hive.sql.PartitionHolder) Pageable(com.netflix.metacat.common.dto.Pageable) TException(org.apache.thrift.TException) Set(java.util.Set) QualifiedName(com.netflix.metacat.common.QualifiedName) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) TableNotFoundException(com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) Table(org.apache.hadoop.hive.metastore.api.Table) PartitionsSaveResponse(com.netflix.metacat.common.server.connectors.model.PartitionsSaveResponse) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) List(java.util.List) PartitionAlreadyExistsException(com.netflix.metacat.common.server.connectors.exception.PartitionAlreadyExistsException) TableInfo(com.netflix.metacat.common.server.connectors.model.TableInfo) PartitionsSaveRequest(com.netflix.metacat.common.server.connectors.model.PartitionsSaveRequest) PartitionListRequest(com.netflix.metacat.common.server.connectors.model.PartitionListRequest) ConnectorUtils(com.netflix.metacat.common.server.connectors.ConnectorUtils) PartitionNotFoundException(com.netflix.metacat.common.server.connectors.exception.PartitionNotFoundException) Collections(java.util.Collections) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Sort(com.netflix.metacat.common.dto.Sort)
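
The copyTableSdToPartitionSd helper referenced above is not shown in this snippet. A minimal sketch of the idea described by the comment ("copy the storage details from the table if the partition does not contain the details"), assuming the standard Hive metastore thrift types; the actual HiveConnectorPartitionService implementation may copy or merge more fields:

// Hypothetical sketch only; not the actual HiveConnectorPartitionService implementation.
private void copyTableSdToPartitionSd(final List<Partition> partitions, final Table table) {
    for (final Partition partition : partitions) {
        if (partition.getSd() == null) {
            // Partition carries no storage details: start from a copy of the table's descriptor.
            partition.setSd(new StorageDescriptor(table.getSd()));
        }
    }
}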

Aggregations

TableInfo (com.netflix.metacat.common.server.connectors.model.TableInfo) 18
QualifiedName (com.netflix.metacat.common.QualifiedName) 11
TableNotFoundException (com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) 11
Table (org.apache.hadoop.hive.metastore.api.Table) 9
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException) 7
Strings (com.google.common.base.Strings) 6
ImmutableList (com.google.common.collect.ImmutableList) 6
Lists (com.google.common.collect.Lists) 6
DatabaseNotFoundException (com.netflix.metacat.common.server.connectors.exception.DatabaseNotFoundException) 6
InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException) 6
PartitionInfo (com.netflix.metacat.common.server.connectors.model.PartitionInfo) 6
List (java.util.List) 6
Map (java.util.Map) 6
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 6
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 6
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 6
Partition (org.apache.hadoop.hive.metastore.api.Partition) 6
TException (org.apache.thrift.TException) 6
Pageable (com.netflix.metacat.common.dto.Pageable) 4
Sort (com.netflix.metacat.common.dto.Sort) 4