Search in sources :

Example 6 with FieldInfo

use of com.netflix.metacat.common.server.connectors.model.FieldInfo in project metacat by Netflix.

In the class HiveConnectorTableService, the method updateTable:

/**
 * Mutates the given Hive metastore {@code Table} in place so that it reflects the supplied
 * Metacat {@code TableInfo}: table parameters, storage descriptor (location, serde, input/output
 * formats), columns, and partition keys. Values absent from {@code tableInfo} leave the
 * corresponding fields of {@code table} untouched.
 *
 * @param requestContext the request context, used to resolve the parent database's location
 * @param table          the Hive table to update; mutated by this method
 * @param tableInfo      the Metacat table description to copy from
 * @throws MetaException on metastore conversion errors
 */
void updateTable(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final Table table, @Nonnull @NonNull final TableInfo tableInfo) throws MetaException {
    // Ensure the parameters map exists and is mutable before inserting defaults below.
    if (table.getParameters() == null || table.getParameters().isEmpty()) {
        table.setParameters(Maps.newHashMap());
    }
    // Default to an EXTERNAL table marker unless the parameter is already set.
    table.getParameters().putIfAbsent(PARAMETER_EXTERNAL, "TRUE");
    if (tableInfo.getMetadata() != null) {
        table.getParameters().putAll(tableInfo.getMetadata());
    }
    // Storage descriptor: reuse the existing one when present so unspecified fields survive.
    final StorageDescriptor sd = table.getSd() != null ? table.getSd() : new StorageDescriptor();
    String inputFormat = null;
    String outputFormat = null;
    Map<String, String> sdParameters = Maps.newHashMap();
    final String location = tableInfo.getSerde() == null ? null : tableInfo.getSerde().getUri();
    if (location != null) {
        sd.setLocation(location);
    } else if (sd.getLocation() == null) {
        // No explicit location anywhere: default to <databaseUri>/<tableName>.
        final String locationStr = hiveConnectorDatabaseService.get(requestContext, QualifiedName.ofDatabase(tableInfo.getName().getCatalogName(), tableInfo.getName().getDatabaseName())).getUri();
        final Path databasePath = new Path(locationStr);
        final Path targetPath = new Path(databasePath, tableInfo.getName().getTableName());
        sd.setLocation(targetPath.toString());
    }
    if (sd.getSerdeInfo() == null) {
        sd.setSerdeInfo(new SerDeInfo());
    }
    final SerDeInfo serdeInfo = sd.getSerdeInfo();
    serdeInfo.setName(tableInfo.getName().getTableName());
    final StorageInfo storageInfo = tableInfo.getSerde();
    if (storageInfo != null) {
        // Copy only the serde fields the caller actually supplied; empty/null values keep
        // whatever the table already has.
        if (!Strings.isNullOrEmpty(storageInfo.getSerializationLib())) {
            serdeInfo.setSerializationLib(storageInfo.getSerializationLib());
        }
        if (storageInfo.getSerdeInfoParameters() != null && !storageInfo.getSerdeInfoParameters().isEmpty()) {
            serdeInfo.setParameters(storageInfo.getSerdeInfoParameters());
        }
        inputFormat = storageInfo.getInputFormat();
        outputFormat = storageInfo.getOutputFormat();
        if (storageInfo.getParameters() != null && !storageInfo.getParameters().isEmpty()) {
            sdParameters = storageInfo.getParameters();
        }
    } else if (table.getSd() != null) {
        // No serde info supplied: derive serde and formats from the table's current storage format.
        final HiveStorageFormat hiveStorageFormat = extractHiveStorageFormat(table);
        serdeInfo.setSerializationLib(hiveStorageFormat.getSerde());
        serdeInfo.setParameters(ImmutableMap.<String, String>of());
        inputFormat = hiveStorageFormat.getInputFormat();
        outputFormat = hiveStorageFormat.getOutputFormat();
    }
    // Split the Metacat fields into regular columns and partition keys.
    final ImmutableList.Builder<FieldSchema> columnsBuilder = ImmutableList.builder();
    final ImmutableList.Builder<FieldSchema> partitionKeysBuilder = ImmutableList.builder();
    if (tableInfo.getFields() != null) {
        for (FieldInfo column : tableInfo.getFields()) {
            final FieldSchema field = hiveMetacatConverters.metacatToHiveField(column);
            if (column.isPartitionKey()) {
                partitionKeysBuilder.add(field);
            } else {
                columnsBuilder.add(field);
            }
        }
    }
    final ImmutableList<FieldSchema> columns = columnsBuilder.build();
    // Only overwrite existing columns/formats when new non-empty values were provided.
    if (!columns.isEmpty()) {
        sd.setCols(columns);
    }
    if (!Strings.isNullOrEmpty(inputFormat)) {
        sd.setInputFormat(inputFormat);
    }
    if (!Strings.isNullOrEmpty(outputFormat)) {
        sd.setOutputFormat(outputFormat);
    }
    if (sd.getParameters() == null) {
        sd.setParameters(sdParameters);
    }
    // Partition keys: leave the table's existing keys untouched when the update carries none.
    final ImmutableList<FieldSchema> partitionKeys = partitionKeysBuilder.build();
    if (!partitionKeys.isEmpty()) {
        table.setPartitionKeys(partitionKeys);
    }
    table.setSd(sd);
}
Also used : Path(org.apache.hadoop.fs.Path) ImmutableList(com.google.common.collect.ImmutableList) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)

Example 7 with FieldInfo

use of com.netflix.metacat.common.server.connectors.model.FieldInfo in project metacat by Netflix.

In the class CassandraConnectorTableService, the method getTableInfo:

/**
 * Builds a Metacat {@link TableInfo} from the given Cassandra table metadata, converting every
 * column's Cassandra type into its Metacat equivalent.
 *
 * @param name          the qualified name supplying the catalog and database components
 * @param tableMetadata the Cassandra driver metadata for the table
 * @return the assembled table info
 */
private TableInfo getTableInfo(@Nonnull @NonNull final QualifiedName name, @Nonnull @NonNull final TableMetadata tableMetadata) {
    // TODO: Ignores clustering, primary key, index, etc columns. We need to rework TableInfo to support
    final ImmutableList.Builder<FieldInfo> fields = ImmutableList.builder();
    for (final ColumnMetadata columnMetadata : tableMetadata.getColumns()) {
        final String sourceType = columnMetadata.getType().toString();
        final FieldInfo field = FieldInfo.builder()
            .name(columnMetadata.getName())
            .sourceType(sourceType)
            .type(this.typeConverter.toMetacatType(sourceType))
            .build();
        fields.add(field);
    }
    final QualifiedName tableName = QualifiedName.ofTable(name.getCatalogName(), name.getDatabaseName(), tableMetadata.getName());
    return TableInfo.builder().name(tableName).fields(fields.build()).build();
}
Also used : ColumnMetadata(com.datastax.driver.core.ColumnMetadata) ImmutableList(com.google.common.collect.ImmutableList) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)

Example 8 with FieldInfo

use of com.netflix.metacat.common.server.connectors.model.FieldInfo in project metacat by Netflix.

In the class HiveConnectorTableService, the method updateTable:

/**
 * Mutates the given Hive metastore {@code Table} in place so that it reflects the supplied
 * Metacat {@code TableInfo}: table parameters, storage descriptor (location, serde, input/output
 * formats), columns, and partition keys. Virtual views are validated and updated separately
 * rather than being marked EXTERNAL. Values absent from {@code tableInfo} leave the
 * corresponding fields of {@code table} untouched.
 *
 * @param requestContext the request context, used to resolve the parent database's location
 * @param table          the Hive table to update; mutated by this method
 * @param tableInfo      the Metacat table description to copy from
 * @throws MetaException on metastore conversion errors
 */
void updateTable(final ConnectorRequestContext requestContext, final Table table, final TableInfo tableInfo) throws MetaException {
    // Ensure the parameters map exists and is mutable before inserting defaults below.
    if (table.getParameters() == null || table.getParameters().isEmpty()) {
        table.setParameters(Maps.newHashMap());
    }
    // Mark non-view tables EXTERNAL; virtual views keep their type and are validated instead.
    if (!isVirtualView(table)) {
        table.getParameters().putIfAbsent(PARAMETER_EXTERNAL, "TRUE");
    } else {
        validAndUpdateVirtualView(table);
    }
    if (tableInfo.getMetadata() != null) {
        table.getParameters().putAll(tableInfo.getMetadata());
    }
    // Storage descriptor: reuse the existing one when present so unspecified fields survive.
    final StorageDescriptor sd = table.getSd() != null ? table.getSd() : new StorageDescriptor();
    String inputFormat = null;
    String outputFormat = null;
    Map<String, String> sdParameters = Maps.newHashMap();
    final String location = tableInfo.getSerde() == null ? null : tableInfo.getSerde().getUri();
    if (location != null) {
        sd.setLocation(location);
    } else if (sd.getLocation() == null) {
        // No explicit location anywhere: default to <databaseUri>/<tableName>.
        final String locationStr = hiveConnectorDatabaseService.get(requestContext, QualifiedName.ofDatabase(tableInfo.getName().getCatalogName(), tableInfo.getName().getDatabaseName())).getUri();
        final Path databasePath = new Path(locationStr);
        final Path targetPath = new Path(databasePath, tableInfo.getName().getTableName());
        sd.setLocation(targetPath.toString());
    }
    if (sd.getSerdeInfo() == null) {
        sd.setSerdeInfo(new SerDeInfo());
    }
    final SerDeInfo serdeInfo = sd.getSerdeInfo();
    serdeInfo.setName(tableInfo.getName().getTableName());
    final StorageInfo storageInfo = tableInfo.getSerde();
    if (storageInfo != null) {
        // Copy only the serde fields the caller actually supplied; empty/null values keep
        // whatever the table already has.
        if (!Strings.isNullOrEmpty(storageInfo.getSerializationLib())) {
            serdeInfo.setSerializationLib(storageInfo.getSerializationLib());
        }
        if (storageInfo.getSerdeInfoParameters() != null && !storageInfo.getSerdeInfoParameters().isEmpty()) {
            serdeInfo.setParameters(storageInfo.getSerdeInfoParameters());
        }
        inputFormat = storageInfo.getInputFormat();
        outputFormat = storageInfo.getOutputFormat();
        if (storageInfo.getParameters() != null && !storageInfo.getParameters().isEmpty()) {
            sdParameters = storageInfo.getParameters();
        }
    } else if (table.getSd() != null) {
        // No serde info supplied: derive serde and formats from the table's current storage format.
        final HiveStorageFormat hiveStorageFormat = this.extractHiveStorageFormat(table);
        serdeInfo.setSerializationLib(hiveStorageFormat.getSerde());
        serdeInfo.setParameters(ImmutableMap.of());
        inputFormat = hiveStorageFormat.getInputFormat();
        outputFormat = hiveStorageFormat.getOutputFormat();
    }
    // Split the Metacat fields into regular columns and partition keys.
    final ImmutableList.Builder<FieldSchema> columnsBuilder = ImmutableList.builder();
    final ImmutableList.Builder<FieldSchema> partitionKeysBuilder = ImmutableList.builder();
    if (tableInfo.getFields() != null) {
        for (FieldInfo column : tableInfo.getFields()) {
            final FieldSchema field = hiveMetacatConverters.metacatToHiveField(column);
            if (column.isPartitionKey()) {
                partitionKeysBuilder.add(field);
            } else {
                columnsBuilder.add(field);
            }
        }
    }
    final ImmutableList<FieldSchema> columns = columnsBuilder.build();
    // Only overwrite existing columns/formats when new non-empty values were provided.
    if (!columns.isEmpty()) {
        sd.setCols(columns);
    }
    if (!Strings.isNullOrEmpty(inputFormat)) {
        sd.setInputFormat(inputFormat);
    }
    if (!Strings.isNullOrEmpty(outputFormat)) {
        sd.setOutputFormat(outputFormat);
    }
    if (sd.getParameters() == null) {
        sd.setParameters(sdParameters);
    }
    // Partition keys: leave the table's existing keys untouched when the update carries none.
    final ImmutableList<FieldSchema> partitionKeys = partitionKeysBuilder.build();
    if (!partitionKeys.isEmpty()) {
        table.setPartitionKeys(partitionKeys);
    }
    table.setSd(sd);
}
Also used : Path(org.apache.hadoop.fs.Path) ImmutableList(com.google.common.collect.ImmutableList) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)

Example 9 with FieldInfo

use of com.netflix.metacat.common.server.connectors.model.FieldInfo in project metacat by Netflix.

In the class HiveConnectorInfoConverter, the method fromTableInfo:

/**
 * Converts from a Metacat {@code TableInfo} to the connector (Hive metastore) table.
 * A table whose view info carries original view text is converted as a VIRTUAL_VIEW;
 * everything else becomes an EXTERNAL_TABLE.
 *
 * @param tableInfo Metacat table info
 * @return connector table
 */
@Override
public Table fromTableInfo(final TableInfo tableInfo) {
    final QualifiedName name = tableInfo.getName();
    final String tableName = (name != null) ? name.getTableName() : "";
    final String databaseName = (name != null) ? name.getDatabaseName() : "";
    final StorageInfo storageInfo = tableInfo.getSerde();
    final String owner = (storageInfo != null && storageInfo.getOwner() != null) ? storageInfo.getOwner() : "";
    final AuditInfo auditInfo = tableInfo.getAudit();
    final int createTime = (auditInfo != null && auditInfo.getCreatedDate() != null) ? dateToEpochSeconds(auditInfo.getCreatedDate()) : 0;
    final Map<String, String> params = (tableInfo.getMetadata() != null) ? tableInfo.getMetadata() : new HashMap<>();
    // Split the Metacat fields into partition and non-partition Hive columns.
    final List<FieldInfo> fields = tableInfo.getFields();
    List<FieldSchema> partitionFields = Collections.emptyList();
    List<FieldSchema> nonPartitionFields = Collections.emptyList();
    if (fields != null) {
        nonPartitionFields = Lists.newArrayListWithCapacity(fields.size());
        partitionFields = Lists.newArrayListWithCapacity(fields.size());
        for (FieldInfo fieldInfo : fields) {
            if (fieldInfo.isPartitionKey()) {
                partitionFields.add(metacatToHiveField(fieldInfo));
            } else {
                nonPartitionFields.add(metacatToHiveField(fieldInfo));
            }
        }
    }
    final StorageDescriptor sd = fromStorageInfo(storageInfo, nonPartitionFields);
    // Decide the table type once instead of re-comparing type names for every constructor arg.
    final ViewInfo viewInfo = tableInfo.getView();
    final boolean isVirtualView = viewInfo != null && !Strings.isNullOrEmpty(viewInfo.getViewOriginalText());
    final String tableType = isVirtualView ? TableType.VIRTUAL_VIEW.name() : TableType.EXTERNAL_TABLE.name();
    return new Table(tableName, databaseName, owner, createTime, 0, 0, sd, partitionFields, params,
        isVirtualView ? viewInfo.getViewOriginalText() : null,
        isVirtualView ? viewInfo.getViewExpandedText() : null,
        tableType);
}
Also used : AuditInfo(com.netflix.metacat.common.server.connectors.model.AuditInfo) Table(org.apache.hadoop.hive.metastore.api.Table) QualifiedName(com.netflix.metacat.common.QualifiedName) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) ViewInfo(com.netflix.metacat.common.server.connectors.model.ViewInfo) StorageInfo(com.netflix.metacat.common.server.connectors.model.StorageInfo) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)

Example 10 with FieldInfo

use of com.netflix.metacat.common.server.connectors.model.FieldInfo in project metacat by Netflix.

In the class JdbcConnectorTableService, the method get:

/**
 * {@inheritDoc}
 *
 * <p>Reads the column metadata for the named table over JDBC and assembles a
 * {@code TableInfo}. Throws {@code TableNotFoundException} when the table has no
 * columns and does not exist; wraps any {@code SQLException} in a
 * {@code ConnectorException}.</p>
 */
@Override
public TableInfo get(@Nonnull final ConnectorRequestContext context, @Nonnull final QualifiedName name) {
    log.debug("Beginning to get table metadata for qualified name {} for request {}", name, context);
    try (Connection connection = this.dataSource.getConnection()) {
        final String database = name.getDatabaseName();
        connection.setSchema(database);
        final ImmutableList.Builder<FieldInfo> fields = ImmutableList.builder();
        try (ResultSet columns = this.getColumns(connection, name)) {
            while (columns.next()) {
                // Reconstruct the source type string (e.g. "DECIMAL(10, 2)") from the JDBC metadata.
                final String type = columns.getString("TYPE_NAME");
                final String size = columns.getString("COLUMN_SIZE");
                final String precision = columns.getString("DECIMAL_DIGITS");
                final String sourceType = this.buildSourceType(type, size, precision);
                final FieldInfo.FieldInfoBuilder fieldInfo = FieldInfo.builder()
                    .name(columns.getString("COLUMN_NAME"))
                    .sourceType(sourceType)
                    .type(this.typeConverter.toMetacatType(sourceType))
                    .comment(columns.getString("REMARKS"))
                    .isNullable(columns.getString("IS_NULLABLE").equals("YES"))
                    .defaultValue(columns.getString("COLUMN_DEF"));
                if (size != null) {
                    fieldInfo.size(Integer.parseInt(size));
                }
                fields.add(fieldInfo.build());
            }
        }
        final List<FieldInfo> fieldInfos = fields.build();
        // If table does not exist, throw TableNotFoundException.
        if (fieldInfos.isEmpty() && !exists(context, name)) {
            throw new TableNotFoundException(name);
        }
        // Set table details; reuse the already-built list instead of calling build() a second time.
        final TableInfo result = TableInfo.builder().name(name).fields(fieldInfos).build();
        setTableInfoDetails(connection, result);
        log.debug("Finished getting table metadata for qualified name {} for request {}", name, context);
        return result;
    } catch (final SQLException se) {
        throw new ConnectorException(se.getMessage(), se);
    }
}
Also used : TableNotFoundException(com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) SQLException(java.sql.SQLException) ImmutableList(com.google.common.collect.ImmutableList) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) TableInfo(com.netflix.metacat.common.server.connectors.model.TableInfo) FieldInfo(com.netflix.metacat.common.server.connectors.model.FieldInfo)

Aggregations

FieldInfo (com.netflix.metacat.common.server.connectors.model.FieldInfo)10 ImmutableList (com.google.common.collect.ImmutableList)8 StorageInfo (com.netflix.metacat.common.server.connectors.model.StorageInfo)5 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)5 StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)5 SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)4 QualifiedName (com.netflix.metacat.common.QualifiedName)3 AuditInfo (com.netflix.metacat.common.server.connectors.model.AuditInfo)3 TableInfo (com.netflix.metacat.common.server.connectors.model.TableInfo)3 ViewInfo (com.netflix.metacat.common.server.connectors.model.ViewInfo)3 Table (org.apache.hadoop.hive.metastore.api.Table)3 VisibleForTesting (com.google.common.annotations.VisibleForTesting)2 Preconditions (com.google.common.base.Preconditions)2 Splitter (com.google.common.base.Splitter)2 Strings (com.google.common.base.Strings)2 Lists (com.google.common.collect.Lists)2 ConnectorInfoConverter (com.netflix.metacat.common.server.connectors.ConnectorInfoConverter)2 DatabaseInfo (com.netflix.metacat.common.server.connectors.model.DatabaseInfo)2 PartitionInfo (com.netflix.metacat.common.server.connectors.model.PartitionInfo)2 HiveTableUtil (com.netflix.metacat.connector.hive.util.HiveTableUtil)2