Search in sources:

Example 1 with Alter

use of com.datastax.driver.core.schemabuilder.Alter in project sdc by onap.

the class SdcSchemaBuilder method alterTable.

/**
 * Checks for columns that were added to the table definition but do not yet exist in the DB, and adds them.
 *
 * @param session                active Cassandra session used to execute the ALTER statements
 * @param existingTablesMetadata map of table name to the column names that already exist in the DB
 * @param tableDescription       description of the table (keyspace and table name)
 * @param tableName              name of the table to check
 * @param columnDescription      map of column name to a pair of data type and boolean flag
 */
private static void alterTable(Session session, Map<String, List<String>> existingTablesMetadata, ITableDescription tableDescription, String tableName, Map<String, ImmutablePair<DataType, Boolean>> columnDescription) {
    List<String> definedTableColumns = existingTablesMetadata.get(tableName);
    // add the column to Cassandra if it was added to the table definition
    for (Map.Entry<String, ImmutablePair<DataType, Boolean>> column : columnDescription.entrySet()) {
        String columnName = column.getKey();
        if (!definedTableColumns.contains(columnName.toLowerCase())) {
            log.info("Adding new column {} to the table {}", columnName, tableName);
            Alter alter = SchemaBuilder.alterTable(tableDescription.getKeyspace(), tableDescription.getTableName());
            SchemaStatement addColumn = alter.addColumn(columnName).type(column.getValue().getLeft());
            log.trace("executing :{}", addColumn);
            session.execute(addColumn);
        }
    }
}
Also used: Alter(com.datastax.driver.core.schemabuilder.Alter) ImmutablePair(org.apache.commons.lang3.tuple.ImmutablePair) SchemaStatement(com.datastax.driver.core.schemabuilder.SchemaStatement) HashMap(java.util.HashMap) Map(java.util.Map)
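
The pattern above — SchemaBuilder.alterTable(keyspace, table) returning an Alter, then addColumn(name).type(dataType) producing an executable SchemaStatement — can be reproduced in a minimal standalone program. Below is a sketch against the driver 3.x API; the contact point, keyspace, table, and column names are placeholders, not taken from the project.

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.schemabuilder.Alter;
import com.datastax.driver.core.schemabuilder.SchemaBuilder;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

public class AddColumnSketch {
    public static void main(String[] args) {
        // Placeholder contact point; Cluster and Session are both Closeable in driver 3.x.
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            // Builds "ALTER TABLE my_ks.my_table ADD new_col text;"
            Alter alter = SchemaBuilder.alterTable("my_ks", "my_table");
            SchemaStatement addColumn = alter.addColumn("new_col").type(DataType.text());
            session.execute(addColumn);
        }
    }
}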

Example 2 with Alter

use of com.datastax.driver.core.schemabuilder.Alter in project kafka-connect-scylladb by scylladb.

the class ScyllaDbSchemaBuilder method alter.

void alter(final ScyllaDbSchemaKey key, String tableName, SinkRecord record, TableMetadata.Table tableMetadata, TopicConfigs topicConfigs) {
    Preconditions.checkNotNull(tableMetadata, "tableMetadata cannot be null.");
    Preconditions.checkNotNull(record.valueSchema(), "valueSchema cannot be null.");
    log.trace("alter() - tableMetadata = '{}' ", tableMetadata);
    Map<String, DataType> addedColumns = new LinkedHashMap<>();
    if (topicConfigs != null && topicConfigs.isScyllaColumnsMapped()) {
        if (topicConfigs.getTablePartitionKeyMap().keySet().size() != tableMetadata.primaryKey().size()) {
            throw new DataException(String.format("Cannot alter primary key of a ScyllaDb Table. Existing primary key: '%s', " + "Primary key mapped in 'topic.my_topic.my_ks.my_table.mapping' config: '%s", Joiner.on("', '").join(tableMetadata.primaryKey()), Joiner.on("', '").join(topicConfigs.getTablePartitionKeyMap().keySet())));
        }
        for (Map.Entry<String, TopicConfigs.KafkaScyllaColumnMapper> entry : topicConfigs.getTableColumnMap().entrySet()) {
            String columnName = entry.getValue().getScyllaColumnName();
            log.trace("alter for mapping() - Checking if table has '{}' column.", columnName);
            final TableMetadata.Column columnMetadata = tableMetadata.columnMetadata(columnName);
            if (null == columnMetadata) {
                log.debug("alter for mapping() - Adding column '{}'", columnName);
                final DataType dataType = dataType(entry.getValue().getKafkaRecordField().schema());
                addedColumns.put(Metadata.quoteIfNecessary(columnName), dataType);
            } else {
                log.trace("alter for mapping() - Table already has '{}' column.", columnName);
            }
        }
    } else {
        for (final Field field : record.valueSchema().fields()) {
            log.trace("alter() - Checking if table has '{}' column.", field.name());
            final TableMetadata.Column columnMetadata = tableMetadata.columnMetadata(field.name());
            if (null == columnMetadata) {
                log.debug("alter() - Adding column '{}'", field.name());
                DataType dataType = dataType(field.schema());
                addedColumns.put(Metadata.quoteIfNecessary(field.name()), dataType);
            } else {
                log.trace("alter() - Table already has '{}' column.", field.name());
            }
        }
    }
    if (!addedColumns.isEmpty()) {
        final Alter alterTable = SchemaBuilder.alterTable(this.config.keyspace, tableName);
        if (!this.config.tableManageEnabled) {
            List<String> requiredAlterStatements = addedColumns.entrySet().stream().map(e -> alterTable.addColumn(e.getKey()).type(e.getValue()).toString()).collect(Collectors.toList());
            throw new DataException(String.format("Alter statement(s) needed. Missing column(s): '%s'\n%s;", Joiner.on("', '").join(addedColumns.keySet()), Joiner.on(';').join(requiredAlterStatements)));
        } else {
            String query = alterTable.withOptions().compressionOptions(config.tableCompressionAlgorithm).buildInternal();
            this.session.executeQuery(query);
            for (Map.Entry<String, DataType> e : addedColumns.entrySet()) {
                final String columnName = e.getKey();
                final DataType dataType = e.getValue();
                final Statement alterStatement = alterTable.addColumn(columnName).type(dataType);
                this.session.executeStatement(alterStatement);
            }
            this.session.onTableChanged(this.config.keyspace, tableName);
        }
    }
    this.schemaLookup.put(key, DEFAULT);
}
Also used: Date(org.apache.kafka.connect.data.Date) DataException(org.apache.kafka.connect.errors.DataException) LoggerFactory(org.slf4j.LoggerFactory) SchemaChangeListenerBase(com.datastax.driver.core.SchemaChangeListenerBase) HashSet(java.util.HashSet) LinkedHashMap(java.util.LinkedHashMap) Strings(com.google.common.base.Strings) Map(java.util.Map) Alter(com.datastax.driver.core.schemabuilder.Alter) org.apache.kafka.connect.data(org.apache.kafka.connect.data) Create(com.datastax.driver.core.schemabuilder.Create) TopicConfig(org.apache.kafka.common.config.TopicConfig) SchemaBuilder(com.datastax.driver.core.schemabuilder.SchemaBuilder) TopicConfigs(io.connect.scylladb.topictotable.TopicConfigs) Logger(org.slf4j.Logger) MoreObjects(com.google.common.base.MoreObjects) Set(java.util.Set) TableOptions(com.datastax.driver.core.schemabuilder.TableOptions) ComparisonChain(com.google.common.collect.ComparisonChain) Collectors(java.util.stream.Collectors) Objects(java.util.Objects) TimeUnit(java.util.concurrent.TimeUnit) Metadata(com.datastax.driver.core.Metadata) List(java.util.List) DataType(com.datastax.driver.core.DataType) SinkRecord(org.apache.kafka.connect.sink.SinkRecord) Preconditions(com.google.common.base.Preconditions) CacheBuilder(com.google.common.cache.CacheBuilder) Cache(com.google.common.cache.Cache) Statement(com.datastax.driver.core.Statement) Joiner(com.google.common.base.Joiner)
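
This second example defers execution when table management is disabled (it renders the pending statements with toString() and raises a DataException listing them); when management is enabled it issues one ALTER per added column. The per-column kernel of that loop can be sketched on its own. The sketch below uses the plain driver Session rather than the connector's session wrapper, and builds a fresh Alter per column, which is equivalent since each addColumn(...).type(...) call yields its own statement; keyspace, table, and column names are placeholders.

import java.util.LinkedHashMap;
import java.util.Map;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.schemabuilder.SchemaBuilder;
import com.datastax.driver.core.schemabuilder.SchemaStatement;

public class AlterMissingColumnsSketch {

    // Adds every column in missingColumns to keyspace.table, one ALTER statement per column.
    static void addMissingColumns(Session session, String keyspace, String table,
                                  Map<String, DataType> missingColumns) {
        for (Map.Entry<String, DataType> e : missingColumns.entrySet()) {
            SchemaStatement addColumn = SchemaBuilder.alterTable(keyspace, table)
                    .addColumn(e.getKey())
                    .type(e.getValue());
            session.execute(addColumn);
        }
    }

    public static void main(String[] args) {
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            // Hypothetical missing columns, e.g. collected by diffing a record schema against table metadata.
            Map<String, DataType> missing = new LinkedHashMap<>();
            missing.put("created_at", DataType.timestamp());
            missing.put("payload", DataType.text());
            addMissingColumns(session, "my_ks", "my_table", missing);
        }
    }
}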

Aggregations

Alter (com.datastax.driver.core.schemabuilder.Alter) 2 Map (java.util.Map) 2 DataType (com.datastax.driver.core.DataType) 1 Metadata (com.datastax.driver.core.Metadata) 1 SchemaChangeListenerBase (com.datastax.driver.core.SchemaChangeListenerBase) 1 Statement (com.datastax.driver.core.Statement) 1 Create (com.datastax.driver.core.schemabuilder.Create) 1 SchemaBuilder (com.datastax.driver.core.schemabuilder.SchemaBuilder) 1 SchemaStatement (com.datastax.driver.core.schemabuilder.SchemaStatement) 1 TableOptions (com.datastax.driver.core.schemabuilder.TableOptions) 1 Joiner (com.google.common.base.Joiner) 1 MoreObjects (com.google.common.base.MoreObjects) 1 Preconditions (com.google.common.base.Preconditions) 1 Strings (com.google.common.base.Strings) 1 Cache (com.google.common.cache.Cache) 1 CacheBuilder (com.google.common.cache.CacheBuilder) 1 ComparisonChain (com.google.common.collect.ComparisonChain) 1 TopicConfigs (io.connect.scylladb.topictotable.TopicConfigs) 1 HashMap (java.util.HashMap) 1 HashSet (java.util.HashSet) 1