
Example 1 with MappingField

Use of com.datastax.oss.dsbulk.mapping.MappingField in project dsbulk by DataStax.

In class SchemaSettings, method inferBatchInsertQuery:

private String inferBatchInsertQuery(ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables) {
    List<CQLWord> pks = primaryKeyColumns();
    Set<CQLFragment> allSpecificVariables = new LinkedHashSet<>();
    Map<CQLWord, WriteTimeAndTTL> specificWriteTimesAndTTLs = new LinkedHashMap<>();
    boolean hasGlobalWritetime = false;
    boolean hasGlobalTTL = false;
    for (CQLFragment variable : fieldsToVariables.values()) {
        if (variable instanceof FunctionCall) {
            FunctionCall functionCall = (FunctionCall) variable;
            if (functionCall.getFunctionName().equals(WRITETIME)) {
                for (CQLFragment arg : functionCall.getArgs()) {
                    if (arg.equals(STAR)) {
                        if (preserveTimestamp) {
                            throw new IllegalStateException("Invalid mapping: writetime(*) is not allowed when schema.preserveTimestamp is true.");
                        }
                        hasGlobalWritetime = true;
                    } else {
                        CQLWord col = (CQLWord) arg;
                        if (pks.contains(col)) {
                            throw new IllegalStateException("Invalid mapping: writetime() function arg must be either '*' or a non-primary key column name.");
                        }
                        if (fieldsToVariables.containsValue(col)) {
                            allSpecificVariables.add(col);
                            allSpecificVariables.add(functionCall);
                            specificWriteTimesAndTTLs.compute(col, (k, v) -> {
                                if (v == null) {
                                    v = new WriteTimeAndTTL();
                                    MappingField colField = fieldsToVariables.inverse().get(col).iterator().next();
                                    v.value = colField instanceof CQLFragment ? (CQLFragment) colField : col;
                                }
                                MappingField writetimeField = fieldsToVariables.inverse().get(functionCall).iterator().next();
                                v.writetime = writetimeField instanceof CQLLiteral ? (CQLLiteral) writetimeField : CQLWord.fromInternal(functionCall.render(INTERNAL));
                                return v;
                            });
                        } else {
                            throw new IllegalStateException(String.format("Invalid mapping: target column %s must be present if %s is also present.", col.render(VARIABLE), functionCall.render(INTERNAL)));
                        }
                    }
                }
            } else if (functionCall.getFunctionName().equals(TTL)) {
                for (CQLFragment arg : functionCall.getArgs()) {
                    if (arg.equals(STAR)) {
                        if (preserveTtl) {
                            throw new IllegalStateException("Invalid mapping: ttl(*) is not allowed when schema.preserveTtl is true.");
                        }
                        hasGlobalTTL = true;
                    } else {
                        CQLWord col = (CQLWord) arg;
                        if (pks.contains(col)) {
                            throw new IllegalStateException("Invalid mapping: ttl() function arg must be either '*' or a non-primary key column name.");
                        }
                        if (fieldsToVariables.containsValue(col)) {
                            allSpecificVariables.add(col);
                            allSpecificVariables.add(functionCall);
                            specificWriteTimesAndTTLs.compute((CQLWord) arg, (k, v) -> {
                                if (v == null) {
                                    v = new WriteTimeAndTTL();
                                    MappingField colField = fieldsToVariables.inverse().get(col).iterator().next();
                                    v.value = colField instanceof CQLFragment ? (CQLFragment) colField : col;
                                }
                                MappingField ttlField = fieldsToVariables.inverse().get(functionCall).iterator().next();
                                v.ttl = ttlField instanceof CQLLiteral ? (CQLLiteral) ttlField : CQLWord.fromInternal(functionCall.render(INTERNAL));
                                return v;
                            });
                        } else {
                            throw new IllegalStateException(String.format("Invalid mapping: target column %s must be present if %s is also present.", col.render(VARIABLE), functionCall.render(INTERNAL)));
                        }
                    }
                }
            }
        }
    }
    ImmutableMultimap.Builder<MappingField, CQLFragment> defaultFieldsToVariablesBuilder = ImmutableMultimap.builder();
    for (Entry<MappingField, CQLFragment> entry : fieldsToVariables.entries()) {
        CQLFragment variable = entry.getValue();
        if (!allSpecificVariables.contains(variable)) {
            defaultFieldsToVariablesBuilder.put(entry);
        }
    }
    ImmutableMultimap<MappingField, CQLFragment> defaultFieldsToVariables = defaultFieldsToVariablesBuilder.build();
    boolean hasRegularColumnsWithoutSpecificWritetimeAndTTL = defaultFieldsToVariables.values().stream().filter(CQLWord.class::isInstance).map(CQLWord.class::cast).anyMatch(variable -> !pks.contains(variable));
    if (!hasRegularColumnsWithoutSpecificWritetimeAndTTL) {
        if (hasGlobalWritetime) {
            throw new IllegalStateException("Invalid mapping: writetime(*) function has no target column.");
        }
        if (hasGlobalTTL) {
            throw new IllegalStateException("Invalid mapping: ttl(*) function has no target column.");
        }
    }
    StringBuilder sb = new StringBuilder();
    if (!hasRegularColumnsWithoutSpecificWritetimeAndTTL && specificWriteTimesAndTTLs.size() == 1) {
        // edge case: there is only one regular column in the table,
        // and it has a specific writetime or TTL: no need for a BATCH as there is only one child
        // statement.
        Entry<CQLWord, WriteTimeAndTTL> entry = specificWriteTimesAndTTLs.entrySet().iterator().next();
        appendBatchChildQuery(sb, entry.getKey(), entry.getValue().value, entry.getValue().writetime, entry.getValue().ttl, pks);
    } else {
        sb.append("BEGIN UNLOGGED BATCH ");
        // generate a first INSERT INTO child query similar to the ones generated for simple INSERTs.
        if (hasRegularColumnsWithoutSpecificWritetimeAndTTL) {
            sb.append(inferInsertQuery(defaultFieldsToVariables)).append("; ");
        }
        // then generate one INSERT INTO child query per column that has a specific writetime and/or TTL.
        for (Entry<CQLWord, WriteTimeAndTTL> entry : specificWriteTimesAndTTLs.entrySet()) {
            appendBatchChildQuery(sb, entry.getKey(), entry.getValue().value, entry.getValue().writetime, entry.getValue().ttl, pks);
            sb.append("; ");
        }
        sb.append("APPLY BATCH");
    }
    return sb.toString();
}
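
For illustration only (not taken from the dsbulk sources): assuming a hypothetical table ks.t1 with primary key pk and regular columns v1 and v2, where the mapping assigns a specific writetime to v2 only, the method above would produce an unlogged batch of roughly the shape below. The exact bind variable names, including the one generated for writetime(v2), depend on appendBatchChildQuery and on the CQLRenderMode in use, neither of which is shown here, and the real method emits the whole statement on a single line.

BEGIN UNLOGGED BATCH
INSERT INTO ks.t1 (pk, v1) VALUES (:pk, :v1);
INSERT INTO ks.t1 (pk, v2) VALUES (:pk, :v2) USING TIMESTAMP :"writetime(v2)";
APPLY BATCH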
Also used : LinkedHashSet(java.util.LinkedHashSet) CQLLiteral(com.datastax.oss.dsbulk.mapping.CQLLiteral) TypedCQLLiteral(com.datastax.oss.dsbulk.mapping.TypedCQLLiteral) MAPPED_OR_INDEXED(com.datastax.oss.dsbulk.mapping.MappingPreference.MAPPED_OR_INDEXED) CqlIdentifier(com.datastax.oss.driver.api.core.CqlIdentifier) MAPPED_ONLY(com.datastax.oss.dsbulk.mapping.MappingPreference.MAPPED_ONLY) DefaultMapping(com.datastax.oss.dsbulk.mapping.DefaultMapping) GenericType(com.datastax.oss.driver.api.core.type.reflect.GenericType) TokenRangeReadStatementGenerator(com.datastax.oss.dsbulk.partitioner.TokenRangeReadStatementGenerator) ConfigUtils(com.datastax.oss.dsbulk.config.ConfigUtils) ALIASED_SELECTOR(com.datastax.oss.dsbulk.mapping.CQLRenderMode.ALIASED_SELECTOR) BatchType(com.datastax.oss.driver.api.core.cql.BatchType) Map(java.util.Map) VisibleForTesting(com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting) ProtocolVersion(com.datastax.oss.driver.api.core.ProtocolVersion) EnumSet(java.util.EnumSet) CQLRenderMode(com.datastax.oss.dsbulk.mapping.CQLRenderMode) VARIABLE(com.datastax.oss.dsbulk.mapping.CQLRenderMode.VARIABLE) MapType(com.datastax.oss.driver.api.core.type.MapType) GraphUtils(com.datastax.oss.dsbulk.workflow.commons.utils.GraphUtils) Set(java.util.Set) ImmutableList(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList) Field(com.datastax.oss.dsbulk.connectors.api.Field) Stream(java.util.stream.Stream) ConfigException(com.typesafe.config.ConfigException) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord) INDEXED_ONLY(com.datastax.oss.dsbulk.mapping.MappingPreference.INDEXED_ONLY) MICROSECONDS(java.util.concurrent.TimeUnit.MICROSECONDS) CQLLiteral(com.datastax.oss.dsbulk.mapping.CQLLiteral) ImmutableSet(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet) Preconditions(com.datastax.oss.driver.shaded.guava.common.base.Preconditions) STAR(com.datastax.oss.dsbulk.mapping.MappingInspector.STAR) ViewMetadata(com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) CqlSession(com.datastax.oss.driver.api.core.CqlSession) DefaultReadResultMapper(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultReadResultMapper) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) StreamSupport(java.util.stream.StreamSupport) Metadata(com.datastax.oss.driver.api.core.metadata.Metadata) LinkedHashSet(java.util.LinkedHashSet) DseEdgeMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata) TableMetadata(com.datastax.oss.driver.api.core.metadata.schema.TableMetadata) RelationMetadata(com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata) MappingPreference(com.datastax.oss.dsbulk.mapping.MappingPreference) Config(com.typesafe.config.Config) NestedBatchException(com.datastax.oss.dsbulk.workflow.commons.schema.NestedBatchException) DataType(com.datastax.oss.driver.api.core.type.DataType) ConvertingCodecFactory(com.datastax.oss.dsbulk.codecs.api.ConvertingCodecFactory) DefaultRecordMapper(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultRecordMapper) RecordMapper(com.datastax.oss.dsbulk.workflow.commons.schema.RecordMapper) DseTableMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) WRITETIME(com.datastax.oss.dsbulk.mapping.MappingInspector.WRITETIME) ReadResultMapper(com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultMapper) 
DseVertexMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata) Nullable(edu.umd.cs.findbugs.annotations.Nullable) StatisticsMode(com.datastax.oss.dsbulk.workflow.commons.settings.StatsSettings.StatisticsMode) DefaultReadResultCounter(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultReadResultCounter) CodecUtils.instantToNumber(com.datastax.oss.dsbulk.codecs.api.util.CodecUtils.instantToNumber) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) LoggerFactory(org.slf4j.LoggerFactory) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) Mapping(com.datastax.oss.dsbulk.mapping.Mapping) ReadResultCounter(com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultCounter) Lists(com.datastax.oss.driver.shaded.guava.common.collect.Lists) ConvertingCodec(com.datastax.oss.dsbulk.codecs.api.ConvertingCodec) NonNull(edu.umd.cs.findbugs.annotations.NonNull) Predicates(com.datastax.oss.driver.shaded.guava.common.base.Predicates) URI(java.net.URI) IndexMetadata(com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata) Record(com.datastax.oss.dsbulk.connectors.api.Record) ImmutableMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap) Predicate(java.util.function.Predicate) Collection(java.util.Collection) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) NAMED_ASSIGNMENT(com.datastax.oss.dsbulk.mapping.CQLRenderMode.NAMED_ASSIGNMENT) Objects(java.util.Objects) KeyspaceMetadata(com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata) DataTypes(com.datastax.oss.driver.api.core.type.DataTypes) List(java.util.List) Entry(java.util.Map.Entry) Optional(java.util.Optional) SetType(com.datastax.oss.driver.api.core.type.SetType) TIMESTAMP_PATTERN(com.datastax.oss.dsbulk.codecs.api.CommonConversionContext.TIMESTAMP_PATTERN) MappingInspector(com.datastax.oss.dsbulk.mapping.MappingInspector) DseGraphKeyspaceMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata) ListType(com.datastax.oss.driver.api.core.type.ListType) HashSet(java.util.HashSet) RecordMetadata(com.datastax.oss.dsbulk.connectors.api.RecordMetadata) ColumnDefinitions(com.datastax.oss.driver.api.core.cql.ColumnDefinitions) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) ImmutableSetMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap) INTERNAL(com.datastax.oss.dsbulk.mapping.CQLRenderMode.INTERNAL) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) TypedCQLLiteral(com.datastax.oss.dsbulk.mapping.TypedCQLLiteral) PreparedStatement(com.datastax.oss.driver.api.core.cql.PreparedStatement) DseGraphTableMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata) ColumnMetadata(com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata) DefaultProtocolVersion(com.datastax.oss.driver.api.core.DefaultProtocolVersion) UserDefinedType(com.datastax.oss.driver.api.core.type.UserDefinedType) TTL(com.datastax.oss.dsbulk.mapping.MappingInspector.TTL) Multimap(com.datastax.oss.driver.shaded.guava.common.collect.Multimap) EPOCH(java.time.Instant.EPOCH) Collections(java.util.Collections) Statement(com.datastax.oss.driver.api.core.cql.Statement) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) 
MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) LinkedHashMap(java.util.LinkedHashMap) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) ImmutableMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap)

Example 2 with MappingField

Use of com.datastax.oss.dsbulk.mapping.MappingField in project dsbulk by DataStax.

In class SchemaSettings, method inferUpdateCounterQuery:

private String inferUpdateCounterQuery(ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables) {
    StringBuilder sb = new StringBuilder("UPDATE ");
    sb.append(keyspaceName.render(VARIABLE)).append('.').append(tableName.render(VARIABLE));
    // Note: TTL and timestamp are not allowed in counter queries;
    // the call below checks that no fixed TTL or timestamp was configured;
    // function-style TTLs and timestamps are checked and rejected further down as well
    appendWriteTimeAndTTL(sb, null, null);
    sb.append(" SET ");
    Set<CQLFragment> cols = maybeSortCols(fieldsToVariables);
    Iterator<CQLFragment> colsIterator = cols.iterator();
    boolean isFirst = true;
    List<CQLWord> pks = primaryKeyColumns();
    while (colsIterator.hasNext()) {
        CQLFragment col = colsIterator.next();
        if (col instanceof CQLWord && pks.contains(col)) {
            continue;
        }
        // forbid writetime and TTL right-hand function calls when updating a counter table
        if (col instanceof FunctionCall) {
            throw new IllegalArgumentException("Invalid mapping: function calls are not allowed when updating a counter table.");
        }
        // for update queries there can be only one field mapped to a given column
        MappingField field = fieldsToVariables.inverse().get(col).iterator().next();
        if (field instanceof FunctionCall) {
            throw new IllegalArgumentException("Invalid mapping: function calls are not allowed when updating a counter table.");
        } else if (field instanceof CQLLiteral) {
            throw new IllegalArgumentException("Invalid mapping: constant expressions are not allowed when updating a counter table.");
        }
        if (!isFirst) {
            sb.append(", ");
        }
        isFirst = false;
        sb.append(col.render(VARIABLE)).append(" = ").append(col.render(VARIABLE)).append(" + ").append(col.render(NAMED_ASSIGNMENT));
    }
    sb.append(" WHERE ");
    Iterator<CQLWord> pksIterator = pks.iterator();
    while (pksIterator.hasNext()) {
        CQLFragment col = pksIterator.next();
        sb.append(col.render(VARIABLE)).append(" = ").append(col.render(NAMED_ASSIGNMENT));
        if (pksIterator.hasNext()) {
            sb.append(" AND ");
        }
    }
    return sb.toString();
}
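
For illustration only: given a hypothetical counter table ks.counters with partition key pk, clustering column cc and counter column total, and assuming that NAMED_ASSIGNMENT renders a column as a named bind variable, the loop above would emit roughly the statement below (appendWriteTimeAndTTL, not shown, is expected to add nothing here since both arguments are null).

UPDATE ks.counters SET total = total + :total WHERE pk = :pk AND cc = :cc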
Also used : CQLLiteral(com.datastax.oss.dsbulk.mapping.CQLLiteral) TypedCQLLiteral(com.datastax.oss.dsbulk.mapping.TypedCQLLiteral) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField)

Example 3 with MappingField

Use of com.datastax.oss.dsbulk.mapping.MappingField in project dsbulk by DataStax.

In class SchemaSettings, method inferInsertQuery:

private String inferInsertQuery(ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables) {
    ImmutableMultimap.Builder<MappingField, CQLFragment> regularFieldsToVariablesBuilder = ImmutableMultimap.builder();
    CQLFragment writetime = null;
    CQLFragment ttl = null;
    for (Entry<MappingField, CQLFragment> entry : fieldsToVariables.entries()) {
        if (entry.getValue() instanceof FunctionCall) {
            FunctionCall functionCall = (FunctionCall) entry.getValue();
            if (functionCall.getFunctionName().equals(WRITETIME)) {
                assert writetime == null;
                if (entry.getKey() instanceof CQLLiteral) {
                    writetime = (CQLLiteral) entry.getKey();
                } else {
                    writetime = CQLWord.fromInternal(functionCall.render(INTERNAL));
                }
            } else if (functionCall.getFunctionName().equals(TTL)) {
                assert ttl == null;
                if (entry.getKey() instanceof CQLLiteral) {
                    ttl = (CQLLiteral) entry.getKey();
                } else {
                    ttl = CQLWord.fromInternal(functionCall.render(INTERNAL));
                }
            }
        } else {
            regularFieldsToVariablesBuilder.put(entry);
        }
    }
    ImmutableMultimap<MappingField, CQLFragment> regularFieldsToVariables = regularFieldsToVariablesBuilder.build();
    StringBuilder sb = new StringBuilder("INSERT INTO ");
    sb.append(keyspaceName.render(VARIABLE)).append('.').append(tableName.render(VARIABLE)).append(" (");
    appendColumnNames(regularFieldsToVariables, sb, VARIABLE);
    sb.append(") VALUES (");
    Set<CQLFragment> cols = maybeSortCols(regularFieldsToVariables);
    Iterator<CQLFragment> it = cols.iterator();
    while (it.hasNext()) {
        CQLFragment col = it.next();
        // for insert queries there can be only one field mapped to a given column
        MappingField field = fieldsToVariables.inverse().get(col).iterator().next();
        if (field instanceof CQLFragment) {
            sb.append(((CQLFragment) field).render(NAMED_ASSIGNMENT));
        } else {
            sb.append(col.render(NAMED_ASSIGNMENT));
        }
        if (it.hasNext()) {
            sb.append(", ");
        }
    }
    sb.append(')');
    appendWriteTimeAndTTL(sb, writetime, ttl);
    return sb.toString();
}
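
For illustration only: for a hypothetical table ks.t1 with columns pk, v1 and v2, no constant expressions, and no writetime or ttl function in the mapping, the generated statement would look roughly like the line below; any USING TIMESTAMP / USING TTL clause would be appended by appendWriteTimeAndTTL, which is not shown in this excerpt.

INSERT INTO ks.t1 (pk, v1, v2) VALUES (:pk, :v1, :v2)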
Also used : CQLLiteral(com.datastax.oss.dsbulk.mapping.CQLLiteral) TypedCQLLiteral(com.datastax.oss.dsbulk.mapping.TypedCQLLiteral) ImmutableMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField)

Example 4 with MappingField

Use of com.datastax.oss.dsbulk.mapping.MappingField in project dsbulk by DataStax.

In class SchemaSettings, method init:

public void init(CqlSession session, ConvertingCodecFactory codecFactory, boolean indexedMappingSupported, boolean mappedMappingSupported) {
    this.codecFactory = codecFactory;
    try {
        if (config.hasPath(KEYSPACE) && config.hasPath(GRAPH)) {
            throw new IllegalArgumentException("Settings schema.keyspace and schema.graph are mutually exclusive");
        }
        if (config.hasPath(TABLE) && config.hasPath(VERTEX)) {
            throw new IllegalArgumentException("Settings schema.table and schema.vertex are mutually exclusive");
        }
        if (config.hasPath(TABLE) && config.hasPath(EDGE)) {
            throw new IllegalArgumentException("Settings schema.table and schema.edge are mutually exclusive");
        }
        if (config.hasPath(VERTEX) && config.hasPath(EDGE)) {
            throw new IllegalArgumentException("Settings schema.vertex and schema.edge are mutually exclusive");
        }
        if (config.hasPath(EDGE)) {
            if (!config.hasPath(FROM)) {
                throw new IllegalArgumentException("Setting schema.from is required when schema.edge is specified");
            }
            if (!config.hasPath(TO)) {
                throw new IllegalArgumentException("Setting schema.to is required when schema.edge is specified");
            }
        }
        if (config.hasPath(QUERY) && (config.hasPath(TABLE) || config.hasPath(VERTEX) || config.hasPath(EDGE))) {
            throw new IllegalArgumentException("Setting schema.query must not be defined if schema.table, schema.vertex or schema.edge are defined");
        }
        if ((!config.hasPath(KEYSPACE) && !config.hasPath(GRAPH)) && (config.hasPath(TABLE) || config.hasPath(VERTEX) || config.hasPath(EDGE))) {
            throw new IllegalArgumentException("Settings schema.keyspace or schema.graph must be defined if schema.table, schema.vertex or schema.edge are defined");
        }
        if (config.hasPath(KEYSPACE)) {
            keyspace = locateKeyspace(session.getMetadata(), config.getString(KEYSPACE));
        } else if (config.hasPath(GRAPH)) {
            keyspace = locateKeyspace(session.getMetadata(), config.getString(GRAPH));
        }
        if (keyspace != null) {
            if (config.hasPath(TABLE)) {
                table = locateTable(keyspace, config.getString(TABLE));
            } else if (config.hasPath(VERTEX)) {
                table = locateVertexTable(keyspace, config.getString(VERTEX));
            } else if (config.hasPath(EDGE)) {
                table = locateEdgeTable(keyspace, config.getString(EDGE), config.getString(FROM), config.getString(TO));
            }
        }
        // Timestamp and TTL
        ttlSeconds = config.getInt(QUERY_TTL);
        if (config.hasPath(QUERY_TIMESTAMP)) {
            String timestampStr = config.getString(QUERY_TIMESTAMP);
            try {
                ConvertingCodec<String, Instant> codec = codecFactory.createConvertingCodec(DataTypes.TIMESTAMP, GenericType.STRING, true);
                Instant instant = codec.externalToInternal(timestampStr);
                this.timestampMicros = instantToNumber(instant, MICROSECONDS, EPOCH);
            } catch (Exception e) {
                Object format = codecFactory.getContext().getAttribute(TIMESTAMP_PATTERN);
                throw new IllegalArgumentException(String.format("Expecting schema.queryTimestamp to be in %s format but got '%s'", format, timestampStr));
            }
        } else {
            this.timestampMicros = -1L;
        }
        preserveTimestamp = config.getBoolean(PRESERVE_TIMESTAMP);
        preserveTtl = config.getBoolean(PRESERVE_TTL);
        if (config.hasPath(QUERY)) {
            query = config.getString(QUERY);
            queryInspector = new QueryInspector(query);
            if (queryInspector.getKeyspaceName().isPresent()) {
                if (keyspace != null) {
                    throw new IllegalArgumentException("Setting schema.keyspace must not be provided when schema.query contains a keyspace-qualified statement");
                }
                CQLWord keyspaceName = queryInspector.getKeyspaceName().get();
                keyspace = session.getMetadata().getKeyspace(keyspaceName.asIdentifier()).orElse(null);
                if (keyspace == null) {
                    throw new IllegalArgumentException(String.format("Value for schema.query references a non-existent keyspace: %s", keyspaceName.render(VARIABLE)));
                }
            } else if (keyspace == null) {
                throw new IllegalArgumentException("Setting schema.keyspace must be provided when schema.query does not contain a keyspace-qualified statement");
            }
            CQLWord tableName = queryInspector.getTableName();
            table = keyspace.getTable(tableName.asIdentifier()).orElse(null);
            if (table == null) {
                table = keyspace.getView(tableName.asIdentifier()).orElse(null);
                if (table == null) {
                    throw new IllegalArgumentException(String.format("Value for schema.query references a non-existent table or materialized view: %s", tableName.render(VARIABLE)));
                }
            }
            // If a query is provided, ttl and timestamp must not be.
            if (timestampMicros != -1 || ttlSeconds != -1) {
                throw new IllegalArgumentException("Setting schema.query must not be defined if schema.queryTtl or schema.queryTimestamp is defined");
            }
            if (preserveTimestamp || preserveTtl) {
                throw new IllegalArgumentException("Setting schema.query must not be defined if schema.preserveTimestamp or schema.preserveTtl is defined");
            }
        } else {
            if (keyspace == null || table == null) {
                // Either the keyspace and table must be present, or the query must be present.
                throw new IllegalArgumentException("When schema.query is not defined, " + "then either schema.keyspace or schema.graph must be defined, " + "and either schema.table, schema.vertex or schema.edge must be defined");
            }
        }
        assert keyspace != null;
        assert table != null;
        keyspaceName = CQLWord.fromCqlIdentifier(keyspace.getName());
        tableName = CQLWord.fromCqlIdentifier(table.getName());
        if (indexedMappingSupported && mappedMappingSupported) {
            mappingPreference = MAPPED_OR_INDEXED;
        } else if (indexedMappingSupported) {
            mappingPreference = INDEXED_ONLY;
        } else if (mappedMappingSupported) {
            mappingPreference = MAPPED_ONLY;
        } else if (schemaGenerationStrategy.isMapping()) {
            throw new IllegalArgumentException("Connector must support at least one of indexed or mapped mappings");
        }
        if (config.hasPath(MAPPING)) {
            if (!schemaGenerationStrategy.isMapping()) {
                throw new IllegalArgumentException("Setting schema.mapping must not be defined when counting rows in a table");
            }
            Supplier<CQLWord> usingTimestampVariable = null;
            Supplier<CQLWord> usingTTLVariable = null;
            if (queryInspector != null) {
                usingTimestampVariable = queryInspector.getUsingTimestampVariable()::get;
                usingTTLVariable = queryInspector.getUsingTTLVariable()::get;
            }
            // TODO remove support for providing external variable names for the deprecated
            // __ttl and __timestamp mapping tokens.
            @SuppressWarnings("deprecation") MappingInspector mapping = new MappingInspector(config.getString(MAPPING), schemaGenerationStrategy.isWriting(), mappingPreference, usingTimestampVariable, usingTTLVariable);
            this.mapping = mapping;
            Set<MappingField> fields = mapping.getExplicitMappings().keySet();
            Collection<CQLFragment> variables = mapping.getExplicitMappings().values();
            if (schemaGenerationStrategy.isWriting()) {
                // now() = c1 only allowed if schema.query not present
                if (containsFunctionCalls(variables, WRITETIME_OR_TTL.negate())) {
                    throw new IllegalArgumentException("Misplaced function call detected on the right side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsFunctionCalls(variables, WRITETIME_OR_TTL)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a writetime or ttl function on the right side of a mapping entry");
                }
                if (query != null && containsFunctionCalls(fields)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a function on the left side of a mapping entry");
                }
                if (containsWritetimeOrTTLFunctionCalls(mapping.getExplicitMappings())) {
                    throw new IllegalArgumentException("Misplaced function call detected on the left side of a writetime or TTL mapping entry; " + "please review your schema.mapping setting");
                }
                // (text)'abc' = c1 only allowed if schema.query not present
                if (containsConstantExpressions(variables)) {
                    throw new IllegalArgumentException("Misplaced constant expression detected on the right side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsConstantExpressions(fields)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a constant expression on the left side of a mapping entry");
                }
            }
            if (schemaGenerationStrategy.isReading()) {
                // f1 = now() only allowed if schema.query not present
                if (containsFunctionCalls(fields)) {
                    throw new IllegalArgumentException("Misplaced function call detected on the left side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsFunctionCalls(variables)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when unloading if schema.mapping " + "contains a function on the right side of a mapping entry");
                }
                // constant expressions are not allowed on the left side of a mapping entry;
                // on the right side (f1 = (text)'abc') they are only allowed if schema.query is not
                // present and the cluster supports CQL literals in the SELECT clause
                if (containsConstantExpressions(fields)) {
                    throw new IllegalArgumentException("Misplaced constant expression detected on the left side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (containsConstantExpressions(variables)) {
                    if (query != null) {
                        throw new IllegalArgumentException("Setting schema.query must not be defined when unloading if schema.mapping " + "contains a constant expression on the right side of a mapping entry");
                    }
                    if (!checkLiteralSelectorsSupported(session)) {
                        throw new IllegalStateException("At least one constant expression appears on the right side of a mapping entry, " + "but the cluster does not support CQL literals in the SELECT clause; " + " please review your schema.mapping setting");
                    }
                }
            }
            if ((preserveTimestamp || preserveTtl) && !mapping.isInferring()) {
                throw new IllegalStateException("Setting schema.mapping must contain an inferring entry (e.g. '*=*') " + "when schema.preserveTimestamp or schema.preserveTtl is enabled");
            }
        } else {
            mapping = new MappingInspector("*=*", schemaGenerationStrategy.isWriting(), mappingPreference);
        }
        // Misc
        nullToUnset = config.getBoolean(NULL_TO_UNSET);
        allowExtraFields = config.getBoolean(ALLOW_EXTRA_FIELDS);
        allowMissingFields = config.getBoolean(ALLOW_MISSING_FIELDS);
        splits = ConfigUtils.getThreads(config, SPLITS);
        if (hasGraphOptions(config)) {
            GraphUtils.checkGraphCompatibility(session);
            if (!isGraph(keyspace)) {
                throw new IllegalStateException("Graph operations requested but provided keyspace is not a graph: " + keyspaceName);
            }
            if (!isSupportedGraph(keyspace)) {
                assert ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().isPresent();
                throw new IllegalStateException(String.format("Graph operations requested but provided graph %s was created with an unsupported graph engine: %s", keyspaceName, ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().get()));
            }
        } else if (isGraph(keyspace)) {
            if (isSupportedGraph(keyspace)) {
                if (config.hasPath(KEYSPACE) || config.hasPath(TABLE)) {
                    LOGGER.warn("Provided keyspace is a graph; " + "instead of schema.keyspace and schema.table, please use graph-specific options " + "such as schema.graph, schema.vertex, schema.edge, schema.from and schema.to.");
                }
            } else {
                if (schemaGenerationStrategy == SchemaGenerationStrategy.MAP_AND_WRITE) {
                    LOGGER.warn("Provided keyspace is a graph created with a legacy graph engine: " + ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().get() + "; attempting to load data into such a keyspace is not supported and " + "may put the graph in an inconsistent state.");
                }
            }
        }
    } catch (ConfigException e) {
        throw ConfigUtils.convertConfigException(e, "dsbulk.schema");
    }
}
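
For illustration only: a hypothetical configuration fragment that this method rejects in its very first validation step, because schema.keyspace and schema.graph are mutually exclusive.

dsbulk {
  schema {
    keyspace = "ks1"
    // rejected together with the line above:
    // "Settings schema.keyspace and schema.graph are mutually exclusive"
    graph = "graph1"
  }
}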
Also used : Instant(java.time.Instant) ConfigException(com.typesafe.config.ConfigException) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) ConfigException(com.typesafe.config.ConfigException) NestedBatchException(com.datastax.oss.dsbulk.workflow.commons.schema.NestedBatchException) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) DseGraphKeyspaceMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata) MappingInspector(com.datastax.oss.dsbulk.mapping.MappingInspector) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord)

Example 5 with MappingField

Use of com.datastax.oss.dsbulk.mapping.MappingField in project dsbulk by DataStax.

In class SchemaSettings, method prepareStatementAndCreateMapping:

@NonNull
private Mapping prepareStatementAndCreateMapping(CqlSession session, boolean batchingEnabled, EnumSet<StatisticsMode> modes) {
    ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables = null;
    if (!config.hasPath(QUERY)) {
        // in the absence of user-provided queries, create the mapping *before* query generation and
        // preparation
        List<CQLFragment> columns = table.getColumns().values().stream().filter(col -> !isDSESearchPseudoColumn(col)).flatMap(column -> {
            CQLWord colName = CQLWord.fromCqlIdentifier(column.getName());
            List<CQLFragment> cols = Lists.newArrayList(colName);
            if (schemaGenerationStrategy.isMapping()) {
                if (preserveTimestamp && checkWritetimeTtlSupported(column, WRITETIME)) {
                    cols.add(new FunctionCall(null, WRITETIME, colName));
                }
                if (preserveTtl && checkWritetimeTtlSupported(column, TTL)) {
                    cols.add(new FunctionCall(null, TTL, colName));
                }
            }
            return cols.stream();
        }).collect(Collectors.toList());
        fieldsToVariables = createFieldsToVariablesMap(columns);
        // query generation
        if (schemaGenerationStrategy.isWriting()) {
            if (isCounterTable()) {
                query = inferUpdateCounterQuery(fieldsToVariables);
            } else if (requiresBatchInsertQuery(fieldsToVariables)) {
                query = inferBatchInsertQuery(fieldsToVariables);
            } else {
                query = inferInsertQuery(fieldsToVariables);
            }
        } else if (schemaGenerationStrategy.isReading() && schemaGenerationStrategy.isMapping()) {
            query = inferReadQuery(fieldsToVariables);
        } else if (schemaGenerationStrategy.isReading() && schemaGenerationStrategy.isCounting()) {
            query = inferCountQuery(modes);
        } else {
            throw new IllegalStateException("Unsupported schema generation strategy: " + schemaGenerationStrategy);
        }
        LOGGER.debug("Inferred query: {}", query);
        queryInspector = new QueryInspector(query);
        // validate generated query
        if (schemaGenerationStrategy.isWriting()) {
            validatePrimaryKeyPresent(fieldsToVariables);
        }
    }
    assert query != null;
    assert queryInspector != null;
    if (!queryInspector.getKeyspaceName().isPresent()) {
        session.execute("USE " + keyspaceName);
    }
    // Transform user-provided queries before preparation
    if (config.hasPath(QUERY)) {
        if (schemaGenerationStrategy.isReading() && queryInspector.isParallelizable()) {
            int whereClauseIndex = queryInspector.getFromClauseEndIndex() + 1;
            StringBuilder sb = new StringBuilder(query.substring(0, whereClauseIndex));
            appendTokenRangeRestriction(sb);
            query = sb.append(query.substring(whereClauseIndex)).toString();
        }
        if (schemaGenerationStrategy.isCounting()) {
            if (modes.contains(StatisticsMode.partitions) || modes.contains(StatisticsMode.ranges) || modes.contains(StatisticsMode.hosts)) {
                throw new IllegalArgumentException(String.format("Cannot count with stats.modes = %s when schema.query is provided; " + "only stats.modes = [global] is allowed", modes));
            }
            // reduce row size by only selecting one column
            StringBuilder sb = new StringBuilder("SELECT ");
            sb.append(getGlobalCountSelector());
            query = sb.append(' ').append(query.substring(queryInspector.getFromClauseStartIndex())).toString();
        }
        queryInspector = new QueryInspector(query);
    }
    if (batchingEnabled && queryInspector.isBatch()) {
        preparedStatements = unwrapAndPrepareBatchChildStatements(session);
    } else {
        preparedStatements = Collections.singletonList(session.prepare(query));
    }
    if (config.hasPath(QUERY)) {
        // in the presence of user-provided queries, create the mapping *after* query preparation
        Stream<ColumnDefinitions> variables = getVariables();
        fieldsToVariables = createFieldsToVariablesMap(variables.flatMap(defs -> StreamSupport.stream(defs.spliterator(), false)).map(def -> def.getName().asInternal()).map(CQLWord::fromInternal).collect(Collectors.toList()));
        // validate user-provided query
        if (schemaGenerationStrategy.isWriting()) {
            if (mutatesOnlyStaticColumns()) {
                // DAT-414: mutations that only affect static columns are allowed
                // to skip the clustering columns, only the partition key should be present.
                validatePartitionKeyPresent(fieldsToVariables);
            } else {
                validatePrimaryKeyPresent(fieldsToVariables);
            }
        }
    }
    assert fieldsToVariables != null;
    return new DefaultMapping(transformFieldsToVariables(fieldsToVariables), codecFactory, transformWriteTimeVariables(queryInspector.getWriteTimeVariables()));
}
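
For illustration only: when a user-provided read query is parallelizable, the code above inserts a token-range restriction right after the FROM clause via appendTokenRangeRestriction (not shown here), so a query such as SELECT v1, v2 FROM ks.t1 would typically be rewritten into something like the line below, with bind variable names that are only indicative.

SELECT v1, v2 FROM ks.t1 WHERE token(pk) > :start AND token(pk) <= :end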
Also used : MAPPED_OR_INDEXED(com.datastax.oss.dsbulk.mapping.MappingPreference.MAPPED_OR_INDEXED) CqlIdentifier(com.datastax.oss.driver.api.core.CqlIdentifier) MAPPED_ONLY(com.datastax.oss.dsbulk.mapping.MappingPreference.MAPPED_ONLY) DefaultMapping(com.datastax.oss.dsbulk.mapping.DefaultMapping) GenericType(com.datastax.oss.driver.api.core.type.reflect.GenericType) TokenRangeReadStatementGenerator(com.datastax.oss.dsbulk.partitioner.TokenRangeReadStatementGenerator) ConfigUtils(com.datastax.oss.dsbulk.config.ConfigUtils) ALIASED_SELECTOR(com.datastax.oss.dsbulk.mapping.CQLRenderMode.ALIASED_SELECTOR) BatchType(com.datastax.oss.driver.api.core.cql.BatchType) Map(java.util.Map) VisibleForTesting(com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting) ProtocolVersion(com.datastax.oss.driver.api.core.ProtocolVersion) EnumSet(java.util.EnumSet) CQLRenderMode(com.datastax.oss.dsbulk.mapping.CQLRenderMode) VARIABLE(com.datastax.oss.dsbulk.mapping.CQLRenderMode.VARIABLE) MapType(com.datastax.oss.driver.api.core.type.MapType) GraphUtils(com.datastax.oss.dsbulk.workflow.commons.utils.GraphUtils) Set(java.util.Set) ImmutableList(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList) Field(com.datastax.oss.dsbulk.connectors.api.Field) Stream(java.util.stream.Stream) ConfigException(com.typesafe.config.ConfigException) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord) INDEXED_ONLY(com.datastax.oss.dsbulk.mapping.MappingPreference.INDEXED_ONLY) MICROSECONDS(java.util.concurrent.TimeUnit.MICROSECONDS) CQLLiteral(com.datastax.oss.dsbulk.mapping.CQLLiteral) ImmutableSet(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet) Preconditions(com.datastax.oss.driver.shaded.guava.common.base.Preconditions) STAR(com.datastax.oss.dsbulk.mapping.MappingInspector.STAR) ViewMetadata(com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) CqlSession(com.datastax.oss.driver.api.core.CqlSession) DefaultReadResultMapper(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultReadResultMapper) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) StreamSupport(java.util.stream.StreamSupport) Metadata(com.datastax.oss.driver.api.core.metadata.Metadata) LinkedHashSet(java.util.LinkedHashSet) DseEdgeMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata) TableMetadata(com.datastax.oss.driver.api.core.metadata.schema.TableMetadata) RelationMetadata(com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata) MappingPreference(com.datastax.oss.dsbulk.mapping.MappingPreference) Config(com.typesafe.config.Config) NestedBatchException(com.datastax.oss.dsbulk.workflow.commons.schema.NestedBatchException) DataType(com.datastax.oss.driver.api.core.type.DataType) ConvertingCodecFactory(com.datastax.oss.dsbulk.codecs.api.ConvertingCodecFactory) DefaultRecordMapper(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultRecordMapper) RecordMapper(com.datastax.oss.dsbulk.workflow.commons.schema.RecordMapper) DseTableMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) WRITETIME(com.datastax.oss.dsbulk.mapping.MappingInspector.WRITETIME) ReadResultMapper(com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultMapper) DseVertexMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata) Nullable(edu.umd.cs.findbugs.annotations.Nullable) 
StatisticsMode(com.datastax.oss.dsbulk.workflow.commons.settings.StatsSettings.StatisticsMode) DefaultReadResultCounter(com.datastax.oss.dsbulk.workflow.commons.schema.DefaultReadResultCounter) CodecUtils.instantToNumber(com.datastax.oss.dsbulk.codecs.api.util.CodecUtils.instantToNumber) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) LoggerFactory(org.slf4j.LoggerFactory) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) Mapping(com.datastax.oss.dsbulk.mapping.Mapping) ReadResultCounter(com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultCounter) Lists(com.datastax.oss.driver.shaded.guava.common.collect.Lists) ConvertingCodec(com.datastax.oss.dsbulk.codecs.api.ConvertingCodec) NonNull(edu.umd.cs.findbugs.annotations.NonNull) Predicates(com.datastax.oss.driver.shaded.guava.common.base.Predicates) URI(java.net.URI) IndexMetadata(com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata) Record(com.datastax.oss.dsbulk.connectors.api.Record) ImmutableMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap) Predicate(java.util.function.Predicate) Collection(java.util.Collection) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) NAMED_ASSIGNMENT(com.datastax.oss.dsbulk.mapping.CQLRenderMode.NAMED_ASSIGNMENT) Objects(java.util.Objects) KeyspaceMetadata(com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata) DataTypes(com.datastax.oss.driver.api.core.type.DataTypes) List(java.util.List) Entry(java.util.Map.Entry) Optional(java.util.Optional) SetType(com.datastax.oss.driver.api.core.type.SetType) TIMESTAMP_PATTERN(com.datastax.oss.dsbulk.codecs.api.CommonConversionContext.TIMESTAMP_PATTERN) MappingInspector(com.datastax.oss.dsbulk.mapping.MappingInspector) DseGraphKeyspaceMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata) ListType(com.datastax.oss.driver.api.core.type.ListType) HashSet(java.util.HashSet) RecordMetadata(com.datastax.oss.dsbulk.connectors.api.RecordMetadata) ColumnDefinitions(com.datastax.oss.driver.api.core.cql.ColumnDefinitions) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) ImmutableSetMultimap(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap) INTERNAL(com.datastax.oss.dsbulk.mapping.CQLRenderMode.INTERNAL) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) TypedCQLLiteral(com.datastax.oss.dsbulk.mapping.TypedCQLLiteral) PreparedStatement(com.datastax.oss.driver.api.core.cql.PreparedStatement) DseGraphTableMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata) ColumnMetadata(com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata) DefaultProtocolVersion(com.datastax.oss.driver.api.core.DefaultProtocolVersion) UserDefinedType(com.datastax.oss.driver.api.core.type.UserDefinedType) TTL(com.datastax.oss.dsbulk.mapping.MappingInspector.TTL) Multimap(com.datastax.oss.driver.shaded.guava.common.collect.Multimap) EPOCH(java.time.Instant.EPOCH) Collections(java.util.Collections) Statement(com.datastax.oss.driver.api.core.cql.Statement) ColumnDefinitions(com.datastax.oss.driver.api.core.cql.ColumnDefinitions) DefaultMapping(com.datastax.oss.dsbulk.mapping.DefaultMapping) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) 
MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord) ImmutableList(com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList) ArrayList(java.util.ArrayList) List(java.util.List) FunctionCall(com.datastax.oss.dsbulk.mapping.FunctionCall) NonNull(edu.umd.cs.findbugs.annotations.NonNull)

Aggregations

CQLFragment (com.datastax.oss.dsbulk.mapping.CQLFragment): 6
IndexedMappingField (com.datastax.oss.dsbulk.mapping.IndexedMappingField): 6
MappedMappingField (com.datastax.oss.dsbulk.mapping.MappedMappingField): 6
MappingField (com.datastax.oss.dsbulk.mapping.MappingField): 6
CQLWord (com.datastax.oss.dsbulk.mapping.CQLWord): 5
FunctionCall (com.datastax.oss.dsbulk.mapping.FunctionCall): 5
ImmutableMultimap (com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap): 4
DseGraphKeyspaceMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata): 3
CQLLiteral (com.datastax.oss.dsbulk.mapping.CQLLiteral): 3
TypedCQLLiteral (com.datastax.oss.dsbulk.mapping.TypedCQLLiteral): 3
DseEdgeMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata): 2
DseGraphTableMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata): 2
DseTableMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata): 2
DseVertexMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata): 2
CqlIdentifier (com.datastax.oss.driver.api.core.CqlIdentifier): 2
CqlSession (com.datastax.oss.driver.api.core.CqlSession): 2
DefaultProtocolVersion (com.datastax.oss.driver.api.core.DefaultProtocolVersion): 2
ProtocolVersion (com.datastax.oss.driver.api.core.ProtocolVersion): 2
BatchType (com.datastax.oss.driver.api.core.cql.BatchType): 2
ColumnDefinitions (com.datastax.oss.driver.api.core.cql.ColumnDefinitions): 2