
Example 1 with QueryInspector

Use of com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector in project dsbulk by datastax.

From the class SchemaSettingsTest, method should_preserve_writetime_and_ttl:

@ParameterizedTest
@MethodSource
void should_preserve_writetime_and_ttl(SchemaGenerationStrategy schemaGenerationStrategy, boolean preserveTimestamp, boolean preserveTtl, String mapping, String expectedQuery) {
    Config config = TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1", "preserveTimestamp", preserveTimestamp, "preserveTtl", preserveTtl, "mapping", mapping);
    SchemaSettings settings = new SchemaSettings(config, schemaGenerationStrategy);
    settings.init(session, codecFactory, true, true);
    if (schemaGenerationStrategy.isWriting()) {
        settings.createRecordMapper(session, recordMetadata, true);
    } else {
        settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
    }
    assertThat(getInternalState(settings, "query")).isEqualTo(expectedQuery);
    QueryInspector queryInspector = new QueryInspector(expectedQuery);
    if (queryInspector.isBatch()) {
        List<String> childStatements = queryInspector.getBatchChildStatements();
        assertThat(getInternalState(settings, "preparedStatements")).asList().hasSize(childStatements.size());
        for (String childStatement : childStatements) {
            verify(session).prepare(childStatement);
        }
        verify(session, never()).prepare(expectedQuery);
    } else {
        assertThat(getInternalState(settings, "preparedStatements")).asList().hasSize(1);
        verify(session).prepare(expectedQuery);
    }
}
Also used: Config(com.typesafe.config.Config) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) MethodSource(org.junit.jupiter.params.provider.MethodSource)
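
The batch branch of this test is easier to follow in isolation. The sketch below is not taken from dsbulk; the CQL text and the ks.t1 table are illustrative, and only the QueryInspector calls mirror the test. It shows how a batch query is detected and how its child statements, which the test expects to be prepared individually, are enumerated.

// Minimal sketch: detect a batch query and list the child statements that would be
// prepared one by one (the wrapping BEGIN BATCH ... APPLY BATCH is never prepared).
String query =
    "BEGIN UNLOGGED BATCH "
        + "INSERT INTO ks.t1 (pk, v1) VALUES (:pk, :v1) USING TIMESTAMP :ts1; "
        + "INSERT INTO ks.t1 (pk, v2) VALUES (:pk, :v2) USING TIMESTAMP :ts2; "
        + "APPLY BATCH";
QueryInspector inspector = new QueryInspector(query);
if (inspector.isBatch()) {
    for (String child : inspector.getBatchChildStatements()) {
        System.out.println("would prepare: " + child);
    }
}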

Example 2 with QueryInspector

Use of com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector in project dsbulk by datastax.

From the class SchemaSettingsTest, method should_unwrap_batch_query:

@ParameterizedTest
@MethodSource
void should_unwrap_batch_query(String query) {
    Config config = TestConfigUtils.createTestConfig("dsbulk.schema", "query", StringUtils.quoteJson(query));
    SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
    settings.init(session, codecFactory, true, true);
    settings.createRecordMapper(session, recordMetadata, true);
    assertThat(getInternalState(settings, "preparedStatements")).asList().hasSize(2);
    List<String> childStatements = new QueryInspector(query).getBatchChildStatements();
    verify(session).prepare(childStatements.get(0));
    verify(session).prepare(childStatements.get(1));
    verify(session, never()).prepare(query);
}
Also used : Config(com.typesafe.config.Config) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) MethodSource(org.junit.jupiter.params.provider.MethodSource)
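
The @MethodSource provider for this test is not shown on this page. A plausible provider is sketched below; it is hypothetical and the real one in SchemaSettingsTest may differ. Each argument is a batch query wrapping exactly two child statements, which is what the hasSize(2) assertion above requires. It relies on java.util.stream.Stream.

// Hypothetical provider; JUnit 5 resolves it by name because @MethodSource has no value.
static Stream<String> should_unwrap_batch_query() {
    // Each query wraps exactly two child INSERTs, so two statements get prepared.
    return Stream.of(
        "BEGIN BATCH "
            + "INSERT INTO ks.t1 (pk, v1) VALUES (:pk, :v1); "
            + "INSERT INTO ks.t1 (pk, v2) VALUES (:pk, :v2); "
            + "APPLY BATCH",
        "BEGIN UNLOGGED BATCH "
            + "INSERT INTO ks.t1 (pk, v1) VALUES (:pk, :v1) USING TIMESTAMP :ts1; "
            + "INSERT INTO ks.t1 (pk, v2) VALUES (:pk, :v2) USING TTL :ttl2; "
            + "APPLY BATCH");
}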

Example 3 with QueryInspector

Use of com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector in project dsbulk by datastax.

From the class SchemaSettings, method init:

public void init(CqlSession session, ConvertingCodecFactory codecFactory, boolean indexedMappingSupported, boolean mappedMappingSupported) {
    this.codecFactory = codecFactory;
    try {
        if (config.hasPath(KEYSPACE) && config.hasPath(GRAPH)) {
            throw new IllegalArgumentException("Settings schema.keyspace and schema.graph are mutually exclusive");
        }
        if (config.hasPath(TABLE) && config.hasPath(VERTEX)) {
            throw new IllegalArgumentException("Settings schema.table and schema.vertex are mutually exclusive");
        }
        if (config.hasPath(TABLE) && config.hasPath(EDGE)) {
            throw new IllegalArgumentException("Settings schema.table and schema.edge are mutually exclusive");
        }
        if (config.hasPath(VERTEX) && config.hasPath(EDGE)) {
            throw new IllegalArgumentException("Settings schema.vertex and schema.edge are mutually exclusive");
        }
        if (config.hasPath(EDGE)) {
            if (!config.hasPath(FROM)) {
                throw new IllegalArgumentException("Setting schema.from is required when schema.edge is specified");
            }
            if (!config.hasPath(TO)) {
                throw new IllegalArgumentException("Setting schema.to is required when schema.edge is specified");
            }
        }
        if (config.hasPath(QUERY) && (config.hasPath(TABLE) || config.hasPath(VERTEX) || config.hasPath(EDGE))) {
            throw new IllegalArgumentException("Setting schema.query must not be defined if schema.table, schema.vertex or schema.edge are defined");
        }
        if ((!config.hasPath(KEYSPACE) && !config.hasPath(GRAPH)) && (config.hasPath(TABLE) || config.hasPath(VERTEX) || config.hasPath(EDGE))) {
            throw new IllegalArgumentException("Settings schema.keyspace or schema.graph must be defined if schema.table, schema.vertex or schema.edge are defined");
        }
        if (config.hasPath(KEYSPACE)) {
            keyspace = locateKeyspace(session.getMetadata(), config.getString(KEYSPACE));
        } else if (config.hasPath(GRAPH)) {
            keyspace = locateKeyspace(session.getMetadata(), config.getString(GRAPH));
        }
        if (keyspace != null) {
            if (config.hasPath(TABLE)) {
                table = locateTable(keyspace, config.getString(TABLE));
            } else if (config.hasPath(VERTEX)) {
                table = locateVertexTable(keyspace, config.getString(VERTEX));
            } else if (config.hasPath(EDGE)) {
                table = locateEdgeTable(keyspace, config.getString(EDGE), config.getString(FROM), config.getString(TO));
            }
        }
        // Timestamp and TTL
        ttlSeconds = config.getInt(QUERY_TTL);
        if (config.hasPath(QUERY_TIMESTAMP)) {
            String timestampStr = config.getString(QUERY_TIMESTAMP);
            try {
                ConvertingCodec<String, Instant> codec = codecFactory.createConvertingCodec(DataTypes.TIMESTAMP, GenericType.STRING, true);
                Instant instant = codec.externalToInternal(timestampStr);
                this.timestampMicros = instantToNumber(instant, MICROSECONDS, EPOCH);
            } catch (Exception e) {
                Object format = codecFactory.getContext().getAttribute(TIMESTAMP_PATTERN);
                throw new IllegalArgumentException(String.format("Expecting schema.queryTimestamp to be in %s format but got '%s'", format, timestampStr));
            }
        } else {
            this.timestampMicros = -1L;
        }
        preserveTimestamp = config.getBoolean(PRESERVE_TIMESTAMP);
        preserveTtl = config.getBoolean(PRESERVE_TTL);
        if (config.hasPath(QUERY)) {
            query = config.getString(QUERY);
            queryInspector = new QueryInspector(query);
            if (queryInspector.getKeyspaceName().isPresent()) {
                if (keyspace != null) {
                    throw new IllegalArgumentException("Setting schema.keyspace must not be provided when schema.query contains a keyspace-qualified statement");
                }
                CQLWord keyspaceName = queryInspector.getKeyspaceName().get();
                keyspace = session.getMetadata().getKeyspace(keyspaceName.asIdentifier()).orElse(null);
                if (keyspace == null) {
                    throw new IllegalArgumentException(String.format("Value for schema.query references a non-existent keyspace: %s", keyspaceName.render(VARIABLE)));
                }
            } else if (keyspace == null) {
                throw new IllegalArgumentException("Setting schema.keyspace must be provided when schema.query does not contain a keyspace-qualified statement");
            }
            CQLWord tableName = queryInspector.getTableName();
            table = keyspace.getTable(tableName.asIdentifier()).orElse(null);
            if (table == null) {
                table = keyspace.getView(tableName.asIdentifier()).orElse(null);
                if (table == null) {
                    throw new IllegalArgumentException(String.format("Value for schema.query references a non-existent table or materialized view: %s", tableName.render(VARIABLE)));
                }
            }
            // If a query is provided, ttl and timestamp must not be.
            if (timestampMicros != -1 || ttlSeconds != -1) {
                throw new IllegalArgumentException("Setting schema.query must not be defined if schema.queryTtl or schema.queryTimestamp is defined");
            }
            if (preserveTimestamp || preserveTtl) {
                throw new IllegalArgumentException("Setting schema.query must not be defined if schema.preserveTimestamp or schema.preserveTtl is defined");
            }
        } else {
            if (keyspace == null || table == null) {
                // Either the keyspace and table must be present, or the query must be present.
                throw new IllegalArgumentException("When schema.query is not defined, " + "then either schema.keyspace or schema.graph must be defined, " + "and either schema.table, schema.vertex or schema.edge must be defined");
            }
        }
        assert keyspace != null;
        assert table != null;
        keyspaceName = CQLWord.fromCqlIdentifier(keyspace.getName());
        tableName = CQLWord.fromCqlIdentifier(table.getName());
        if (indexedMappingSupported && mappedMappingSupported) {
            mappingPreference = MAPPED_OR_INDEXED;
        } else if (indexedMappingSupported) {
            mappingPreference = INDEXED_ONLY;
        } else if (mappedMappingSupported) {
            mappingPreference = MAPPED_ONLY;
        } else if (schemaGenerationStrategy.isMapping()) {
            throw new IllegalArgumentException("Connector must support at least one of indexed or mapped mappings");
        }
        if (config.hasPath(MAPPING)) {
            if (!schemaGenerationStrategy.isMapping()) {
                throw new IllegalArgumentException("Setting schema.mapping must not be defined when counting rows in a table");
            }
            Supplier<CQLWord> usingTimestampVariable = null;
            Supplier<CQLWord> usingTTLVariable = null;
            if (queryInspector != null) {
                usingTimestampVariable = queryInspector.getUsingTimestampVariable()::get;
                usingTTLVariable = queryInspector.getUsingTTLVariable()::get;
            }
            // TODO remove support for providing external variable names for the deprecated
            // __ttl and __timestamp mapping tokens.
            @SuppressWarnings("deprecation") MappingInspector mapping = new MappingInspector(config.getString(MAPPING), schemaGenerationStrategy.isWriting(), mappingPreference, usingTimestampVariable, usingTTLVariable);
            this.mapping = mapping;
            Set<MappingField> fields = mapping.getExplicitMappings().keySet();
            Collection<CQLFragment> variables = mapping.getExplicitMappings().values();
            if (schemaGenerationStrategy.isWriting()) {
                // now() = c1 only allowed if schema.query not present
                if (containsFunctionCalls(variables, WRITETIME_OR_TTL.negate())) {
                    throw new IllegalArgumentException("Misplaced function call detected on the right side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsFunctionCalls(variables, WRITETIME_OR_TTL)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a writetime or ttl function on the right side of a mapping entry");
                }
                if (query != null && containsFunctionCalls(fields)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a function on the left side of a mapping entry");
                }
                if (containsWritetimeOrTTLFunctionCalls(mapping.getExplicitMappings())) {
                    throw new IllegalArgumentException("Misplaced function call detected on the left side of a writetime or TTL mapping entry; " + "please review your schema.mapping setting");
                }
                // (text)'abc' = c1 only allowed if schema.query not present
                if (containsConstantExpressions(variables)) {
                    throw new IllegalArgumentException("Misplaced constant expression detected on the right side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsConstantExpressions(fields)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when loading if schema.mapping " + "contains a constant expression on the left side of a mapping entry");
                }
            }
            if (schemaGenerationStrategy.isReading()) {
                // f1 = now() only allowed if schema.query not present
                if (containsFunctionCalls(fields)) {
                    throw new IllegalArgumentException("Misplaced function call detected on the left side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (query != null && containsFunctionCalls(variables)) {
                    throw new IllegalArgumentException("Setting schema.query must not be defined when unloading if schema.mapping " + "contains a function on the right side of a mapping entry");
                }
                // (text)'abc' = c1: constant expressions on the left side are never allowed when unloading
                if (containsConstantExpressions(fields)) {
                    throw new IllegalArgumentException("Misplaced constant expression detected on the left side of a mapping entry; " + "please review your schema.mapping setting");
                }
                if (containsConstantExpressions(variables)) {
                    if (query != null) {
                        throw new IllegalArgumentException("Setting schema.query must not be defined when unloading if schema.mapping " + "contains a constant expression on the right side of a mapping entry");
                    }
                    if (!checkLiteralSelectorsSupported(session)) {
                        throw new IllegalStateException("At least one constant expression appears on the right side of a mapping entry, " + "but the cluster does not support CQL literals in the SELECT clause; " + " please review your schema.mapping setting");
                    }
                }
            }
            if ((preserveTimestamp || preserveTtl) && !mapping.isInferring()) {
                throw new IllegalStateException("Setting schema.mapping must contain an inferring entry (e.g. '*=*') " + "when schema.preserveTimestamp or schema.preserveTtl is enabled");
            }
        } else {
            mapping = new MappingInspector("*=*", schemaGenerationStrategy.isWriting(), mappingPreference);
        }
        // Misc
        nullToUnset = config.getBoolean(NULL_TO_UNSET);
        allowExtraFields = config.getBoolean(ALLOW_EXTRA_FIELDS);
        allowMissingFields = config.getBoolean(ALLOW_MISSING_FIELDS);
        splits = ConfigUtils.getThreads(config, SPLITS);
        if (hasGraphOptions(config)) {
            GraphUtils.checkGraphCompatibility(session);
            if (!isGraph(keyspace)) {
                throw new IllegalStateException("Graph operations requested but provided keyspace is not a graph: " + keyspaceName);
            }
            if (!isSupportedGraph(keyspace)) {
                assert ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().isPresent();
                throw new IllegalStateException(String.format("Graph operations requested but provided graph %s was created with an unsupported graph engine: %s", keyspaceName, ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().get()));
            }
        } else if (isGraph(keyspace)) {
            if (isSupportedGraph(keyspace)) {
                if (config.hasPath(KEYSPACE) || config.hasPath(TABLE)) {
                    LOGGER.warn("Provided keyspace is a graph; " + "instead of schema.keyspace and schema.table, please use graph-specific options " + "such as schema.graph, schema.vertex, schema.edge, schema.from and schema.to.");
                }
            } else {
                if (schemaGenerationStrategy == SchemaGenerationStrategy.MAP_AND_WRITE) {
                    LOGGER.warn("Provided keyspace is a graph created with a legacy graph engine: " + ((DseGraphKeyspaceMetadata) keyspace).getGraphEngine().get() + "; attempting to load data into such a keyspace is not supported and " + "may put the graph in an inconsistent state.");
                }
            }
        }
    } catch (ConfigException e) {
        throw ConfigUtils.convertConfigException(e, "dsbulk.schema");
    }
}
Also used: Instant(java.time.Instant) ConfigException(com.typesafe.config.ConfigException) CQLFragment(com.datastax.oss.dsbulk.mapping.CQLFragment) IndexedMappingField(com.datastax.oss.dsbulk.mapping.IndexedMappingField) MappingField(com.datastax.oss.dsbulk.mapping.MappingField) MappedMappingField(com.datastax.oss.dsbulk.mapping.MappedMappingField) NestedBatchException(com.datastax.oss.dsbulk.workflow.commons.schema.NestedBatchException) QueryInspector(com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector) DseGraphKeyspaceMetadata(com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata) MappingInspector(com.datastax.oss.dsbulk.mapping.MappingInspector) CQLWord(com.datastax.oss.dsbulk.mapping.CQLWord)
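
The validation rules in init can be exercised with the same helpers used in Examples 1 and 2. The sketch below is illustrative only: it assumes the session, codecFactory and ks.t1 fixtures from SchemaSettingsTest as well as AssertJ's assertThatThrownBy, and it triggers the rule that schema.keyspace must not be set when schema.query already names a keyspace.

// Illustrative only: a keyspace-qualified schema.query combined with schema.keyspace
// should make init() fail with the IllegalArgumentException thrown above.
Config config =
    TestConfigUtils.createTestConfig(
        "dsbulk.schema",
        "keyspace", "ks",
        "query", StringUtils.quoteJson("INSERT INTO ks.t1 (pk, v) VALUES (:pk, :v)"));
SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
// The query already names keyspace ks, so the redundant schema.keyspace is rejected.
assertThatThrownBy(() -> settings.init(session, codecFactory, true, true))
    .isInstanceOf(IllegalArgumentException.class)
    .hasMessageContaining("schema.keyspace must not be provided");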

Example 4 with QueryInspector

Use of com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector in project dsbulk by datastax.

From the class SchemaSettings, method prepareStatementAndCreateMapping:

@NonNull
private Mapping prepareStatementAndCreateMapping(CqlSession session, boolean batchingEnabled, EnumSet<StatisticsMode> modes) {
    ImmutableMultimap<MappingField, CQLFragment> fieldsToVariables = null;
    if (!config.hasPath(QUERY)) {
        // in the absence of user-provided queries, create the mapping *before* query generation and
        // preparation
        List<CQLFragment> columns = table.getColumns().values().stream().filter(col -> !isDSESearchPseudoColumn(col)).flatMap(column -> {
            CQLWord colName = CQLWord.fromCqlIdentifier(column.getName());
            List<CQLFragment> cols = Lists.newArrayList(colName);
            if (schemaGenerationStrategy.isMapping()) {
                if (preserveTimestamp && checkWritetimeTtlSupported(column, WRITETIME)) {
                    cols.add(new FunctionCall(null, WRITETIME, colName));
                }
                if (preserveTtl && checkWritetimeTtlSupported(column, TTL)) {
                    cols.add(new FunctionCall(null, TTL, colName));
                }
            }
            return cols.stream();
        }).collect(Collectors.toList());
        fieldsToVariables = createFieldsToVariablesMap(columns);
        // query generation
        if (schemaGenerationStrategy.isWriting()) {
            if (isCounterTable()) {
                query = inferUpdateCounterQuery(fieldsToVariables);
            } else if (requiresBatchInsertQuery(fieldsToVariables)) {
                query = inferBatchInsertQuery(fieldsToVariables);
            } else {
                query = inferInsertQuery(fieldsToVariables);
            }
        } else if (schemaGenerationStrategy.isReading() && schemaGenerationStrategy.isMapping()) {
            query = inferReadQuery(fieldsToVariables);
        } else if (schemaGenerationStrategy.isReading() && schemaGenerationStrategy.isCounting()) {
            query = inferCountQuery(modes);
        } else {
            throw new IllegalStateException("Unsupported schema generation strategy: " + schemaGenerationStrategy);
        }
        LOGGER.debug("Inferred query: {}", query);
        queryInspector = new QueryInspector(query);
        // validate generated query
        if (schemaGenerationStrategy.isWriting()) {
            validatePrimaryKeyPresent(fieldsToVariables);
        }
    }
    assert query != null;
    assert queryInspector != null;
    if (!queryInspector.getKeyspaceName().isPresent()) {
        session.execute("USE " + keyspaceName);
    }
    // Transform user-provided queries before preparation
    if (config.hasPath(QUERY)) {
        if (schemaGenerationStrategy.isReading() && queryInspector.isParallelizable()) {
            int whereClauseIndex = queryInspector.getFromClauseEndIndex() + 1;
            StringBuilder sb = new StringBuilder(query.substring(0, whereClauseIndex));
            appendTokenRangeRestriction(sb);
            query = sb.append(query.substring(whereClauseIndex)).toString();
        }
        if (schemaGenerationStrategy.isCounting()) {
            if (modes.contains(StatisticsMode.partitions) || modes.contains(StatisticsMode.ranges) || modes.contains(StatisticsMode.hosts)) {
                throw new IllegalArgumentException(String.format("Cannot count with stats.modes = %s when schema.query is provided; " + "only stats.modes = [global] is allowed", modes));
            }
            // reduce row size by only selecting one column
            StringBuilder sb = new StringBuilder("SELECT ");
            sb.append(getGlobalCountSelector());
            query = sb.append(' ').append(query.substring(queryInspector.getFromClauseStartIndex())).toString();
        }
        queryInspector = new QueryInspector(query);
    }
    if (batchingEnabled && queryInspector.isBatch()) {
        preparedStatements = unwrapAndPrepareBatchChildStatements(session);
    } else {
        preparedStatements = Collections.singletonList(session.prepare(query));
    }
    if (config.hasPath(QUERY)) {
        // in the presence of user-provided queries, create the mapping *after* query preparation
        Stream<ColumnDefinitions> variables = getVariables();
        fieldsToVariables = createFieldsToVariablesMap(variables.flatMap(defs -> StreamSupport.stream(defs.spliterator(), false)).map(def -> def.getName().asInternal()).map(CQLWord::fromInternal).collect(Collectors.toList()));
        // validate user-provided query
        if (schemaGenerationStrategy.isWriting()) {
            if (mutatesOnlyStaticColumns()) {
                // DAT-414: mutations that only affect static columns are allowed
                // to skip the clustering columns, only the partition key should be present.
                validatePartitionKeyPresent(fieldsToVariables);
            } else {
                validatePrimaryKeyPresent(fieldsToVariables);
            }
        }
    }
    assert fieldsToVariables != null;
    return new DefaultMapping(transformFieldsToVariables(fieldsToVariables), codecFactory, transformWriteTimeVariables(queryInspector.getWriteTimeVariables()));
}
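
The read-path rewrite above is easiest to see on a concrete query. In the sketch below, only the use of getFromClauseEndIndex() mirrors the method; the token-range predicate text is an assumption, since appendTokenRangeRestriction is not shown on this page, and the query and table are illustrative.

String query = "SELECT pk, cc, v FROM ks.t1";
QueryInspector inspector = new QueryInspector(query);
// Splice point: one character past the end of the FROM clause, as in the method above.
int whereClauseIndex = inspector.getFromClauseEndIndex() + 1;
StringBuilder sb = new StringBuilder(query.substring(0, whereClauseIndex));
// Assumed shape of the appended restriction; the real text comes from appendTokenRangeRestriction.
sb.append(" WHERE token(pk) > :start AND token(pk) <= :end");
String restricted = sb.append(query.substring(whereClauseIndex)).toString();
// restricted would then be prepared once and executed per token range during unloading.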

Aggregations

QueryInspector (com.datastax.oss.dsbulk.workflow.commons.schema.QueryInspector): 4
DseGraphKeyspaceMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata): 2
CQLFragment (com.datastax.oss.dsbulk.mapping.CQLFragment): 2
CQLWord (com.datastax.oss.dsbulk.mapping.CQLWord): 2
IndexedMappingField (com.datastax.oss.dsbulk.mapping.IndexedMappingField): 2
MappedMappingField (com.datastax.oss.dsbulk.mapping.MappedMappingField): 2
MappingField (com.datastax.oss.dsbulk.mapping.MappingField): 2
MappingInspector (com.datastax.oss.dsbulk.mapping.MappingInspector): 2
NestedBatchException (com.datastax.oss.dsbulk.workflow.commons.schema.NestedBatchException): 2
Config (com.typesafe.config.Config): 2
ConfigException (com.typesafe.config.ConfigException): 2
Instant (java.time.Instant): 2
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 2
MethodSource (org.junit.jupiter.params.provider.MethodSource): 2
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 2
DseEdgeMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata): 1
DseGraphTableMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata): 1
DseTableMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata): 1
DseVertexMetadata (com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata): 1
CqlIdentifier (com.datastax.oss.driver.api.core.CqlIdentifier): 1