Example 26 with ExitStatus

Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.

From class CSVConnectorEndToEndCCMIT, method load_ttl_timestamp_now_in_query_and_mapping_external_names_unset_values.

@Test
void load_ttl_timestamp_now_in_query_and_mapping_external_names_unset_values() {
    assumeTrue(protocolVersion.getCode() >= DefaultProtocolVersion.V4.getCode(), "Unset values are not compatible with protocol version < 4");
    session.execute("DROP TABLE IF EXISTS table_ttl_timestamp");
    session.execute("CREATE TABLE table_ttl_timestamp (key int PRIMARY KEY, value text, loaded_at timeuuid)");
    List<String> args =
        Lists.newArrayList(
            "load",
            "--log.directory", quoteJson(logDir),
            "--connector.csv.ignoreLeadingWhitespaces", "true",
            "--connector.csv.ignoreTrailingWhitespaces", "true",
            "--connector.csv.url", ClassLoader.getSystemResource("ttl-timestamp-unset.csv").toExternalForm(),
            "--driver.pooling.local.connections", "1",
            "--schema.keyspace", session.getKeyspace().get().asInternal(),
            "--schema.query",
            "insert into table_ttl_timestamp (key, value, loaded_at) values (:key, :value, now()) using ttl :t1 and timestamp :t2",
            "--schema.mapping",
            // created_at and time_to_live map to :t2 and :t1 respectively
            "*=*, created_at = __timestamp, time_to_live = __ttl");
    ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_OK);
    assertTTLAndTimestampUnsetValues();
    assertThat(logs).hasMessageContaining("The special __ttl mapping token has been deprecated").hasMessageContaining("The special __timestamp mapping token has been deprecated");
    logs.clear();
}
Also used: ExitStatus(com.datastax.oss.dsbulk.runner.ExitStatus) DataStaxBulkLoader(com.datastax.oss.dsbulk.runner.DataStaxBulkLoader) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
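
All of these examples follow the same pattern: build a dsbulk argument list, run it through DataStaxBulkLoader, and assert on the ExitStatus returned by run() rather than on a process exit code. A minimal sketch of that pattern outside the test harness is shown below; the runLoad helper and its hard-coded arguments are illustrative, and it assumes that the DataStaxBulkLoader constructor accepts the argument array directly and that STATUS_OK is a constant of the ExitStatus enum, as the tests' static imports suggest.

import com.datastax.oss.dsbulk.runner.DataStaxBulkLoader;
import com.datastax.oss.dsbulk.runner.ExitStatus;

public class LoadRunner {

    // Hypothetical helper: runs a dsbulk load and reports whether it completed cleanly.
    static boolean runLoad(String csvUrl, String keyspace, String table) {
        String[] args = {
            "load",
            "--connector.csv.url", csvUrl,
            "--schema.keyspace", keyspace,
            "--schema.table", table
        };
        // run() returns an ExitStatus rather than calling System.exit(), which is what
        // allows the tests above to assert on the outcome of each load or unload.
        ExitStatus status = new DataStaxBulkLoader(args).run();
        return status == ExitStatus.STATUS_OK;
    }
}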

Example 27 with ExitStatus

Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.

From class CSVConnectorEndToEndCCMIT, method load_ttl_timestamp_now_in_query_and_mapping_positional_external_names_unset_values.

@Test
void load_ttl_timestamp_now_in_query_and_mapping_positional_external_names_unset_values() {
    assumeTrue(protocolVersion.getCode() >= DefaultProtocolVersion.V4.getCode(), "Unset values are not compatible with protocol version < 4");
    session.execute("DROP TABLE IF EXISTS table_ttl_timestamp");
    session.execute("CREATE TABLE table_ttl_timestamp (key int PRIMARY KEY, value text, loaded_at timeuuid)");
    List<String> args =
        Lists.newArrayList(
            "load",
            "--log.directory", quoteJson(logDir),
            "--connector.csv.ignoreLeadingWhitespaces", "true",
            "--connector.csv.ignoreTrailingWhitespaces", "true",
            "--connector.csv.url", ClassLoader.getSystemResource("ttl-timestamp-unset.csv").toExternalForm(),
            "--driver.pooling.local.connections", "1",
            "--schema.keyspace", session.getKeyspace().get().asInternal(),
            "--schema.query",
            "insert into table_ttl_timestamp (key, value, loaded_at) values (?, ?, now()) using ttl ? and timestamp ?",
            "--schema.mapping",
            quoteJson("*=*, created_at = \"[timestamp]\", time_to_live = \"[ttl]\""));
    ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_OK);
    assertTTLAndTimestampUnsetValues();
}
Also used: ExitStatus(com.datastax.oss.dsbulk.runner.ExitStatus) DataStaxBulkLoader(com.datastax.oss.dsbulk.runner.DataStaxBulkLoader) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)

Example 28 with ExitStatus

Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.

From class CSVConnectorEndToEndCCMIT, method missing_key_with_custom_query.

@Test
void missing_key_with_custom_query() {
    List<String> args = new ArrayList<>();
    args.add("load");
    args.add("--connector.csv.url");
    args.add(quoteJson(CsvUtils.CSV_RECORDS_UNIQUE));
    args.add("--connector.csv.header");
    args.add("false");
    args.add("--schema.keyspace");
    args.add(session.getKeyspace().get().asInternal());
    args.add("--schema.query");
    args.add(INSERT_INTO_IP_BY_COUNTRY);
    args.add("--schema.mapping");
    args.add("0=beginning_ip_address,1=ending_ip_address,2=beginning_ip_number,3=ending_ip_number, 5=country_name");
    ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_ABORTED_FATAL_ERROR);
    validateErrorMessageLogged("Missing required primary key column country_code");
}
Also used: ExitStatus(com.datastax.oss.dsbulk.runner.ExitStatus) ArrayList(java.util.ArrayList) DataStaxBulkLoader(com.datastax.oss.dsbulk.runner.DataStaxBulkLoader) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
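
Unlike the previous examples, this one expects a failure: the mapping skips the primary key column country_code, so the load aborts and run() returns STATUS_ABORTED_FATAL_ERROR rather than STATUS_OK. An application embedding the loader could branch on the returned status in the same way; the sketch below uses only status constants that appear in these tests and assumes they are members of the ExitStatus enum, so it is an illustration rather than dsbulk's documented error-handling contract.

ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
// Translate the enum into a coarse pass/fail decision for an embedding application.
switch (status) {
    case STATUS_OK:
        System.out.println("Load completed without errors");
        break;
    case STATUS_COMPLETED_WITH_ERRORS:
        System.out.println("Load completed, but some records were rejected");
        break;
    case STATUS_ABORTED_FATAL_ERROR:
    default:
        System.err.println("Load aborted: " + status);
        break;
}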

Example 29 with ExitStatus

Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.

From class CSVConnectorEndToEndCCMIT, method unload_user_defined_functions_mapping.

@Test
void unload_user_defined_functions_mapping() throws IOException {
    assumeTrue(ccm.getCassandraVersion().compareTo(V2_2) >= 0, "User-defined functions are not compatible with C* < 2.2");
    session.execute("DROP TABLE IF EXISTS udf_table");
    session.execute("CREATE TABLE udf_table (pk int PRIMARY KEY, \"Value 1\" int, \"Value 2\" int)");
    session.execute("INSERT INTO udf_table (pk, \"Value 1\", \"Value 2\") VALUES (0,1,2)");
    session.execute("DROP FUNCTION IF EXISTS plus");
    session.execute("CREATE FUNCTION plus(s int, v int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return s+v;';");
    List<String> args =
        Lists.newArrayList(
            "unload",
            "--log.directory", quoteJson(logDir),
            "-header", "true",
            "--connector.csv.url", quoteJson(unloadDir),
            "--connector.csv.maxConcurrentFiles", "1",
            "--schema.keyspace", session.getKeyspace().get().asInternal(),
            "--schema.table", "udf_table",
            "--schema.mapping",
            quoteJson("* = [-pk], SUM = plus(\"Value 1\", \"Value 2\")"));
    ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_OK);
    List<String> lines = FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
    assertThat(lines).containsExactly("SUM,Value 1,Value 2", "3,1,2");
}
Also used: ExitStatus(com.datastax.oss.dsbulk.runner.ExitStatus) DataStaxBulkLoader(com.datastax.oss.dsbulk.runner.DataStaxBulkLoader) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
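
The mapping here does two things: "* = [-pk]" exports every column except pk, and SUM = plus("Value 1", "Value 2") adds a column computed by the user-defined function, which is why the unloaded header is SUM,Value 1,Value 2. As a quick sanity check of the function itself, the same value can be computed directly in CQL; the snippet below is a sketch that reuses the session and the row inserted above and is not part of the original test.

// Sketch: call the UDF directly to confirm it produces the value unloaded into SUM.
Row row = session.execute(
    "SELECT plus(\"Value 1\", \"Value 2\") AS sum FROM udf_table WHERE pk = 0").one();
// The row inserted above holds 1 and 2, so the function should return 3.
assertThat(row.getInt("sum")).isEqualTo(3);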

Example 30 with ExitStatus

Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.

From class CSVConnectorEndToEndCCMIT, method should_respect_query_variables_order.

/**
 * Test for DAT-253.
 */
@Test
void should_respect_query_variables_order() throws Exception {
    session.execute("DROP TABLE IF EXISTS mapping");
    session.execute("CREATE TABLE IF NOT EXISTS mapping (key int PRIMARY KEY, value varchar)");
    List<String> args = new ArrayList<>();
    args.add("load");
    args.add("--connector.csv.url");
    args.add(ClassLoader.getSystemResource("invalid-mapping.csv").toExternalForm());
    args.add("--connector.csv.header");
    args.add("false");
    args.add("--schema.keyspace");
    args.add(session.getKeyspace().get().asInternal());
    args.add("--schema.query");
    // 0 = value, 1 = key
    args.add("INSERT INTO mapping (value, key) VALUES (?, ?)");
    ExitStatus loadStatus = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(loadStatus, STATUS_COMPLETED_WITH_ERRORS);
    assertThat(logs).hasMessageContaining("At least 1 record does not match the provided schema.mapping or schema.query");
    FileUtils.deleteDirectory(logDir);
    args = new ArrayList<>();
    args.add("unload");
    args.add("--connector.csv.url");
    args.add(quoteJson(unloadDir));
    args.add("--connector.csv.header");
    args.add("false");
    args.add("--connector.csv.maxConcurrentFiles");
    args.add("1");
    args.add("--schema.keyspace");
    args.add(session.getKeyspace().get().asInternal());
    args.add("--schema.query");
    // the columns should be exported as they appear in the SELECT clause, so 'value,key' and not
    // 'key,value'
    args.add("SELECT value, key FROM mapping");
    ExitStatus unloadStatus = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(unloadStatus, STATUS_OK);
    List<String> lines = FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
    assertThat(lines).hasSize(2).contains("ok1,1").contains("ok2,2");
}
Also used: ExitStatus(com.datastax.oss.dsbulk.runner.ExitStatus) ArrayList(java.util.ArrayList) DataStaxBulkLoader(com.datastax.oss.dsbulk.runner.DataStaxBulkLoader) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) Test(org.junit.jupiter.api.Test)
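
DAT-253 is about ordering: dsbulk must bind variables and export columns in the order they appear in the user's query, not in the table's column order. That is why the unloaded lines are "ok1,1" and "ok2,2", with value first, matching SELECT value, key. The snippet below is an illustrative check of that ordering against the driver, assuming the rows asserted above; it is not part of the original test.

// Sketch: the driver also exposes columns in SELECT-clause order, so for this query
// index 0 is value and index 1 is key (row contents taken from the assertions above).
Row row = session.execute("SELECT value, key FROM mapping WHERE key = 1").one();
assertThat(row.getString(0)).isEqualTo("ok1");
assertThat(row.getInt(1)).isEqualTo(1);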

Aggregations

DataStaxBulkLoader (com.datastax.oss.dsbulk.runner.DataStaxBulkLoader): 165
ExitStatus (com.datastax.oss.dsbulk.runner.ExitStatus): 165
Test (org.junit.jupiter.api.Test): 142
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 135
ArrayList (java.util.ArrayList): 75
Row (com.datastax.oss.driver.api.core.cql.Row): 30
RequestPrime (com.datastax.oss.simulacron.common.cluster.RequestPrime): 30
Prime (com.datastax.oss.simulacron.common.stubbing.Prime): 30
CqlIdentifier (com.datastax.oss.driver.api.core.CqlIdentifier): 22
ResultSet (com.datastax.oss.driver.api.core.cql.ResultSet): 14
MethodSource (org.junit.jupiter.params.provider.MethodSource): 10
Column (com.datastax.oss.dsbulk.tests.simulacron.SimulacronUtils.Column): 9
Table (com.datastax.oss.dsbulk.tests.simulacron.SimulacronUtils.Table): 9
URL (java.net.URL): 9
EndToEndUtils.primeIpByCountryTable (com.datastax.oss.dsbulk.runner.tests.EndToEndUtils.primeIpByCountryTable): 8
CsvSource (org.junit.jupiter.params.provider.CsvSource): 7
Record (com.datastax.oss.dsbulk.connectors.api.Record): 6
SimulacronUtils (com.datastax.oss.dsbulk.tests.simulacron.SimulacronUtils): 5
Keyspace (com.datastax.oss.dsbulk.tests.simulacron.SimulacronUtils.Keyspace): 5
Path (java.nio.file.Path): 5