use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by datastax.
the class CSVConnectorEndToEndCCMIT method load_ttl_timestamp_now_in_query_and_mapping_external_names_unset_values.
@Test
void load_ttl_timestamp_now_in_query_and_mapping_external_names_unset_values() {
assumeTrue(
    protocolVersion.getCode() >= DefaultProtocolVersion.V4.getCode(),
    "Unset values are not compatible with protocol version < 4");
session.execute("DROP TABLE IF EXISTS table_ttl_timestamp");
session.execute("CREATE TABLE table_ttl_timestamp (key int PRIMARY KEY, value text, loaded_at timeuuid)");
List<String> args = Lists.newArrayList("load", "--log.directory", quoteJson(logDir), "--connector.csv.ignoreLeadingWhitespaces", "true", "--connector.csv.ignoreTrailingWhitespaces", "true", "--connector.csv.url", ClassLoader.getSystemResource("ttl-timestamp-unset.csv").toExternalForm(), "--driver.pooling.local.connections", "1", "--schema.keyspace", session.getKeyspace().get().asInternal(), "--schema.query", "insert into table_ttl_timestamp (key, value, loaded_at) values (:key, :value, now()) using ttl :t1 and timestamp :t2", "--schema.mapping", // map to t2 and t1 respectively
"*=*, created_at = __timestamp, time_to_live = __ttl");
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
assertTTLAndTimestampUnsetValues();
assertThat(logs)
    .hasMessageContaining("The special __ttl mapping token has been deprecated")
    .hasMessageContaining("The special __timestamp mapping token has been deprecated");
logs.clear();
}
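The helper assertTTLAndTimestampUnsetValues() is not shown in this snippet. A minimal sketch of the kind of check it could perform, assuming a row with key 1 was loaded with both :t1 and :t2 left unset (the key value and the exact assertions are illustrative, not taken from the project):

// Sketch only: TTL() and WRITETIME() expose per-cell liveness metadata in CQL.
// A row written with unset TTL/timestamp binds has no TTL and a
// server-assigned write time.
Row row =
    session
        .execute("SELECT TTL(value), WRITETIME(value) FROM table_ttl_timestamp WHERE key = 1")
        .one();
assertThat(row.isNull(0)).isTrue(); // no TTL applied
assertThat(row.isNull(1)).isFalse(); // write time assigned by the server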
use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by datastax.
the class CSVConnectorEndToEndCCMIT method load_ttl_timestamp_now_in_query_and_mapping_positional_external_names_unset_values.
@Test
void load_ttl_timestamp_now_in_query_and_mapping_positional_external_names_unset_values() {
assumeTrue(
    protocolVersion.getCode() >= DefaultProtocolVersion.V4.getCode(),
    "Unset values are not compatible with protocol version < 4");
session.execute("DROP TABLE IF EXISTS table_ttl_timestamp");
session.execute("CREATE TABLE table_ttl_timestamp (key int PRIMARY KEY, value text, loaded_at timeuuid)");
List<String> args =
    Lists.newArrayList(
        "load",
        "--log.directory", quoteJson(logDir),
        "--connector.csv.ignoreLeadingWhitespaces", "true",
        "--connector.csv.ignoreTrailingWhitespaces", "true",
        "--connector.csv.url",
        ClassLoader.getSystemResource("ttl-timestamp-unset.csv").toExternalForm(),
        "--driver.pooling.local.connections", "1",
        "--schema.keyspace", session.getKeyspace().get().asInternal(),
        "--schema.query",
        "insert into table_ttl_timestamp (key, value, loaded_at) "
            + "values (?, ?, now()) using ttl ? and timestamp ?",
        "--schema.mapping",
        quoteJson("*=*, created_at = \"[timestamp]\", time_to_live = \"[ttl]\""));
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
assertTTLAndTimestampUnsetValues();
}
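Both tests read the same fixture, ttl-timestamp-unset.csv, whose contents are not part of this snippet. A hypothetical sketch of the shape such a file would need: a header row matching the mapping, with empty created_at/time_to_live fields that DSBulk turns into unset bind values under protocol v4+. Requires java.nio.file.Files, java.nio.file.Paths and java.util.Arrays:

// Hypothetical fixture shape only; the real resource file may differ.
Files.write(
    Paths.get("ttl-timestamp-unset.csv"),
    Arrays.asList(
        "key,value,created_at,time_to_live",
        "1,foo,,", // timestamp and TTL both empty, i.e. unset
        "2,bar,1564010100000000,1000")); // timestamp in microseconds, TTL in seconds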
use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by datastax.
the class CSVConnectorEndToEndCCMIT method missing_key_with_custom_query.
@Test
void missing_key_with_custom_query() {
List<String> args = new ArrayList<>();
args.add("load");
args.add("--connector.csv.url");
args.add(quoteJson(CsvUtils.CSV_RECORDS_UNIQUE));
args.add("--connector.csv.header");
args.add("false");
args.add("--schema.keyspace");
args.add(session.getKeyspace().get().asInternal());
args.add("--schema.query");
args.add(INSERT_INTO_IP_BY_COUNTRY);
args.add("--schema.mapping");
args.add("0=beginning_ip_address,1=ending_ip_address,2=beginning_ip_number,3=ending_ip_number, 5=country_name");
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_ABORTED_FATAL_ERROR);
validateErrorMessageLogged("Missing required primary key column country_code");
}
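For contrast, a sketch of a mapping that would satisfy the primary key, assuming country_code is field 4 of the file (the index the failing mapping skips over; this is an assumption, not shown in the project):

// Assumption: field 4 holds country_code. With every primary key column
// mapped, the same load would no longer abort.
args.set(
    args.indexOf("--schema.mapping") + 1,
    "0=beginning_ip_address,1=ending_ip_address,2=beginning_ip_number,"
        + "3=ending_ip_number,4=country_code,5=country_name");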
use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by datastax.
the class CSVConnectorEndToEndCCMIT method unload_user_defined_functions_mapping.
@Test
void unload_user_defined_functions_mapping() throws IOException {
assumeTrue(
    ccm.getCassandraVersion().compareTo(V2_2) >= 0,
    "User-defined functions are not compatible with C* < 2.2");
session.execute("DROP TABLE IF EXISTS udf_table");
session.execute("CREATE TABLE udf_table (pk int PRIMARY KEY, \"Value 1\" int, \"Value 2\" int)");
session.execute("INSERT INTO udf_table (pk, \"Value 1\", \"Value 2\") VALUES (0,1,2)");
session.execute("DROP FUNCTION IF EXISTS plus");
session.execute("CREATE FUNCTION plus(s int, v int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return s+v;';");
List<String> args =
    Lists.newArrayList(
        "unload",
        "--log.directory", quoteJson(logDir),
        "-header", "true",
        "--connector.csv.url", quoteJson(unloadDir),
        "--connector.csv.maxConcurrentFiles", "1",
        "--schema.keyspace", session.getKeyspace().get().asInternal(),
        "--schema.table", "udf_table",
        "--schema.mapping",
        quoteJson("* = [-pk], SUM = plus(\"Value 1\", \"Value 2\")"));
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
List<String> lines = FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
assertThat(lines).containsExactly("SUM,Value 1,Value 2", "3,1,2");
}
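The unload mapping excludes pk (* = [-pk]) and adds a computed SUM column by invoking the UDF. As a side note, the same function can also be called directly in CQL; a quick sanity-check sketch, not part of the original test:

// Sketch only: invokes the same plus() UDF the unload mapping uses.
Row row =
    session
        .execute("SELECT plus(\"Value 1\", \"Value 2\") AS s FROM udf_table WHERE pk = 0")
        .one();
assertThat(row.getInt("s")).isEqualTo(3); // 1 + 2, matching the unloaded SUM column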
use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by datastax.
the class CSVConnectorEndToEndCCMIT method should_respect_query_variables_order.
/**
* Test for DAT-253.
*/
@Test
void should_respect_query_variables_order() throws Exception {
session.execute("DROP TABLE IF EXISTS mapping");
session.execute("CREATE TABLE IF NOT EXISTS mapping (key int PRIMARY KEY, value varchar)");
List<String> args = new ArrayList<>();
args.add("load");
args.add("--connector.csv.url");
args.add(ClassLoader.getSystemResource("invalid-mapping.csv").toExternalForm());
args.add("--connector.csv.header");
args.add("false");
args.add("--schema.keyspace");
args.add(session.getKeyspace().get().asInternal());
args.add("--schema.query");
// 0 = value, 1 = key
args.add("INSERT INTO mapping (value, key) VALUES (?, ?)");
ExitStatus loadStatus = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(loadStatus, STATUS_COMPLETED_WITH_ERRORS);
assertThat(logs).hasMessageContaining("At least 1 record does not match the provided schema.mapping or schema.query");
FileUtils.deleteDirectory(logDir);
args = new ArrayList<>();
args.add("unload");
args.add("--connector.csv.url");
args.add(quoteJson(unloadDir));
args.add("--connector.csv.header");
args.add("false");
args.add("--connector.csv.maxConcurrentFiles");
args.add("1");
args.add("--schema.keyspace");
args.add(session.getKeyspace().get().asInternal());
args.add("--schema.query");
// the columns should be exported as they appear in the SELECT clause, so 'value,key' and not
// 'key,value'
args.add("SELECT value, key FROM mapping");
ExitStatus unloadStatus = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(unloadStatus, STATUS_OK);
List<String> lines = FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
assertThat(lines).hasSize(2).contains("ok1,1").contains("ok2,2");
}
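The behavior under test comes from how bind variables are ordered: they follow their position in the query string, not the table definition. A minimal sketch with the same INSERT, illustrative only (PreparedStatement is the driver's com.datastax.oss.driver.api.core.cql.PreparedStatement):

// Sketch only: the first bind variable is 'value' (varchar), the second is
// 'key' (int); positional record fields must follow that order, which is why
// reversed rows in invalid-mapping.csv fail to convert during the load above.
PreparedStatement ps = session.prepare("INSERT INTO mapping (value, key) VALUES (?, ?)");
session.execute(ps.bind("ok3", 3)); // value first, key second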