Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.
From class CSVConnectorEndToEndCCMIT, method unload_qualified_user_defined_functions_mapping:
/**
 * Test for DAT-378: unload with a mapping entry that calls a user-defined
 * function qualified with a quoted keyspace name.
 */
@Test
void unload_qualified_user_defined_functions_mapping() throws IOException {
  assumeTrue(
      ccm.getCassandraVersion().compareTo(V2_2) >= 0,
      "User-defined functions are not compatible with C* < 2.2");
  session.execute("DROP TABLE IF EXISTS udf_table");
  session.execute("CREATE TABLE udf_table (pk int PRIMARY KEY, \"Value 1\" int, \"Value 2\" int)");
  session.execute("INSERT INTO udf_table (pk, \"Value 1\", \"Value 2\") VALUES (0,1,2)");
  session.execute("DROP KEYSPACE IF EXISTS \"MyKs1\"");
  session.execute(CQLUtils.createKeyspaceSimpleStrategy("MyKs1", 1));
  session.execute("DROP FUNCTION IF EXISTS \"MyKs1\".plus");
  session.execute(
      "CREATE FUNCTION \"MyKs1\".plus(s int, v int) "
          + "RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return s+v;';");
  List<String> args =
      Lists.newArrayList(
          "unload",
          "--log.directory",
          quoteJson(logDir),
          "-header",
          "true",
          "--connector.csv.url",
          quoteJson(unloadDir),
          "--connector.csv.maxConcurrentFiles",
          "1",
          "--schema.keyspace",
          session.getKeyspace().get().asInternal(),
          "--schema.table",
          "udf_table",
          "--schema.mapping",
          quoteJson("* = [-pk], SUM = \"MyKs1\".plus(\"Value 1\", \"Value 2\")"));
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  List<String> lines =
      FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
  assertThat(lines).containsExactly("SUM,Value 1,Value 2", "3,1,2");
}
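The pattern above recurs in every snippet on this page: build an argument list, run DataStaxBulkLoader, and inspect the returned ExitStatus. Below is a minimal standalone sketch of that pattern. The output directory, keyspace, and flag values are hypothetical, and a real run would also need the connection settings that addCommonSettings supplies in the tests; the exit-status constant is the one asserted above.

    import com.datastax.oss.dsbulk.runner.DataStaxBulkLoader;
    import com.datastax.oss.dsbulk.runner.ExitStatus;

    public class UnloadSketch {
      public static void main(String[] unused) {
        // Hypothetical arguments modeled on the test above; adjust paths and keyspace.
        String[] args = {
          "unload",
          "--connector.csv.url", "/tmp/unload-dir",   // hypothetical output directory
          "--schema.keyspace", "ks1",                 // hypothetical keyspace
          "--schema.table", "udf_table",
          "--schema.mapping", "* = [-pk], SUM = \"MyKs1\".plus(\"Value 1\", \"Value 2\")"
        };
        ExitStatus status = new DataStaxBulkLoader(args).run();
        // STATUS_OK is the success value the tests assert; anything else
        // indicates a partial or failed operation.
        if (status != ExitStatus.STATUS_OK) {
          System.err.println("dsbulk exited with " + status);
        }
      }
    }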
Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.
From class CSVConnectorEndToEndCCMIT, method load_from_stdin_and_unload:
/**
 * Test for DAT-612: load records from stdin, then unload and verify the
 * record count written to stdout.
 */
@Test
void load_from_stdin_and_unload() throws Exception {
  InputStream stdin = System.in;
  try {
    System.setIn(CsvUtils.CSV_RECORDS.openStream());
    // load from stdin: "-" as the connector URL designates the standard input
    List<String> args = new ArrayList<>();
    args.add("load");
    args.add("--connector.csv.url");
    args.add("-");
    args.add("--connector.csv.header");
    args.add("true");
    args.add("--schema.keyspace");
    args.add(session.getKeyspace().get().asInternal());
    args.add("--schema.table");
    args.add("ip_by_country");
    args.add("--schema.mapping");
    args.add(IP_BY_COUNTRY_MAPPING_INDEXED);
    ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_OK);
    validateResultSetSize(500, "SELECT * FROM ip_by_country");
    validatePositionsFile(new URL("std:/"), 500);
    FileUtils.deleteDirectory(logDir);
    // unload back: "-" as the connector URL now designates the standard output
    args = new ArrayList<>();
    args.add("unload");
    args.add("--connector.csv.url");
    args.add("-");
    args.add("--connector.csv.header");
    args.add("false");
    args.add("--schema.keyspace");
    args.add(session.getKeyspace().get().asInternal());
    args.add("--schema.table");
    args.add("ip_by_country");
    args.add("--schema.mapping");
    args.add(IP_BY_COUNTRY_MAPPING_INDEXED);
    status = new DataStaxBulkLoader(addCommonSettings(args)).run();
    assertStatus(status, STATUS_OK);
    assertThat(stdout.getStreamLines()).hasSize(500);
  } finally {
    // restore the original stdin so subsequent tests are unaffected
    System.setIn(stdin);
  }
}
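Worth noting is the try/finally around the System.in swap: the original stream is restored even if an assertion fails, so the redirected stream cannot leak into later tests. A minimal self-contained sketch of that idiom, with hypothetical CSV content standing in for CsvUtils.CSV_RECORDS:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public class StdinSwapSketch {
      public static void main(String[] args) {
        InputStream original = System.in;
        try {
          // Hypothetical CSV payload; the test uses CsvUtils.CSV_RECORDS.openStream().
          System.setIn(new ByteArrayInputStream(
              "country_code,ip\nUS,1.2.3.4\n".getBytes(StandardCharsets.UTF_8)));
          // ... run code that reads stdin, e.g. dsbulk load with --connector.csv.url - ...
        } finally {
          // Always restore the original stream, even if the body throws.
          System.setIn(original);
        }
      }
    }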
Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.
From class CSVConnectorEndToEndCCMIT, method load_ttl_timestamp_now_in_query_and_mapping_positional_internal_names:
@Test
void load_ttl_timestamp_now_in_query_and_mapping_positional_internal_names() {
  session.execute("DROP TABLE IF EXISTS table_ttl_timestamp");
  session.execute(
      "CREATE TABLE table_ttl_timestamp (key int PRIMARY KEY, value text, loaded_at timeuuid)");
  List<String> args =
      Lists.newArrayList(
          "load",
          "--log.directory",
          quoteJson(logDir),
          "--connector.csv.ignoreLeadingWhitespaces",
          "true",
          "--connector.csv.ignoreTrailingWhitespaces",
          "true",
          "--connector.csv.url",
          ClassLoader.getSystemResource("ttl-timestamp.csv").toExternalForm(),
          "--schema.keyspace",
          session.getKeyspace().get().asInternal(),
          "--schema.query",
          "insert into table_ttl_timestamp (key, value, loaded_at) "
              + "values (?, ?, now()) using ttl ? and timestamp ?",
          "--schema.mapping",
          // using internal names directly in the mapping should work too
          quoteJson("*=*, created_at = \"[timestamp]\", time_to_live = \"[ttl]\""));
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  assertTTLAndTimestamp();
}
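The helper assertTTLAndTimestamp() is not shown in this snippet; presumably it checks the cell-level TTL and write time that the mapping's [ttl] and [timestamp] internal names bound from the CSV. A hedged sketch of what such a check could look like with the standard CQL TTL() and WRITETIME() functions, reusing the same session and AssertJ imports as the test (the key value 1 is hypothetical):

    // Hypothetical verification, assuming a row with key = 1 was loaded.
    Row row =
        session.execute(
                "SELECT TTL(value), WRITETIME(value) FROM table_ttl_timestamp WHERE key = 1")
            .one();
    assertThat(row).isNotNull();
    assertThat(row.getInt(0)).isGreaterThan(0);  // a TTL was applied to the cell
    assertThat(row.getLong(1)).isNotZero();      // an explicit write timestamp was applied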
Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.
From class CSVConnectorEndToEndCCMIT, method delete_column_with_custom_query:
@Test
void delete_column_with_custom_query() {
  session.execute("DROP TABLE IF EXISTS test_delete");
  session.execute(
      "CREATE TABLE IF NOT EXISTS test_delete (pk int, cc int, value int, PRIMARY KEY (pk, cc))");
  session.execute("INSERT INTO test_delete (pk, cc, value) VALUES (1,1,1)");
  session.execute("INSERT INTO test_delete (pk, cc, value) VALUES (1,2,2)");
  MockConnector.mockReads(RecordUtils.mappedCSV("pk", "1", "cc", "1"));
  List<String> args = new ArrayList<>();
  args.add("load");
  args.add("--connector.name");
  args.add("mock");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.query");
  args.add("DELETE value FROM test_delete WHERE pk = ? and cc = ?");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  // the deleted cell is now null, but the row itself is still present
  ResultSet rs1 = session.execute("SELECT value FROM test_delete WHERE pk = 1 AND cc = 1");
  Row row1 = rs1.one();
  assertThat(row1).isNotNull();
  assertThat(row1.isNull(0)).isTrue();
  // the other row is untouched
  ResultSet rs2 = session.execute("SELECT value FROM test_delete WHERE pk = 1 AND cc = 2");
  Row row2 = rs2.one();
  assertThat(row2).isNotNull();
  assertThat(row2.isNull(0)).isFalse();
}
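The two assertions hinge on CQL delete semantics: naming a column in the DELETE removes only that cell, while a DELETE with no column list removes the whole row. For contrast, in the same test context:

    // Column-level delete: the cell becomes null, the row survives (as asserted above).
    session.execute("DELETE value FROM test_delete WHERE pk = 1 AND cc = 1");
    // Row-level delete: no column list, so the entire row is removed.
    session.execute("DELETE FROM test_delete WHERE pk = 1 AND cc = 2");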
Use of com.datastax.oss.dsbulk.runner.ExitStatus in project dsbulk by DataStax.
From class PlainTextAuthEndToEndCCMIT, method full_load_unload_legacy_settings:
@ParameterizedTest(name = "[{index}] inferAuthProvider = {0}")
@ValueSource(strings = {"true", "false"})
void full_load_unload_legacy_settings(boolean inferAuthProvider) throws Exception {
  // load with the legacy dsbulk.driver.auth.* settings
  List<String> args = new ArrayList<>();
  args.add("load");
  args.add("--connector.csv.url");
  args.add(StringUtils.quoteJson(CsvUtils.CSV_RECORDS_UNIQUE));
  args.add("--connector.csv.header");
  args.add("false");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.table");
  args.add("ip_by_country");
  args.add("--schema.mapping");
  args.add(IP_BY_COUNTRY_MAPPING_INDEXED);
  if (!inferAuthProvider) {
    args.add("--driver.auth.provider");
    args.add("PlainTextAuthProvider");
  }
  args.add("--driver.auth.username");
  args.add("cassandra");
  args.add("--driver.auth.password");
  args.add("cassandra");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  validateResultSetSize(24, "SELECT * FROM ip_by_country");
  assertThat(logs)
      .hasMessageContaining(
          "Setting dsbulk.driver.auth.* is deprecated and will be removed in a future release; "
              + "please configure the driver directly using "
              + "--datastax-java-driver.advanced.auth-provider.* instead");
  assertThat(stderr.getStreamAsString())
      .contains(
          "Setting dsbulk.driver.auth.* is deprecated and will be removed in a future release; "
              + "please configure the driver directly using "
              + "--datastax-java-driver.advanced.auth-provider.* instead");
  if (inferAuthProvider) {
    assertThat(logs)
        .hasMessageContaining(
            "Username and password provided but auth provider not specified, "
                + "inferring PlainTextAuthProvider");
    assertThat(stderr.getStreamAsString())
        .contains(
            "Username and password provided but auth provider not specified, "
                + "inferring PlainTextAuthProvider");
  }
  FileUtils.deleteDirectory(logDir);
  logs.clear();
  // unload with the same legacy settings and expect the same warnings
  args = new ArrayList<>();
  args.add("unload");
  args.add("--connector.csv.url");
  args.add(StringUtils.quoteJson(unloadDir));
  args.add("--connector.csv.header");
  args.add("false");
  args.add("--connector.csv.maxConcurrentFiles");
  args.add("1");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.table");
  args.add("ip_by_country");
  if (!inferAuthProvider) {
    args.add("--driver.auth.provider");
    args.add("PlainTextAuthProvider");
  }
  args.add("--driver.auth.username");
  args.add("cassandra");
  args.add("--driver.auth.password");
  args.add("cassandra");
  status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  validateOutputFiles(24, unloadDir);
  assertThat(logs)
      .hasMessageContaining(
          "Setting dsbulk.driver.auth.* is deprecated and will be removed in a future release; "
              + "please configure the driver directly using "
              + "--datastax-java-driver.advanced.auth-provider.* instead");
  assertThat(stderr.getStreamAsString())
      .contains(
          "Setting dsbulk.driver.auth.* is deprecated and will be removed in a future release; "
              + "please configure the driver directly using "
              + "--datastax-java-driver.advanced.auth-provider.* instead");
  if (inferAuthProvider) {
    assertThat(logs)
        .hasMessageContaining(
            "Username and password provided but auth provider not specified, "
                + "inferring PlainTextAuthProvider");
    assertThat(stderr.getStreamAsString())
        .contains(
            "Username and password provided but auth provider not specified, "
                + "inferring PlainTextAuthProvider");
  }
}
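The deprecation warning points at the driver's own configuration tree rather than dsbulk's legacy driver.auth.* keys. A sketch of the non-legacy equivalent using the --datastax-java-driver prefix named in the warning; the exact option paths follow the Java driver's advanced.auth-provider settings and should be checked against the driver documentation:

    // Same args-building pattern as above, but with the driver's own settings.
    args.add("--datastax-java-driver.advanced.auth-provider.class");
    args.add("PlainTextAuthProvider");
    args.add("--datastax-java-driver.advanced.auth-provider.username");
    args.add("cassandra");
    args.add("--datastax-java-driver.advanced.auth-provider.password");
    args.add("cassandra");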