Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk by DataStax.
From the class CSVConnectorEndToEndCCMIT, method literal_mapped_to_primary_key_with_custom_query.
/**
 * Test for DAT-326.
 */
@Test
void literal_mapped_to_primary_key_with_custom_query() {
  session.execute("DROP TABLE IF EXISTS dat326d");
  session.execute(
      "CREATE TABLE IF NOT EXISTS dat326d (pk int, cc int, v int, PRIMARY KEY (pk, cc))");
  List<String> args =
      Lists.newArrayList(
          "load",
          "--log.directory",
          quoteJson(logDir),
          "-header",
          "true",
          "--connector.csv.url",
          quoteJson(getClass().getResource("/function-pk.csv")),
          "--schema.keyspace",
          session.getKeyspace().get().asInternal(),
          "--schema.query",
          "INSERT INTO dat326d (pk, cc, v) VALUES (:pk, 42, :v)");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
}
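The test above only asserts the exit status. A short follow-up check, sketched below (not part of the original test; it reuses the session and AssertJ helpers visible in the snippets on this page), would confirm that the literal 42 hard-coded in the custom query actually landed in the clustering column:

// Hypothetical verification: every row loaded through the custom query should
// carry the literal clustering value 42 from "VALUES (:pk, 42, :v)".
Row row = session.execute("SELECT cc FROM dat326d LIMIT 1").one();
assertThat(row.getInt("cc")).isEqualTo(42);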
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk by DataStax.
From the class CSVConnectorEndToEndCCMIT, method full_load_unload_counters_custom_query_positional.
@Test
void full_load_unload_counters_custom_query_positional() throws IOException {
  assumeTrue(
      (ccm.getClusterType() == Type.DSE && ccm.getVersion().compareTo(V5_1) >= 0)
          || (ccm.getClusterType() == Type.OSS && ccm.getVersion().compareTo(V3_10) >= 0),
      "UPDATE SET += syntax is only supported in C* 3.10+ and DSE 5.1+");
  session.execute("DROP TABLE IF EXISTS counters");
  session.execute(
      "CREATE TABLE counters ("
          + "pk1 int, "
          + "\"PK2\" int, "
          + "\"C1\" counter, "
          + "c2 counter, "
          + "c3 counter, "
          + "PRIMARY KEY (pk1, \"PK2\"))");
  List<String> args = new ArrayList<>();
  args.add("load");
  args.add("--connector.csv.url");
  args.add(quoteJson(getClass().getResource("/counters.csv")));
  args.add("--connector.csv.header");
  args.add("false");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.query");
  args.add(
      quoteJson("UPDATE counters SET \"C1\" += ?, c2 = c2 + ? WHERE pk1 = ? AND \"PK2\" = ?"));
  args.add("--schema.mapping");
  args.add("pk1,PK2,C1,c2");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  Row row =
      session.execute("SELECT \"C1\", c2, c3 FROM counters WHERE pk1 = 1 AND \"PK2\" = 2").one();
  assertThat(row.getLong("\"C1\"")).isEqualTo(42L);
  // c2 is present in the file (with value 0)
  assertThat(row.getLong("c2")).isZero();
  // c3 is not present in the file
  assertThat(row.isNull("c3")).isTrue();
  FileUtils.deleteDirectory(logDir);
  args = new ArrayList<>();
  args.add("unload");
  args.add("--connector.csv.url");
  args.add(quoteJson(unloadDir));
  args.add("--connector.csv.header");
  args.add("true");
  args.add("--connector.csv.maxConcurrentFiles");
  args.add("1");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.query");
  // Exercise aliased selectors and a custom mapping
  args.add(
      quoteJson(
          "SELECT pk1 AS \"Field A\", \"PK2\" AS \"Field B\", \"C1\" AS \"Field C\", "
              + "c2 AS \"Field D\", c3 AS \"Field E\" FROM counters"));
  args.add("--schema.mapping");
  args.add(quoteJson("\"Field D\",\"Field C\",\"Field B\",\"Field A\""));
  status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  validateOutputFiles(2, unloadDir);
  assertThat(FileUtils.readAllLinesInDirectoryAsStream(unloadDir))
      .containsExactly("Field A,Field B,Field C,Field D", "1,2,42,0");
}
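For readers new to this style of mapping: during the load phase above, the mapping pk1,PK2,C1,c2 assigns each CSV field, in order, to one of the query's bind variables. Inferred from the assertions (this is an illustration, not code from the test), a record 1,2,42,0 therefore produces a counter update equivalent to the sketch below; c3 is never touched, which is why it remains null:

// Illustrative equivalent of one generated statement: "C1" += 42 is shorthand
// for "C1" = "C1" + 42, and c2 is incremented by 0.
session.execute(
    "UPDATE counters SET \"C1\" = \"C1\" + 42, c2 = c2 + 0 "
        + "WHERE pk1 = 1 AND \"PK2\" = 2");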
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk by DataStax.
From the class CSVConnectorEndToEndCCMIT, method load_qualified_user_defined_functions_mapping.
/**
 * Test for DAT-379.
 */
@Test
void load_qualified_user_defined_functions_mapping() {
  assumeTrue(
      ccm.getCassandraVersion().compareTo(V2_2) >= 0,
      "User-defined functions are not compatible with C* < 2.2");
  session.execute("DROP TABLE IF EXISTS udf_table");
  session.execute(
      "CREATE TABLE udf_table (pk int PRIMARY KEY, \"Value 1\" int, \"Value 2\" int, \"SUM\" int)");
  session.execute("DROP KEYSPACE IF EXISTS \"MyKs1\"");
  session.execute(CQLUtils.createKeyspaceSimpleStrategy("MyKs1", 1));
  session.execute("DROP FUNCTION IF EXISTS \"MyKs1\".plus");
  session.execute(
      "CREATE FUNCTION \"MyKs1\".plus(s int, v int) "
          + "RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return s+v;';");
  MockConnector.mockReads(RecordUtils.mappedCSV("pk", "0", "Value 1", "1", "Value 2", "2"));
  List<String> args =
      Lists.newArrayList(
          "load",
          "--log.directory",
          quoteJson(logDir),
          "--connector.name",
          "mock",
          "--schema.keyspace",
          session.getKeyspace().get().asInternal(),
          "--schema.table",
          "udf_table",
          "--schema.mapping",
          quoteJson("* = *, \"MyKs1\".plus(\"Value 1\", \"Value 2\") = SUM"));
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  Row row = session.execute("SELECT * FROM udf_table").one();
  assertThat(row.getInt("pk")).isEqualTo(0);
  assertThat(row.getInt("\"Value 1\"")).isEqualTo(1);
  assertThat(row.getInt("\"Value 2\"")).isEqualTo(2);
  assertThat(row.getInt("\"SUM\"")).isEqualTo(3);
}
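The mapping embeds the qualified function call in the generated INSERT, so the addition is evaluated by Cassandra. The same function can also be sanity-checked directly in a selection clause; the sketch below is a hedged illustration, not part of the original test:

// Hypothetical sanity check: invoke the qualified UDF in a SELECT;
// plus("Value 1", "Value 2") = plus(1, 2) should match the "SUM" asserted above.
Row check =
    session
        .execute("SELECT \"MyKs1\".plus(\"Value 1\", \"Value 2\") AS s FROM udf_table WHERE pk = 0")
        .one();
assertThat(check.getInt("s")).isEqualTo(3);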
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk by DataStax.
From the class CSVConnectorEndToEndCCMIT, method full_load_unload_counters.
/**
 * Attempts to load and unload counter types.
 *
 * @jira_ticket DAT-292
 */
@Test
void full_load_unload_counters() throws Exception {
  session.execute("DROP TABLE IF EXISTS counters");
  session.execute(
      "CREATE TABLE counters ("
          + "pk1 int, "
          + "\"PK2\" int, "
          + "\"C1\" counter, "
          + "c2 counter, "
          + "c3 counter, "
          + "PRIMARY KEY (pk1, \"PK2\"))");
  URL resource = getClass().getResource("/counters.csv");
  List<String> args = new ArrayList<>();
  args.add("load");
  args.add("--connector.csv.url");
  args.add(quoteJson(resource));
  args.add("--connector.csv.header");
  args.add("false");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.table");
  args.add("counters");
  args.add("--schema.mapping");
  args.add("pk1,PK2,C1,c2");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  validatePositionsFile(resource, 1);
  Row row =
      session.execute("SELECT \"C1\", c2, c3 FROM counters WHERE pk1 = 1 AND \"PK2\" = 2").one();
  assertThat(row.getLong("\"C1\"")).isEqualTo(42L);
  // c2 is present in the file (with value 0)
  assertThat(row.getLong("c2")).isZero();
  // c3 is not present in the file
  assertThat(row.isNull("c3")).isTrue();
  FileUtils.deleteDirectory(logDir);
  args = new ArrayList<>();
  args.add("unload");
  args.add("--connector.csv.url");
  args.add(quoteJson(unloadDir));
  args.add("--connector.csv.header");
  args.add("false");
  args.add("--connector.csv.maxConcurrentFiles");
  args.add("1");
  args.add("--schema.keyspace");
  args.add(session.getKeyspace().get().asInternal());
  args.add("--schema.table");
  args.add("counters");
  args.add("--schema.mapping");
  args.add("pk1,PK2,C1,c2,c3");
  status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  validateOutputFiles(1, unloadDir);
  assertThat(FileUtils.readAllLinesInDirectoryAsStream(unloadDir)).containsExactly("1,2,42,0,");
}
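Because counter columns cannot be INSERTed, loading into a counter table via --schema.table means dsbulk must synthesize an UPDATE that increments each mapped counter column. The conceptual shape of that statement, inferred from the mapping pk1,PK2,C1,c2 rather than captured from the tool (an assumption, not verified dsbulk output), is sketched below; c3 is absent from the load mapping, which is why it unloads as an empty trailing field:

// Conceptual shape of the synthesized counter update (an inference):
// each mapped counter column becomes an increment keyed by the primary key.
PreparedStatement ps =
    session.prepare(
        "UPDATE counters SET \"C1\" = \"C1\" + :\"C1\", c2 = c2 + :c2 "
            + "WHERE pk1 = :pk1 AND \"PK2\" = :\"PK2\"");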
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk by DataStax.
From the class CSVConnectorEndToEndCCMIT, method unload_with_custom_query_and_function_with_header.
@Test
void unload_with_custom_query_and_function_with_header() throws IOException {
  assumeTrue(
      ccm.getCassandraVersion().compareTo(V3) >= 0,
      "CQL function toDate is not compatible with C* < 3.0");
  session.execute("DROP TABLE IF EXISTS unload_with_function1");
  session.execute(
      "CREATE TABLE IF NOT EXISTS unload_with_function1 "
          + "(pk int, cc timeuuid, v int, PRIMARY KEY (pk, cc))");
  session.execute("INSERT INTO unload_with_function1 (pk, cc, v) VALUES (0, now(), 1)");
  List<String> args =
      Lists.newArrayList(
          "unload",
          "--log.directory",
          quoteJson(logDir),
          "-header",
          "true",
          "--connector.csv.url",
          quoteJson(unloadDir),
          "--connector.csv.maxConcurrentFiles",
          "1",
          "--schema.keyspace",
          session.getKeyspace().get().asInternal(),
          "--schema.query",
          "SELECT pk, v, toDate(cc) AS date_created FROM unload_with_function1");
  ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
  assertStatus(status, STATUS_OK);
  List<String> lines =
      FileUtils.readAllLinesInDirectoryAsStream(unloadDir).collect(Collectors.toList());
  assertThat(lines).hasSize(2);
  assertThat(lines.get(0)).isEqualTo("pk,v,date_created");
  assertThat(lines.get(1)).matches("0,1,\\d{4}-\\d{2}-\\d{2}");
}
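The regex in the last assertion only checks the shape of the date. A slightly stronger variant (a sketch, assuming java.time.LocalDate is available; not part of the original test) parses the third column, so a malformed value fails with a DateTimeParseException rather than a bare regex mismatch:

// Hypothetical stronger check: toDate(cc) must yield a valid ISO-8601 date;
// LocalDate.parse throws if the field is not a real calendar date.
String dateField = lines.get(1).split(",")[2];
LocalDate dateCreated = LocalDate.parse(dateField);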