Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk, by DataStax.
Source: class CSVConnectorEndToEndCCMIT, method unload_and_load_timestamp_ttl_case_sensitive_mapping.
@Test
void unload_and_load_timestamp_ttl_case_sensitive_mapping() throws IOException {
session.execute("DROP TABLE IF EXISTS \"UNLOAD_AND_LOAD_TIMESTAMP_TTL\"");
session.execute("CREATE TABLE \"UNLOAD_AND_LOAD_TIMESTAMP_TTL\" (key int PRIMARY KEY, \"My Value\" text)");
session.execute("INSERT INTO \"UNLOAD_AND_LOAD_TIMESTAMP_TTL\" (key, \"My Value\") VALUES (1, 'foo') " + "USING TIMESTAMP 123456789 AND TTL 123456789");
List<String> args = Lists.newArrayList("unload", "--log.directory", quoteJson(logDir), "--connector.csv.url", quoteJson(unloadDir), "--connector.csv.header", "true", "--schema.keyspace", session.getKeyspace().get().asInternal(), "--schema.table", "UNLOAD_AND_LOAD_TIMESTAMP_TTL", "--schema.mapping", quoteJson("key, \"My Value\", writetime(\"My Value\"), ttl(\"My Value\")"));
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
Stream<String> line = FileUtils.readAllLinesInDirectoryAsStreamExcludingHeaders(unloadDir);
assertThat(line).singleElement(InstanceOfAssertFactories.STRING).contains("1,foo,").contains(CodecUtils.numberToInstant(123456789, MICROSECONDS, EPOCH).toString()).containsPattern(",\\d+");
FileUtils.deleteDirectory(logDir);
session.execute("TRUNCATE \"UNLOAD_AND_LOAD_TIMESTAMP_TTL\"");
args = Lists.newArrayList("load", "--log.directory", quoteJson(logDir), "--connector.csv.url", quoteJson(unloadDir), "--connector.csv.header", "true", "--schema.keyspace", session.getKeyspace().get().asInternal(), "--schema.table", "UNLOAD_AND_LOAD_TIMESTAMP_TTL", "--schema.mapping", quoteJson("* = * , \"writetime(My Value)\" = writetime(*), \"ttl(My Value)\" = ttl(*)"));
status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
ResultSet rs = session.execute("SELECT key, \"My Value\", " + "writetime(\"My Value\") AS timestamp, " + "ttl(\"My Value\") AS ttl " + "FROM \"UNLOAD_AND_LOAD_TIMESTAMP_TTL\" WHERE key = 1");
Row row = rs.one();
assertThat(row.getLong("timestamp")).isEqualTo(123456789L);
assertThat(row.getInt("ttl")).isLessThanOrEqualTo(123456789);
}
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk, by DataStax.
Source: class CSVConnectorEndToEndCCMIT, method load_qualified_user_defined_functions_custom_query.
/**
* Test for DAT-378
*/
@Test
void load_qualified_user_defined_functions_custom_query() {
assumeTrue(ccm.getCassandraVersion().compareTo(V2_2) >= 0, "User-defined functions are not compatible with C* < 2.2");
session.execute("DROP TABLE IF EXISTS udf_table");
session.execute("CREATE TABLE udf_table (pk int PRIMARY KEY, \"Value 1\" int, \"Value 2\" int, \"SUM\" int)");
session.execute("DROP KEYSPACE IF EXISTS \"MyKs1\"");
session.execute(CQLUtils.createKeyspaceSimpleStrategy("MyKs1", 1));
session.execute("DROP FUNCTION IF EXISTS \"MyKs1\".plus");
session.execute("CREATE FUNCTION \"MyKs1\".plus(s int, v int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return s+v;';");
MockConnector.mockReads(RecordUtils.mappedCSV("pk", "0", "Value 1", "1", "Value 2", "2"));
List<String> args = Lists.newArrayList("load", "--log.directory", quoteJson(logDir), "--connector.name", "mock", "--connector.csv.maxConcurrentFiles", "1", "--schema.keyspace", session.getKeyspace().get().asInternal(), "--schema.query", quoteJson("INSERT INTO udf_table " + "(pk, \"Value 1\", \"Value 2\", \"SUM\") " + "VALUES " + "(:pk, :\"Value 1\", :\"Value 2\", \"MyKs1\".plus(:\"Value 1\", :\"Value 2\"))"));
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
Row row = session.execute("SELECT * FROM udf_table").one();
assertThat(row.getInt("pk")).isEqualTo(0);
assertThat(row.getInt("\"Value 1\"")).isEqualTo(1);
assertThat(row.getInt("\"Value 2\"")).isEqualTo(2);
assertThat(row.getInt("\"SUM\"")).isEqualTo(3);
}
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk, by DataStax.
Source: class CSVConnectorEndToEndCCMIT, method load_duplicate_headers.
/**
* Test for duplicate headers (DAT-427).
*/
@Test
void load_duplicate_headers() {
session.execute("DROP TABLE IF EXISTS test_duplicate_headers");
session.execute("CREATE TABLE test_duplicate_headers (pk int, cc int, v int, PRIMARY KEY (pk, cc))");
List<String> args = new ArrayList<>();
args.add("load");
args.add("--connector.csv.url");
args.add(ClassLoader.getSystemResource("bad_header_duplicate.csv").toExternalForm());
args.add("--schema.keyspace");
args.add(session.getKeyspace().get().asInternal());
args.add("--schema.table");
args.add("test_duplicate_headers");
args.add("--schema.mapping");
args.add("*=*");
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_ABORTED_FATAL_ERROR);
assertThat(logs).hasMessageContaining("found duplicate field name at index 1");
}
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk, by DataStax.
Source: class CSVConnectorEndToEndCCMIT, method function_mapped_to_primary_key_with_custom_query.
/**
* Test for DAT-326.
*/
@Test
void function_mapped_to_primary_key_with_custom_query() {
session.execute("DROP TABLE IF EXISTS dat326b");
session.execute("CREATE TABLE IF NOT EXISTS dat326b (pk int, cc timeuuid, v int, PRIMARY KEY (pk, cc))");
List<String> args = Lists.newArrayList("load", "--log.directory", quoteJson(logDir), "-header", "true", "--connector.csv.url", quoteJson(getClass().getResource("/function-pk.csv")), "--schema.keyspace", session.getKeyspace().get().asInternal(), "--schema.query", "INSERT INTO dat326b (pk, cc, v) VALUES (:pk, now(), :v)");
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_OK);
}
Use of com.datastax.oss.dsbulk.runner.ExitStatus in the project dsbulk, by DataStax.
Source: class CSVConnectorEndToEndCCMIT, method load_empty_headers.
/**
* Test for empty headers (DAT-427).
*/
@Test
void load_empty_headers() {
session.execute("DROP TABLE IF EXISTS test_empty_headers");
session.execute("CREATE TABLE test_empty_headers (pk int, cc int, v int, PRIMARY KEY (pk, cc))");
List<String> args = new ArrayList<>();
args.add("load");
args.add("--connector.csv.url");
args.add(ClassLoader.getSystemResource("bad_header_empty.csv").toExternalForm());
args.add("--schema.keyspace");
args.add(session.getKeyspace().get().asInternal());
args.add("--schema.table");
args.add("test_empty_headers");
args.add("--schema.mapping");
args.add("*=*");
ExitStatus status = new DataStaxBulkLoader(addCommonSettings(args)).run();
assertStatus(status, STATUS_ABORTED_FATAL_ERROR);
assertThat(logs).hasMessageContaining("found empty field name at index 1");
}
Aggregations