Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax: class CSVConnectorTest, method should_honor_emptyValue_when_writing2.
@Test
void should_honor_emptyValue_when_writing2() throws Exception {
  // Write a record containing an empty string and verify it is rendered
  // as the configured emptyValue placeholder ("NULL") in the output file.
  Path outputDir = Files.createTempDirectory("test");
  CSVConnector csvConnector = new CSVConnector();
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(outputDir),
          "emptyValue", "NULL",
          "header", false);
  csvConnector.configure(config, false, true);
  csvConnector.init();
  Record emptyFieldRecord = DefaultRecord.indexed("source", resource, IRRELEVANT_POSITION, "");
  Flux.just(emptyFieldRecord).transform(csvConnector.write()).blockLast();
  csvConnector.close();
  // The connector numbers output files starting at output-000001.csv.
  List<String> written = Files.readAllLines(outputDir.resolve("output-000001.csv"));
  assertThat(written).hasSize(1).containsExactly("NULL");
}
Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax: class CSVConnectorTest, method should_honor_ignoreLeadingWhitespacesInQuotes_and_ignoreTrailingWhitespacesInQuotes2.
@Test
void should_honor_ignoreLeadingWhitespacesInQuotes_and_ignoreTrailingWhitespacesInQuotes2()
    throws Exception {
  // Read a quoted field with surrounding spaces and verify both whitespace
  // trimming options are applied inside quotes, yielding the bare value.
  Path inputFile = Files.createTempFile("test", ".csv");
  Files.write(inputFile, Collections.singleton("\" foo \""));
  CSVConnector csvConnector = new CSVConnector();
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(inputFile),
          "ignoreLeadingWhitespacesInQuotes", true,
          "ignoreTrailingWhitespacesInQuotes", true,
          "header", false);
  csvConnector.configure(config, true, true);
  csvConnector.init();
  List<Record> readRecords = Flux.merge(csvConnector.read()).collectList().block();
  assertThat(readRecords).hasSize(1);
  // With header=false, fields are addressed positionally by index.
  Object firstField = readRecords.get(0).getFieldValue(new DefaultIndexedField(0));
  assertThat(firstField).isEqualTo("foo");
  csvConnector.close();
}
Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax: class CSVConnectorTest, method should_honor_nullValue_when_reading.
@ParameterizedTest
@MethodSource
void should_honor_nullValue_when_reading(String nullValue, String expected) throws Exception {
  // Read a line consisting of a single empty field (",") and verify the
  // configured nullValue setting maps it to the expected field value.
  // Arguments come from the companion @MethodSource of the same name.
  Path inputFile = Files.createTempFile("test", ".csv");
  Files.write(inputFile, Collections.singleton(","));
  CSVConnector csvConnector = new CSVConnector();
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(inputFile),
          "nullValue", StringUtils.quoteJson(nullValue),
          "header", false);
  csvConnector.configure(config, true, true);
  csvConnector.init();
  List<Record> readRecords = Flux.merge(csvConnector.read()).collectList().block();
  assertThat(readRecords).hasSize(1);
  // With header=false, fields are addressed positionally by index.
  Object firstField = readRecords.get(0).getFieldValue(new DefaultIndexedField(0));
  assertThat(firstField).isEqualTo(expected);
  csvConnector.close();
}
Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax: class CSVConnectorTest, method should_honor_ignoreLeadingWhitespaces_and_ignoreTrailingWhitespaces_when_writing.
@Test
void should_honor_ignoreLeadingWhitespaces_and_ignoreTrailingWhitespaces_when_writing()
    throws Exception {
  // Write a value with surrounding spaces and verify that, with both
  // whitespace-trimming options disabled, the spaces are preserved verbatim.
  Path out = Files.createTempDirectory("test");
  CSVConnector connector = new CSVConnector();
  Config settings =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(out),
          "ignoreLeadingWhitespaces", false,
          "ignoreTrailingWhitespaces", false,
          "maxConcurrentFiles", 1,
          "header", false);
  connector.configure(settings, false, true);
  connector.init();
  // FIX: was blockFirst(), which cancels the upstream subscription as soon as
  // the first element is emitted and can abort the write pipeline before the
  // connector flushes. blockLast() drains the flux fully, matching the other
  // write tests in this class.
  Flux.<Record>just(DefaultRecord.indexed("source", resource, IRRELEVANT_POSITION, " foo "))
      .transform(connector.write())
      .blockLast();
  connector.close();
  // The connector numbers output files starting at output-000001.csv.
  List<String> actual = Files.readAllLines(out.resolve("output-000001.csv"));
  assertThat(actual).hasSize(1).containsExactly(" foo ");
}
Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax: class CSVConnectorTest, method should_honor_ignoreLeadingWhitespaces_and_ignoreTrailingWhitespaces_when_writing2.
@Test
void should_honor_ignoreLeadingWhitespaces_and_ignoreTrailingWhitespaces_when_writing2()
    throws Exception {
  // Write a value with surrounding spaces and verify that, with both
  // whitespace-trimming options enabled, the spaces are stripped on output.
  Path outputDir = Files.createTempDirectory("test");
  CSVConnector csvConnector = new CSVConnector();
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(outputDir),
          "ignoreLeadingWhitespaces", true,
          "ignoreTrailingWhitespaces", true,
          "header", false);
  csvConnector.configure(config, false, true);
  csvConnector.init();
  Record paddedRecord = DefaultRecord.indexed("source", resource, IRRELEVANT_POSITION, " foo ");
  Flux.just(paddedRecord).transform(csvConnector.write()).blockLast();
  csvConnector.close();
  // The connector numbers output files starting at output-000001.csv.
  List<String> written = Files.readAllLines(outputDir.resolve("output-000001.csv"));
  assertThat(written).hasSize(1).containsExactly("foo");
}
Aggregations