Use of com.datastax.oss.dsbulk.connectors.api.Field in the dsbulk project by DataStax.
The method createRecords in class JsonConnectorTest.
/**
 * Builds five sample {@link Record}s (positions 1 through 5) from hard-coded JSON documents
 * describing cars, each mapped to the fields Year, Make, Model, Description and Price.
 *
 * @param retainRecordSources whether each record should keep a reference to the {@link JsonNode}
 *     it was parsed from; when {@code false} the record source is {@code null}.
 * @param resource the URI identifying the resource the records originate from.
 * @return the list of mapped records, in document order.
 * @throws UncheckedIOException if any of the hard-coded JSON documents fails to parse
 *     (should never happen since the documents are constants).
 */
private List<Record> createRecords(boolean retainRecordSources, URI resource) {
  Field[] fields = {
    new DefaultMappedField("Year"),
    new DefaultMappedField("Make"),
    new DefaultMappedField("Model"),
    new DefaultMappedField("Description"),
    new DefaultMappedField("Price")
  };
  // The five sample documents, kept in position order (record positions are 1-based).
  String[] documents = {
    "{\n"
        + "\"Year\": 1997,\n"
        + "\"Make\": \"Ford\",\n"
        + "\"Model\": \"E350\",\n"
        + "\"Description\": \"ac, abs, moon\",\n"
        + "\"Price\": 3000.0\n"
        + "}",
    "{\n"
        + "\"Year\": 1999,\n"
        + "\"Make\": \"Chevy\",\n"
        + "\"Model\": \"Venture \\\"Extended Edition\\\"\",\n"
        + "\"Description\": null,\n"
        + "\"Price\": 4900.0\n"
        + "}",
    "{\n"
        + "\"Year\": 1996,\n"
        + "\"Make\": \"Jeep\",\n"
        + "\"Model\": \"Grand Cherokee\",\n"
        + "\"Description\": \"MUST SELL!\\nair, moon roof, loaded\",\n"
        + "\"Price\": 4799.0\n"
        + "}",
    "{\n"
        + "\"Year\": 1999,\n"
        + "\"Make\": \"Chevy\",\n"
        + "\"Model\": \"Venture \\\"Extended Edition, Very Large\\\"\",\n"
        + "\"Description\": null,\n"
        + "\"Price\": 5000.0\n"
        + "}",
    "{\n"
        + "\"Year\": null,\n"
        + "\"Make\": null,\n"
        + "\"Model\": \"Venture \\\"Extended Edition\\\"\",\n"
        + "\"Description\": null,\n"
        + "\"Price\": 4900.0\n"
        + "}"
  };
  List<Record> records = new ArrayList<>(documents.length);
  for (int i = 0; i < documents.length; i++) {
    JsonNode source;
    try {
      source = objectMapper.readTree(documents[i]);
    } catch (JsonProcessingException e) {
      // The documents are constants, so a parse failure is a programming error.
      throw new UncheckedIOException(e);
    }
    records.add(
        DefaultRecord.mapped(
            retainRecordSources ? source : null,
            resource,
            i + 1, // record positions are 1-based
            fields,
            source.get("Year"),
            source.get("Make"),
            source.get("Model"),
            source.get("Description"),
            source.get("Price")));
  }
  return records;
}
Use of com.datastax.oss.dsbulk.connectors.api.Field in the dsbulk project by DataStax.
The method should_honor_nullValue_when_writing in class CSVConnectorTest.
/**
 * Verifies that the configured {@code nullValue} replacement string is written in place of a
 * null field value when the connector writes CSV output.
 *
 * @param nullValue the replacement string configured for null field values.
 * @param expected the single CSV line the connector is expected to produce.
 */
@ParameterizedTest
@MethodSource
void should_honor_nullValue_when_writing(String nullValue, String expected) throws Exception {
  Path out = Files.createTempDirectory("test");
  CSVConnector connector = new CSVConnector();
  Config settings =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url",
          StringUtils.quoteJson(out),
          "nullValue",
          StringUtils.quoteJson(nullValue),
          "header",
          false);
  connector.configure(settings, false, false);
  connector.init();
  // Two mapped fields; the first value is null so it should be replaced by nullValue.
  Field[] fields = {new DefaultMappedField("field1"), new DefaultMappedField("field2")};
  Record record =
      DefaultRecord.mapped("source", resource, IRRELEVANT_POSITION, fields, null, "field2");
  Flux.<Record>just(record).transform(connector.write()).blockLast();
  connector.close();
  List<String> lines = Files.readAllLines(out.resolve("output-000001.csv"));
  assertThat(lines).hasSize(1).containsExactly(expected);
}
Aggregations