Use of com.datastax.oss.dsbulk.connectors.api.DefaultMappedField in the dsbulk project by DataStax.
From the class SchemaSettingsTest, method assertMapping.
/**
 * Asserts that the given mapping's internal field-to-variable multimap equals the expected
 * pairs. Arguments are alternating (field, variable) pairs; each element may be a String or a
 * CqlIdentifier.
 */
private static void assertMapping(DefaultMapping mapping, Object... fieldsAndVars) {
  ImmutableSetMultimap.Builder<Object, Object> expectedBuilder = ImmutableSetMultimap.builder();
  for (int idx = 0; idx < fieldsAndVars.length; idx += 2) {
    Object rawField = fieldsAndVars[idx];
    Object rawVariable = fieldsAndVars[idx + 1];
    String fieldName;
    if (rawField instanceof String) {
      fieldName = (String) rawField;
    } else {
      fieldName = ((CqlIdentifier) rawField).asInternal();
    }
    CQLWord variable;
    if (rawVariable instanceof String) {
      variable = CQLWord.fromInternal((String) rawVariable);
    } else {
      variable = CQLWord.fromCqlIdentifier((CqlIdentifier) rawVariable);
    }
    // A purely numeric field name denotes an indexed (positional) field.
    if (CharMatcher.inRange('0', '9').matchesAllOf(fieldName)) {
      expectedBuilder.put(new DefaultIndexedField(Integer.parseInt(fieldName)), variable);
    } else {
      expectedBuilder.put(new DefaultMappedField(fieldName), variable);
    }
  }
  @SuppressWarnings("unchecked")
  SetMultimap<Field, CQLWord> fieldsToVariables =
      (SetMultimap<Field, CQLWord>) getInternalState(mapping, "fieldsToVariables");
  assertThat(fieldsToVariables).isEqualTo(expectedBuilder.build());
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultMappedField in the dsbulk project by DataStax.
From the class JsonConnectorTest, method should_read_from_stdin_with_special_encoding.
@Test
void should_read_from_stdin_with_special_encoding() throws Exception {
  // Preserve the real stdin so it can be restored even if an assertion fails.
  InputStream originalStdin = System.in;
  try {
    String json = "{ \"fóô\" : \"bàr\", \"qïx\" : null }\n";
    System.setIn(new ByteArrayInputStream(json.getBytes(ISO_8859_1)));
    JsonConnector connector = new JsonConnector();
    Config settings =
        TestConfigUtils.createTestConfig("dsbulk.connector.json", "encoding", "ISO-8859-1");
    connector.configure(settings, true, true);
    connector.init();
    // stdin cannot be read concurrently, nor sampled for data size.
    assertThat(connector.readConcurrency()).isOne();
    assertThat(ReflectionUtils.invokeMethod("isDataSizeSamplingAvailable", connector, Boolean.TYPE))
        .isFalse();
    List<Record> records = Flux.merge(connector.read()).collectList().block();
    assertThat(records).hasSize(1);
    Record record = records.get(0);
    assertThat(record.getSource()).isEqualTo(objectMapper.readTree(json));
    assertThat(record.getResource()).isEqualTo(URI.create("std:/"));
    assertThat(record.getPosition()).isEqualTo(1L);
    assertThat(record.getFieldValue(new DefaultMappedField("fóô")))
        .isEqualTo(factory.textNode("bàr"));
    assertThat(record.getFieldValue(new DefaultMappedField("qïx")))
        .isEqualTo(factory.nullNode());
    connector.close();
  } finally {
    System.setIn(originalStdin);
  }
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultMappedField in the dsbulk project by DataStax.
From the class JsonConnectorTest, method createRecords.
/**
 * Builds five sample records (car listings) sharing the same five mapped fields. Each record's
 * position is its 1-based ordinal; when {@code retainRecordSources} is false the record source is
 * null.
 */
private List<Record> createRecords(boolean retainRecordSources, URI resource) {
  Field[] fields = {
    new DefaultMappedField("Year"),
    new DefaultMappedField("Make"),
    new DefaultMappedField("Model"),
    new DefaultMappedField("Description"),
    new DefaultMappedField("Price")
  };
  String[] documents = {
    "{" + "\"Year\": 1997,\n" + "\"Make\": \"Ford\",\n" + "\"Model\": \"E350\",\n" + "\"Description\": \"ac, abs, moon\",\n" + "\"Price\": 3000.0\n" + "}",
    "{\n" + "\"Year\": 1999,\n" + "\"Make\": \"Chevy\",\n" + "\"Model\": \"Venture \\\"Extended Edition\\\"\",\n" + "\"Description\": null,\n" + "\"Price\": 4900.0\n" + "}",
    "{\n" + "\"Year\": 1996,\n" + "\"Make\": \"Jeep\",\n" + "\"Model\": \"Grand Cherokee\",\n" + "\"Description\": \"MUST SELL!\\nair, moon roof, loaded\",\n" + "\"Price\": 4799.0\n" + "}",
    "{\n" + "\"Year\": 1999,\n" + "\"Make\": \"Chevy\",\n" + "\"Model\": \"Venture \\\"Extended Edition, Very Large\\\"\",\n" + "\"Description\": null,\n" + "\"Price\": 5000.0\n" + "}",
    "{\n" + "\"Year\": null,\n" + "\"Make\": null,\n" + "\"Model\": \"Venture \\\"Extended Edition\\\"\",\n" + "\"Description\": null,\n" + "\"Price\": 4900.0\n" + "}"
  };
  List<Record> records = new ArrayList<>();
  for (int i = 0; i < documents.length; i++) {
    JsonNode source;
    try {
      source = objectMapper.readTree(documents[i]);
    } catch (JsonProcessingException e) {
      throw new UncheckedIOException(e);
    }
    records.add(
        DefaultRecord.mapped(
            retainRecordSources ? source : null,
            resource,
            i + 1, // 1-based record position
            fields,
            source.get("Year"),
            source.get("Make"),
            source.get("Model"),
            source.get("Description"),
            source.get("Price")));
  }
  return records;
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultMappedField in the dsbulk project by DataStax.
From the class CSVConnectorTest, method should_honor_multi_char_delimiter.
@Test
void should_honor_multi_char_delimiter() throws Exception {
  CSVConnector connector = new CSVConnector();
  Config settings =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", url("/multi-char-delimiter.csv"),
          "delimiter", "\"||\"",
          "ignoreLeadingWhitespaces", true,
          "ignoreTrailingWhitespaces", true,
          "header", true);
  connector.configure(settings, true, true);
  connector.init();
  List<Record> rows = Flux.merge(connector.read()).collectList().block();
  assertThat(rows).hasSize(1);
  Record row = rows.get(0);
  assertThat(row.fields()).hasSize(6);
  // Each column is addressable both by position and by its header name.
  assertThat(row.getFieldValue(new DefaultIndexedField(0))).isEqualTo("foo");
  assertThat(row.getFieldValue(new DefaultIndexedField(1))).isEqualTo("|bar|");
  assertThat(row.getFieldValue(new DefaultIndexedField(2))).isEqualTo("foo||bar");
  assertThat(row.getFieldValue(new DefaultMappedField("field A"))).isEqualTo("foo");
  assertThat(row.getFieldValue(new DefaultMappedField("field B"))).isEqualTo("|bar|");
  assertThat(row.getFieldValue(new DefaultMappedField("field C"))).isEqualTo("foo||bar");
  connector.close();
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultMappedField in the dsbulk project by DataStax.
From the class CSVConnectorTest, method should_honor_nullValue_when_writing.
@ParameterizedTest
@MethodSource
void should_honor_nullValue_when_writing(String nullValue, String expected) throws Exception {
  Path outputDir = Files.createTempDirectory("test");
  CSVConnector connector = new CSVConnector();
  Config settings =
      TestConfigUtils.createTestConfig(
          "dsbulk.connector.csv",
          "url", StringUtils.quoteJson(outputDir),
          "nullValue", StringUtils.quoteJson(nullValue),
          "header", false);
  connector.configure(settings, false, false);
  connector.init();
  // Write a single record whose first field is null; it should be rendered as nullValue.
  Record record =
      DefaultRecord.mapped(
          "source",
          resource,
          IRRELEVANT_POSITION,
          new Field[] {new DefaultMappedField("field1"), new DefaultMappedField("field2")},
          null,
          "field2");
  Flux.<Record>just(record).transform(connector.write()).blockLast();
  connector.close();
  List<String> lines = Files.readAllLines(outputDir.resolve("output-000001.csv"));
  assertThat(lines).hasSize(1).containsExactly(expected);
}
Aggregations