Uses of com.datastax.oss.dsbulk.connectors.api.Record in the dsbulk project by DataStax.
From the class JsonConnectorTest, method should_read_single_file_single_doc:
@ParameterizedTest
@ValueSource(booleans = { true, false })
void should_read_single_file_single_doc(boolean retainRecordSources) throws Exception {
JsonConnector connector = new JsonConnector();
Config settings =
    TestConfigUtils.createTestConfig(
        "dsbulk.connector.json",
        "url", url("/single_doc.json"),
        "parserFeatures", "{ALLOW_COMMENTS:true}",
        "deserializationFeatures", "{USE_BIG_DECIMAL_FOR_FLOATS : false}",
        "mode", "SINGLE_DOCUMENT");
connector.configure(settings, true, retainRecordSources);
connector.init();
assertThat(connector.readConcurrency()).isOne();
List<Record> actual = Flux.merge(connector.read()).collectList().block();
verifyRecords(actual, retainRecordSources, rawURL("/single_doc.json").toURI());
connector.close();
}
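All of the read tests in this section follow the same connector lifecycle: configure, init, read, close. A minimal sketch of that lifecycle outside JUnit, reusing the helpers shown in the tests above; the input path "/tmp/data.json" is a hypothetical stand-in, and the boolean flags mirror the tests (read mode, retain record sources):

// Sketch of the JsonConnector read lifecycle; "/tmp/data.json" is a hypothetical input.
JsonConnector connector = new JsonConnector();
Config settings =
    TestConfigUtils.createTestConfig(
        "dsbulk.connector.json", "url", "/tmp/data.json", "mode", "SINGLE_DOCUMENT");
connector.configure(settings, true, true); // read = true, retainRecordSources = true
connector.init();
try {
  List<Record> records = Flux.merge(connector.read()).collectList().block();
} finally {
  connector.close(); // always release resources, even if read() fails
}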
From the class JsonConnectorTest, method should_write_to_stdout_with_special_encoding:
@Test
void should_write_to_stdout_with_special_encoding() throws Exception {
PrintStream stdout = System.out;
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream out = new PrintStream(baos);
System.setOut(out);
JsonConnector connector = new JsonConnector();
Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "encoding", "ISO-8859-1");
connector.configure(settings, false, true);
connector.init();
assertThat(connector.writeConcurrency()).isOne();
assertThat(ReflectionUtils.invokeMethod("isDataSizeSamplingAvailable", connector, Boolean.TYPE))
    .isFalse();
Flux.<Record>just(
        DefaultRecord.indexed(
            "source",
            resource,
            1,
            factory.textNode("fóô"),
            factory.textNode("bàr"),
            factory.textNode("qïx")))
    .transform(connector.write())
    .blockLast();
connector.close();
// the connector encodes with ISO-8859-1, so the captured bytes are decoded with the same charset
assertThat(new String(baos.toByteArray(), ISO_8859_1))
    .isEqualTo("{\"0\":\"fóô\",\"1\":\"bàr\",\"2\":\"qïx\"}" + System.lineSeparator());
} finally {
System.setOut(stdout);
}
}
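The stdout-capture pattern above is worth isolating: System.out is swapped for a PrintStream over an in-memory buffer inside try/finally, and the buffer is decoded with the same charset the writer used. A standalone sketch with no dsbulk dependency (the helper name captureStdout is ours):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static java.nio.charset.StandardCharsets.ISO_8859_1;

static String captureStdout(Runnable body) throws Exception {
  PrintStream stdout = System.out;
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    System.setOut(new PrintStream(baos, true, ISO_8859_1.name()));
    body.run(); // anything printed here lands in baos as ISO-8859-1 bytes
  } finally {
    System.setOut(stdout); // restore the real stdout no matter what
  }
  return new String(baos.toByteArray(), ISO_8859_1); // decode with the same charset
}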
From the class JsonConnectorTest, method should_honor_max_records_and_skip_records2:
@Test
void should_honor_max_records_and_skip_records2() throws Exception {
JsonConnector connector = new JsonConnector();
Config settings =
    TestConfigUtils.createTestConfig(
        "dsbulk.connector.json",
        "url", url("/root/ip-by-country-sample1.json"),
        "skipRecords", 10,
        "maxRecords", 1);
connector.configure(settings, true, true);
connector.init();
List<Record> records = Flux.merge(connector.read()).collectList().block();
assertThat(records).hasSize(1);
assertThat(records.get(0).getSource().toString().trim())
    .isEqualTo(
        "{\"beginning IP Address\":\"212.63.180.20\","
            + "\"ending IP Address\":\"212.63.180.23\","
            + "\"beginning IP Number\":3560944660,"
            + "\"ending IP Number\":3560944663,"
            + "\"ISO 3166 Country Code\":\"MZ\","
            + "\"Country Name\":\"Mozambique\"}");
connector.close();
}
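skipRecords and maxRecords behave like Reactor's skip and take applied to the parsed record stream: drop the first 10 records, then emit at most 1, which is why the assertion above sees the eleventh record of the sample file. An illustrative stand-in (the connector applies these limits internally while parsing; this only mirrors the semantics):

import java.util.List;
import reactor.core.publisher.Flux;

List<Integer> out =
    Flux.range(1, 20) // stand-in for the stream of parsed records
        .skip(10)     // skipRecords = 10
        .take(1)      // maxRecords = 1
        .collectList()
        .block();     // [11]: only the eleventh element survives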
From the class JsonConnectorTest, method should_read_single_empty_file_multi_doc:
@Test
void should_read_single_empty_file_multi_doc() throws Exception {
JsonConnector connector = new JsonConnector();
Config settings =
    TestConfigUtils.createTestConfig(
        "dsbulk.connector.json",
        "url", url("/empty.json"),
        "parserFeatures", "{ALLOW_COMMENTS:true}",
        "mode", "MULTI_DOCUMENT");
connector.configure(settings, true, true);
connector.init();
assertThat(connector.readConcurrency()).isOne();
// should complete with 0 records.
List<Record> actual = Flux.merge(connector.read()).collectList().block();
assertThat(actual).isEmpty();
connector.close();
}
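The contract being exercised here is that an empty input completes the record stream normally instead of erroring, and collectList on a publisher that completes without emitting yields an empty list, never null. The same contract in plain Reactor terms:

List<Record> actual = Flux.<Record>empty().collectList().block();
// collectList() emits a (possibly empty) list on completion, so block() never returns null here
assert actual != null && actual.isEmpty();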
From the class JsonConnectorTest, method should_read_from_http_url:
@ParameterizedTest
@ValueSource(booleans = { true, false })
void should_read_from_http_url(boolean retainRecordSources, @Wiremock WireMockServer server) throws Exception {
server.givenThat(
    any(urlPathEqualTo("/file.json"))
        .willReturn(
            aResponse()
                .withStatus(200)
                .withHeader("Content-Type", "text/json")
                .withBody(readFile(path("/single_doc.json")))));
JsonConnector connector = new JsonConnector();
String url = String.format("%s/file.json", server.baseUrl());
Config settings =
    TestConfigUtils.createTestConfig(
        "dsbulk.connector.json",
        "url", quoteJson(url),
        "mode", "SINGLE_DOCUMENT",
        "parserFeatures", "{ALLOW_COMMENTS:true}",
        "deserializationFeatures", "{USE_BIG_DECIMAL_FOR_FLOATS : false}");
connector.configure(settings, true, retainRecordSources);
connector.init();
assertThat(connector.readConcurrency()).isOne();
List<Record> actual = Flux.merge(connector.read()).collectList().block();
verifyRecords(actual, retainRecordSources, new URI(url));
connector.close();
}
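Two details of the HTTP test are easy to miss: the url setting goes through quoteJson, presumably because a value like http://host:port contains colons that an unquoted HOCON string would misparse, and the fixture is served by a stock WireMock stub. A pared-down sketch of such a stub, with a hypothetical inline body instead of the fixture file:

import static com.github.tomakehurst.wiremock.client.WireMock.*;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options;
import com.github.tomakehurst.wiremock.WireMockServer;

WireMockServer server = new WireMockServer(options().dynamicPort());
server.start();
server.givenThat(
    get(urlPathEqualTo("/file.json"))
        .willReturn(
            aResponse()
                .withStatus(200)
                .withHeader("Content-Type", "application/json")
                .withBody("{\"key\":\"value\"}"))); // hypothetical payload
String url = server.baseUrl() + "/file.json"; // quote this value before placing it in the config
server.stop();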