
Example 16 with Record

Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax.

The class JsonConnectorTest, method should_read_single_file_single_doc.

@ParameterizedTest
@ValueSource(booleans = { true, false })
void should_read_single_file_single_doc(boolean retainRecordSources) throws Exception {
    JsonConnector connector = new JsonConnector();
    Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "url", url("/single_doc.json"), "parserFeatures", "{ALLOW_COMMENTS:true}", "deserializationFeatures", "{USE_BIG_DECIMAL_FOR_FLOATS : false}", "mode", "SINGLE_DOCUMENT");
    connector.configure(settings, true, retainRecordSources);
    connector.init();
    assertThat(connector.readConcurrency()).isOne();
    List<Record> actual = Flux.merge(connector.read()).collectList().block();
    verifyRecords(actual, retainRecordSources, rawURL("/single_doc.json").toURI());
    connector.close();
}
Also used: Config (com.typesafe.config.Config), DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord), Record (com.datastax.oss.dsbulk.connectors.api.Record), ValueSource (org.junit.jupiter.params.provider.ValueSource), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
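
A verifyRecords helper is invoked above but not reproduced on this page. The sketch below shows what such a check could look like; the helper name and exact assertions are assumptions (only getSource() appears in these examples, and the sketch assumes Record also exposes getResource() and that getSource() is null when sources are not retained).

// Hypothetical verifyRecords-style check (not the real helper from JsonConnectorTest).
// Relies on java.util.List, java.net.URI, AssertJ's assertThat, and the Record interface.
private static void verifyRecordsSketch(
    List<Record> records, boolean retainRecordSources, URI expectedResource) {
    assertThat(records).isNotEmpty();
    for (Record record : records) {
        // Each record should point back to the resource it was read from.
        assertThat(record.getResource()).isEqualTo(expectedResource);
        if (retainRecordSources) {
            // With source retention enabled, the original JSON node is kept on the record.
            assertThat(record.getSource()).isNotNull();
        } else {
            assertThat(record.getSource()).isNull();
        }
    }
}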

Example 17 with Record

Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax.

The class JsonConnectorTest, method should_write_to_stdout_with_special_encoding.

@Test
void should_write_to_stdout_with_special_encoding() throws Exception {
    PrintStream stdout = System.out;
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream out = new PrintStream(baos);
        System.setOut(out);
        JsonConnector connector = new JsonConnector();
        Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "encoding", "ISO-8859-1");
        connector.configure(settings, false, true);
        connector.init();
        assertThat(connector.writeConcurrency()).isOne();
        assertThat(ReflectionUtils.invokeMethod("isDataSizeSamplingAvailable", connector, Boolean.TYPE)).isFalse();
        Flux.<Record>just(
                DefaultRecord.indexed(
                    "source",
                    resource,
                    1,
                    factory.textNode("fóô"),
                    factory.textNode("bàr"),
                    factory.textNode("qïx")))
            .transform(connector.write())
            .blockLast();
        connector.close();
        assertThat(new String(baos.toByteArray(), ISO_8859_1)).isEqualTo("{\"0\":\"fóô\",\"1\":\"bàr\",\"2\":\"qïx\"}" + System.lineSeparator());
    } finally {
        System.setOut(stdout);
    }
}
Also used: PrintStream (java.io.PrintStream), Config (com.typesafe.config.Config), DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord), Record (com.datastax.oss.dsbulk.connectors.api.Record), ByteArrayOutputStream (java.io.ByteArrayOutputStream), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
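
The test above captures System.out by hand; the same pattern can be extracted into a small reusable helper, sketched below with a hypothetical name. It uses only JDK classes plus the ISO_8859_1 charset constant already imported by the test, and the Charset-accepting PrintStream constructor requires Java 10+.

// Hypothetical helper extracting the stdout-capture pattern used above.
static String captureStdout(Runnable body) {
    PrintStream previous = System.out;
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // PrintStream(OutputStream, boolean, Charset) is available since Java 10.
    PrintStream capture = new PrintStream(buffer, true, ISO_8859_1);
    try {
        System.setOut(capture);
        body.run();
    } finally {
        System.setOut(previous);
        capture.close();
    }
    return new String(buffer.toByteArray(), ISO_8859_1);
}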

Example 18 with Record

Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax.

The class JsonConnectorTest, method should_honor_max_records_and_skip_records2.

@Test
void should_honor_max_records_and_skip_records2() throws Exception {
    JsonConnector connector = new JsonConnector();
    Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "url", url("/root/ip-by-country-sample1.json"), "skipRecords", 10, "maxRecords", 1);
    connector.configure(settings, true, true);
    connector.init();
    List<Record> records = Flux.merge(connector.read()).collectList().block();
    assertThat(records).hasSize(1);
    assertThat(records.get(0).getSource().toString().trim())
        .isEqualTo(
            "{\"beginning IP Address\":\"212.63.180.20\","
                + "\"ending IP Address\":\"212.63.180.23\","
                + "\"beginning IP Number\":3560944660,"
                + "\"ending IP Number\":3560944663,"
                + "\"ISO 3166 Country Code\":\"MZ\","
                + "\"Country Name\":\"Mozambique\"}");
    connector.close();
}
Also used: Config (com.typesafe.config.Config), DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord), Record (com.datastax.oss.dsbulk.connectors.api.Record), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
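
The skipRecords and maxRecords settings ask the connector to drop leading records and cap the total read from a resource. Conceptually, and only as an illustration rather than how the connector implements the settings, the same filtering could be expressed with Reactor operators on a connector configured without them:

// Conceptual equivalent of skipRecords = 10, maxRecords = 1 using plain Reactor
// operators, assuming the connector was configured without those two settings.
List<Record> records =
    Flux.merge(connector.read())
        // drop the first 10 records of the resource
        .skip(10)
        // then keep at most 1 record
        .take(1)
        .collectList()
        .block();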

Example 19 with Record

Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax.

The class JsonConnectorTest, method should_read_single_empty_file_multi_doc.

@Test
void should_read_single_empty_file_multi_doc() throws Exception {
    JsonConnector connector = new JsonConnector();
    Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "url", url("/empty.json"), "parserFeatures", "{ALLOW_COMMENTS:true}", "mode", "MULTI_DOCUMENT");
    connector.configure(settings, true, true);
    connector.init();
    assertThat(connector.readConcurrency()).isOne();
    // should complete with 0 records.
    List<Record> actual = Flux.merge(connector.read()).collectList().block();
    assertThat(actual).hasSize(0);
    connector.close();
}
Also used: Config (com.typesafe.config.Config), DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord), Record (com.datastax.oss.dsbulk.connectors.api.Record), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)

Example 20 with Record

Use of com.datastax.oss.dsbulk.connectors.api.Record in project dsbulk by DataStax.

The class JsonConnectorTest, method should_read_from_http_url.

@ParameterizedTest
@ValueSource(booleans = { true, false })
void should_read_from_http_url(boolean retainRecordSources, @Wiremock WireMockServer server) throws Exception {
    server.givenThat(any(urlPathEqualTo("/file.json")).willReturn(aResponse().withStatus(200).withHeader("Content-Type", "text/json").withBody(readFile(path("/single_doc.json")))));
    JsonConnector connector = new JsonConnector();
    String url = String.format("%s/file.json", server.baseUrl());
    Config settings = TestConfigUtils.createTestConfig("dsbulk.connector.json", "url", quoteJson(url), "mode", "SINGLE_DOCUMENT", "parserFeatures", "{ALLOW_COMMENTS:true}", "deserializationFeatures", "{USE_BIG_DECIMAL_FOR_FLOATS : false}");
    connector.configure(settings, true, retainRecordSources);
    connector.init();
    assertThat(connector.readConcurrency()).isOne();
    List<Record> actual = Flux.merge(connector.read()).collectList().block();
    verifyRecords(actual, retainRecordSources, new URI(url));
    connector.close();
}
Also used: Config (com.typesafe.config.Config), DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord), Record (com.datastax.oss.dsbulk.connectors.api.Record), URI (java.net.URI), ValueSource (org.junit.jupiter.params.provider.ValueSource), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
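
The WireMockServer parameter above is injected by a @Wiremock JUnit extension. Without that extension, a roughly equivalent stub can be set up manually with stock WireMock APIs, as sketched below (the response body is a placeholder and lifecycle management is simplified).

// Manual WireMock setup, roughly equivalent to the injected @Wiremock server.
// Requires com.github.tomakehurst.wiremock.WireMockServer, WireMockConfiguration,
// and the static WireMock stubbing helpers already used in the test above.
WireMockServer server = new WireMockServer(WireMockConfiguration.options().dynamicPort());
server.start();
try {
    server.givenThat(
        any(urlPathEqualTo("/file.json"))
            .willReturn(
                aResponse()
                    .withStatus(200)
                    .withHeader("Content-Type", "text/json")
                    .withBody("{\"Country Name\":\"Mozambique\"}")));
    // server.baseUrl() + "/file.json" can now be passed as the connector's "url" setting.
} finally {
    server.stop();
}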

Aggregations

Record (com.datastax.oss.dsbulk.connectors.api.Record): 54
DefaultRecord (com.datastax.oss.dsbulk.connectors.api.DefaultRecord): 40
Config (com.typesafe.config.Config): 39
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 37
Test (org.junit.jupiter.api.Test): 35
ErrorRecord (com.datastax.oss.dsbulk.connectors.api.ErrorRecord): 24
Path (java.nio.file.Path): 24
DefaultIndexedField (com.datastax.oss.dsbulk.connectors.api.DefaultIndexedField): 10
Function (java.util.function.Function): 9
MethodSource (org.junit.jupiter.params.provider.MethodSource): 9
DefaultMappedField (com.datastax.oss.dsbulk.connectors.api.DefaultMappedField): 8
ArrayList (java.util.ArrayList): 8
List (java.util.List): 8
Publisher (org.reactivestreams.Publisher): 8
DefaultErrorRecord (com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord): 7
IOException (java.io.IOException): 7
ValueSource (org.junit.jupiter.params.provider.ValueSource): 7
Flux (reactor.core.publisher.Flux): 7
DataStaxBulkLoader (com.datastax.oss.dsbulk.runner.DataStaxBulkLoader): 6
ExitStatus (com.datastax.oss.dsbulk.runner.ExitStatus): 6