Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class SparkBigQueryProxyAndHttpConfigTest, method testConfigWithAllThreeParameters.
@Test
public void testConfigWithAllThreeParameters() throws URISyntaxException {
  // All three configuration sources are populated: connector options,
  // normalized global Spark options, and the Hadoop configuration.
  DataSourceOptions connectorOptions = new DataSourceOptions(defaultOptions);
  ImmutableMap<String, String> normalizedGlobals =
      SparkBigQueryConfig.normalizeConf(defaultGlobalOptions);
  SparkBigQueryProxyAndHttpConfig config =
      SparkBigQueryProxyAndHttpConfig.from(
          connectorOptions.asMap(), normalizedGlobals, defaultHadoopConfiguration);

  // Proxy settings should come through as the connector-level values.
  assertThat(config.getProxyUri())
      .isEqualTo(Optional.of(getURI("http", "bq-connector-host", 1234)));
  assertThat(config.getProxyUsername()).isEqualTo(Optional.of("bq-connector-user"));
  assertThat(config.getProxyPassword()).isEqualTo(Optional.of("bq-connector-password"));

  // HTTP retry/timeout settings should likewise reflect the connector-level values.
  assertThat(config.getHttpMaxRetry()).isEqualTo(Optional.of(10));
  assertThat(config.getHttpConnectTimeout()).isEqualTo(Optional.of(10000));
  assertThat(config.getHttpReadTimeout()).isEqualTo(Optional.of(20000));
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class SparkBigQueryProxyAndHttpConfigTest, method testWhenProxyIsSetAndPasswordIsNull.
@Test
public void testWhenProxyIsSetAndPasswordIsNull() {
  // A proxy username without a matching password is an invalid combination
  // and must be rejected when the config is built.
  ImmutableMap<String, String> partialProxyOptions =
      ImmutableMap.of(
          "proxyAddress", "http://bq-connector-host:1234",
          "proxyUsername", "bq-connector-user");
  DataSourceOptions options = new DataSourceOptions(partialProxyOptions);

  IllegalArgumentException exception =
      assertThrows(
          IllegalArgumentException.class,
          () ->
              SparkBigQueryProxyAndHttpConfig.from(
                  options.asMap(), ImmutableMap.of(), new Configuration()));

  assertThat(exception)
      .hasMessageThat()
      .contains("Both proxyUsername and proxyPassword should be defined or not defined together");
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class SparkBigQueryProxyAndHttpConfigTest, method testSerializability.
@Test
public void testSerializability() throws IOException {
  DataSourceOptions options = new DataSourceOptions(defaultOptions);
  SparkBigQueryProxyAndHttpConfig config =
      SparkBigQueryProxyAndHttpConfig.from(
          options.asMap(), defaultGlobalOptions, defaultHadoopConfiguration);
  // Writing the config into an in-memory buffer is enough to prove that
  // every member of the config is serializable.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  new ObjectOutputStream(buffer).writeObject(config);
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class SparkBigQueryProxyAndHttpConfigTest, method testConfigFromOptions.
@Test
public void testConfigFromOptions() throws URISyntaxException {
  // Only the connector-level options are supplied; global options are empty
  // and the Hadoop configuration carries no relevant keys.
  DataSourceOptions connectorOptions = new DataSourceOptions(defaultOptions);
  SparkBigQueryProxyAndHttpConfig config =
      SparkBigQueryProxyAndHttpConfig.from(
          connectorOptions.asMap(), ImmutableMap.of(), new Configuration());

  // Proxy settings resolved purely from the connector options.
  assertThat(config.getProxyUri())
      .isEqualTo(Optional.of(getURI("http", "bq-connector-host", 1234)));
  assertThat(config.getProxyUsername()).isEqualTo(Optional.of("bq-connector-user"));
  assertThat(config.getProxyPassword()).isEqualTo(Optional.of("bq-connector-password"));

  // HTTP retry/timeout settings resolved purely from the connector options.
  assertThat(config.getHttpMaxRetry()).isEqualTo(Optional.of(10));
  assertThat(config.getHttpConnectTimeout()).isEqualTo(Optional.of(10000));
  assertThat(config.getHttpReadTimeout()).isEqualTo(Optional.of(20000));
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the spark-bigquery-connector project by GoogleCloudDataproc.
From the class SparkBigQueryProxyAndHttpConfigTest, method testConfigViaSparkBigQueryConfigWithHadoopConfiguration.
@Test
public void testConfigViaSparkBigQueryConfigWithHadoopConfiguration() throws URISyntaxException {
  // The data-source options carry only the table; every proxy/HTTP setting
  // must therefore be resolved from the Hadoop configuration.
  DataSourceOptions options =
      new DataSourceOptions(ImmutableMap.of("table", "dataset.table"));
  SparkBigQueryConfig sparkConfig =
      SparkBigQueryConfig.from(
          options.asMap(), // contains only one key "table"
          ImmutableMap.of(), // empty global options
          defaultHadoopConfiguration,
          10,
          new SQLConf(),
          "2.4.0",
          Optional.empty());
  SparkBigQueryProxyAndHttpConfig config =
      (SparkBigQueryProxyAndHttpConfig) sparkConfig.getBigQueryProxyConfig();

  // Proxy settings fall back to the Hadoop-level "*-hadoop" values.
  assertThat(config.getProxyUri())
      .isEqualTo(Optional.of(getURI("http", "bq-connector-host-hadoop", 1234)));
  assertThat(config.getProxyUsername()).isEqualTo(Optional.of("bq-connector-user-hadoop"));
  assertThat(config.getProxyPassword()).isEqualTo(Optional.of("bq-connector-password-hadoop"));

  // HTTP retry/timeout settings fall back to the Hadoop-level values.
  assertThat(config.getHttpMaxRetry()).isEqualTo(Optional.of(30));
  assertThat(config.getHttpConnectTimeout()).isEqualTo(Optional.of(30000));
  assertThat(config.getHttpReadTimeout()).isEqualTo(Optional.of(40000));
}
Aggregations