Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the project spark-bigquery-connector by GoogleCloudDataproc:
the class SparkBigQueryProxyAndHttpConfigTest, method testWhenProxyIsSetAndUserNameIsNull.
@Test
public void testWhenProxyIsSetAndUserNameIsNull() {
  // Supply a proxy address and password but deliberately omit proxyUsername;
  // the config factory must reject this partial credential pair.
  ImmutableMap<String, String> connectorOptions =
      ImmutableMap.of(
          "proxyAddress", "http://bq-connector-host:1234",
          "proxyPassword", "bq-connector-password");
  Configuration hadoopConfig = new Configuration();
  DataSourceOptions dataSourceOptions = new DataSourceOptions(connectorOptions);
  IllegalArgumentException exception =
      assertThrows(
          IllegalArgumentException.class,
          () ->
              SparkBigQueryProxyAndHttpConfig.from(
                  dataSourceOptions.asMap(), ImmutableMap.of(), hadoopConfig));
  assertThat(exception)
      .hasMessageThat()
      .contains("Both proxyUsername and proxyPassword should be defined or not defined together");
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the project spark-bigquery-connector by GoogleCloudDataproc:
the class SparkBigQueryProxyAndHttpConfigTest, method testWhenProxyIsNotSetAndUserNamePasswordAreNotNull.
@Test
public void testWhenProxyIsNotSetAndUserNamePasswordAreNotNull() {
  // Credentials without a proxyAddress are meaningless; the config factory
  // must fail fast instead of silently ignoring them.
  ImmutableMap<String, String> connectorOptions =
      ImmutableMap.of(
          "proxyUsername", "bq-connector-user",
          "proxyPassword", "bq-connector-password");
  Configuration hadoopConfig = new Configuration();
  DataSourceOptions dataSourceOptions = new DataSourceOptions(connectorOptions);
  IllegalArgumentException exception =
      assertThrows(
          IllegalArgumentException.class,
          () ->
              SparkBigQueryProxyAndHttpConfig.from(
                  dataSourceOptions.asMap(), ImmutableMap.of(), hadoopConfig));
  assertThat(exception)
      .hasMessageThat()
      .contains("Please set proxyAddress in order to use a proxy. " + "Setting proxyUsername or proxyPassword is not enough");
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the project spark-bigquery-connector by GoogleCloudDataproc:
the class SparkBigQueryProxyAndHttpConfigTest, method testConfigViaSparkBigQueryConfigWithGlobalOptionsAndHadoopConfiguration.
@Test
public void testConfigViaSparkBigQueryConfigWithGlobalOptionsAndHadoopConfiguration()
    throws URISyntaxException {
  // The per-read options carry only the table; every proxy/http setting is
  // expected to be resolved from the normalized global options
  // (presumably defaultGlobalOptions holds the *-global values asserted below
  // — defined elsewhere in this test class).
  HashMap<String, String> readOptions = new HashMap<>();
  readOptions.put("table", "dataset.table");
  ImmutableMap<String, String> normalizedGlobals =
      SparkBigQueryConfig.normalizeConf(defaultGlobalOptions);
  DataSourceOptions dataSourceOptions = new DataSourceOptions(readOptions);
  SparkBigQueryConfig sparkConfig =
      SparkBigQueryConfig.from(
          // contains only one key "table"
          dataSourceOptions.asMap(),
          normalizedGlobals,
          defaultHadoopConfiguration,
          10,
          new SQLConf(),
          "2.4.0",
          Optional.empty());
  SparkBigQueryProxyAndHttpConfig proxyHttpConfig =
      (SparkBigQueryProxyAndHttpConfig) sparkConfig.getBigQueryProxyConfig();
  // Proxy settings should come from the global (not per-read) configuration.
  assertThat(proxyHttpConfig.getProxyUri())
      .isEqualTo(Optional.of(getURI("http", "bq-connector-host-global", 1234)));
  assertThat(proxyHttpConfig.getProxyUsername()).isEqualTo(Optional.of("bq-connector-user-global"));
  assertThat(proxyHttpConfig.getProxyPassword())
      .isEqualTo(Optional.of("bq-connector-password-global"));
  // HTTP retry/timeout settings likewise come from the global options.
  assertThat(proxyHttpConfig.getHttpMaxRetry()).isEqualTo(Optional.of(20));
  assertThat(proxyHttpConfig.getHttpConnectTimeout()).isEqualTo(Optional.of(20000));
  assertThat(proxyHttpConfig.getHttpReadTimeout()).isEqualTo(Optional.of(30000));
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the project java-pubsublite-spark by googleapis:
the class PslReadDataSourceOptionsTest, method testInvalidSubPath.
@Test
public void testInvalidSubPath() {
  // A malformed subscription path must be rejected when the options are parsed.
  ImmutableMap<String, String> invalidOptions =
      ImmutableMap.of(Constants.SUBSCRIPTION_CONFIG_KEY, "invalid/path");
  DataSourceOptions dataSourceOptions = new DataSourceOptions(invalidOptions);
  assertThrows(
      IllegalArgumentException.class,
      () -> PslReadDataSourceOptions.fromSparkDataSourceOptions(dataSourceOptions));
}
Use of org.apache.spark.sql.sources.v2.DataSourceOptions in the project java-pubsublite-spark by googleapis:
the class PslWriteDataSourceOptionsTest, method testInvalidTopicPath.
@Test
public void testInvalidTopicPath() {
  // A malformed topic path must be rejected when the options are parsed.
  ImmutableMap<String, String> invalidOptions =
      ImmutableMap.of(Constants.TOPIC_CONFIG_KEY, "invalid/path");
  DataSourceOptions dataSourceOptions = new DataSourceOptions(invalidOptions);
  assertThrows(
      IllegalArgumentException.class,
      () -> PslWriteDataSourceOptions.fromSparkDataSourceOptions(dataSourceOptions));
}
Aggregations