use of org.springframework.core.env.AbstractEnvironment in project kylo by Teradata.
the class SparkShellApp, method sparkConf.
/**
* Creates the Spark configuration.
*
* @return the Spark configuration
*/
@Bean
public SparkConf sparkConf(final Environment env, @Qualifier("sparkShellPort") final int serverPort) {
    final SparkConf conf = new SparkConf()
        .setAppName("SparkShellServer")
        .set("spark.ui.port", Integer.toString(serverPort + 1));
    // Collect every "spark."-prefixed entry from the environment's resource property sources.
    final Iterable<Map.Entry<String, Object>> properties = FluentIterable.from(Collections.singleton(env))
        .filter(AbstractEnvironment.class)
        .transformAndConcat(new Function<AbstractEnvironment, Iterable<?>>() {
            @Nullable
            @Override
            public Iterable<?> apply(@Nullable final AbstractEnvironment input) {
                return (input != null) ? input.getPropertySources() : null;
            }
        })
        .filter(ResourcePropertySource.class)
        .transform(new Function<ResourcePropertySource, Map<String, Object>>() {
            @Nullable
            @Override
            public Map<String, Object> apply(@Nullable final ResourcePropertySource input) {
                return (input != null) ? input.getSource() : null;
            }
        })
        .transformAndConcat(new Function<Map<String, Object>, Iterable<Map.Entry<String, Object>>>() {
            @Nullable
            @Override
            public Iterable<Map.Entry<String, Object>> apply(@Nullable final Map<String, Object> input) {
                return (input != null) ? input.entrySet() : null;
            }
        })
        .filter(new Predicate<Map.Entry<String, Object>>() {
            @Override
            public boolean apply(@Nullable final Map.Entry<String, Object> input) {
                return input != null && input.getKey().startsWith("spark.");
            }
        });
    // Copy each matching property into the Spark configuration.
    for (final Map.Entry<String, Object> entry : properties) {
        conf.set(entry.getKey(), entry.getValue().toString());
    }
    return conf;
}
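
The Guava FluentIterable chain above predates Java 8. The same "spark." extraction can be written more compactly with the Stream API; a minimal sketch, not kylo's actual code, assuming Spring 5's PropertySources.stream() and that only ResourcePropertySource instances should contribute properties (the helper name sparkConfFromEnvironment is illustrative):

import java.util.Map;

import org.apache.spark.SparkConf;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.io.support.ResourcePropertySource;

// Sketch: copy "spark."-prefixed properties from the Spring environment into a SparkConf.
static SparkConf sparkConfFromEnvironment(final Environment env, final SparkConf conf) {
    if (env instanceof AbstractEnvironment) {
        ((AbstractEnvironment) env).getPropertySources().stream()
                .filter(ps -> ps instanceof ResourcePropertySource)
                .map(ps -> ((ResourcePropertySource) ps).getSource())
                .flatMap(source -> source.entrySet().stream())
                .filter(entry -> entry.getKey().startsWith("spark."))
                .forEach(entry -> conf.set(entry.getKey(), entry.getValue().toString()));
    }
    return conf;
}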
use of org.springframework.core.env.AbstractEnvironment in project pinpoint by naver.
the class ExperimentalConfig, method readExperimentalProperties.
private Map<String, Object> readExperimentalProperties(Environment environment) {
    MutablePropertySources propertySources = ((AbstractEnvironment) environment).getPropertySources();
    // Gather every enumerable property name carrying the experimental prefix
    // and map it to its resolved value.
    return propertySources.stream()
            .filter(ps -> ps instanceof EnumerablePropertySource)
            .map(ps -> ((EnumerablePropertySource<?>) ps).getPropertyNames())
            .flatMap(Arrays::stream)
            .filter(propName -> propName.startsWith(PREFIX))
            .collect(Collectors.toMap(Function.identity(), toValue(environment)));
}
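
The toValue helper is defined elsewhere in ExperimentalConfig and is not shown above. A plausible reconstruction, inferred from the call site rather than copied from pinpoint, resolves each collected property name against the Environment:

// Hypothetical reconstruction of the toValue helper used above: map each
// property name to the value the Environment resolves for it.
private Function<String, Object> toValue(final Environment environment) {
    return propertyName -> environment.getProperty(propertyName);
}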
use of org.springframework.core.env.AbstractEnvironment in project spring-boot by spring-projects.
the class EnvironmentConverterTests, method convertedEnvironmentHasSameConversionService.
@Test
void convertedEnvironmentHasSameConversionService() {
    AbstractEnvironment originalEnvironment = new MockEnvironment();
    ConfigurableConversionService conversionService = mock(ConfigurableConversionService.class);
    originalEnvironment.setConversionService(conversionService);
    StandardEnvironment convertedEnvironment = this.environmentConverter
            .convertEnvironmentIfNecessary(originalEnvironment, StandardEnvironment.class);
    assertThat(convertedEnvironment.getConversionService()).isEqualTo(conversionService);
}
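
What the test pins down is that convertEnvironmentIfNecessary carries the original environment's ConversionService over to the converted environment. Outside the test, setConversionService is the AbstractEnvironment hook for customizing how raw property values are converted by getProperty(name, Type.class); a minimal sketch with stock Spring types (the String-to-Path converter is only an example):

import java.nio.file.Path;
import java.nio.file.Paths;

import org.springframework.core.convert.support.DefaultConversionService;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.StandardEnvironment;

// Register a custom converter so env.getProperty("some.dir", Path.class)
// can turn the raw String value into a Path.
AbstractEnvironment env = new StandardEnvironment();
DefaultConversionService conversionService = new DefaultConversionService();
conversionService.addConverter(String.class, Path.class, Paths::get);
env.setConversionService(conversionService);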
use of org.springframework.core.env.AbstractEnvironment in project dubbo by alibaba.
the class DubboRelaxedBinding2AutoConfiguration, method dubboScanBasePackagesPropertyResolver.
public PropertyResolver dubboScanBasePackagesPropertyResolver(ConfigurableEnvironment environment) {
    // Anonymous AbstractEnvironment whose only property source is the subset of
    // the outer environment's properties under DUBBO_SCAN_PREFIX.
    ConfigurableEnvironment propertyResolver = new AbstractEnvironment() {

        @Override
        protected void customizePropertySources(MutablePropertySources propertySources) {
            Map<String, Object> dubboScanProperties = getSubProperties(environment.getPropertySources(), DUBBO_SCAN_PREFIX);
            propertySources.addLast(new MapPropertySource("dubboScanProperties", dubboScanProperties));
        }
    };
    // Attach ConfigurationPropertySources so the resolver supports Boot's relaxed binding.
    ConfigurationPropertySources.attach(propertyResolver);
    return propertyResolver;
}
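
getSubProperties is a Dubbo utility and its body is not shown here. A simplified sketch of the behavior the snippet relies on, under the assumption that it flattens every enumerable property source and returns the keys under the prefix with the prefix stripped (the helper name subProperties is illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.PropertySources;

// Hypothetical getSubProperties-style helper: earlier (higher-priority) property
// sources win, and the prefix is stripped from each returned key.
static Map<String, Object> subProperties(final PropertySources propertySources, final String prefix) {
    final String normalizedPrefix = prefix.endsWith(".") ? prefix : prefix + ".";
    final Map<String, Object> result = new LinkedHashMap<>();
    for (final PropertySource<?> source : propertySources) {
        if (source instanceof EnumerablePropertySource) {
            for (final String name : ((EnumerablePropertySource<?>) source).getPropertyNames()) {
                if (name.startsWith(normalizedPrefix)) {
                    result.putIfAbsent(name.substring(normalizedPrefix.length()), source.getProperty(name));
                }
            }
        }
    }
    return result;
}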
use of org.springframework.core.env.AbstractEnvironment in project kylo by Teradata.
the class LivyWranglerConfig, method sparkConf.
/**
* Creates the Spark configuration.
*
* @return the Spark configuration
*/
@Bean
@Primary
public SparkConf sparkConf(final Environment env) /*, @Qualifier("sparkShellPort") final int serverPort*/
{
    final SparkConf conf = new SparkConf().setAppName("SparkShellServer");
    // .set("spark.ui.port", Integer.toString(serverPort + 1));
    // Same extraction as in SparkShellApp: collect every "spark."-prefixed entry
    // from the environment's resource property sources.
    final Iterable<Map.Entry<String, Object>> properties = FluentIterable.from(Collections.singleton(env))
        .filter(AbstractEnvironment.class)
        .transformAndConcat(new Function<AbstractEnvironment, Iterable<?>>() {
            @Nullable
            @Override
            public Iterable<?> apply(@Nullable final AbstractEnvironment input) {
                return (input != null) ? input.getPropertySources() : null;
            }
        })
        .filter(ResourcePropertySource.class)
        .transform(new Function<ResourcePropertySource, Map<String, Object>>() {
            @Nullable
            @Override
            public Map<String, Object> apply(@Nullable final ResourcePropertySource input) {
                return (input != null) ? input.getSource() : null;
            }
        })
        .transformAndConcat(new Function<Map<String, Object>, Iterable<Map.Entry<String, Object>>>() {
            @Nullable
            @Override
            public Iterable<Map.Entry<String, Object>> apply(@Nullable final Map<String, Object> input) {
                return (input != null) ? input.entrySet() : null;
            }
        })
        .filter(new Predicate<Map.Entry<String, Object>>() {
            @Override
            public boolean apply(@Nullable final Map.Entry<String, Object> input) {
                return input != null && input.getKey().startsWith("spark.");
            }
        });
    // Copy each matching property into the Spark configuration.
    for (final Map.Entry<String, Object> entry : properties) {
        conf.set(entry.getKey(), entry.getValue().toString());
    }
    return conf;
}