use of org.springframework.core.env.AbstractEnvironment in project kylo by Teradata.
the class KyloShellConfig method sparkConf.
/**
 * Creates the Spark configuration.
 *
 * @return the Spark configuration
 */
@Bean
public SparkConf sparkConf(final Environment env, @Qualifier("sparkShellPort") final int serverPort) {
    final SparkConf conf = new SparkConf()
            .setAppName("SparkShellServer")
            .set("spark.ui.port", Integer.toString(serverPort + 1));

    // Walk the Spring environment's resource property sources and keep every entry whose key starts with "spark."
    final Iterable<Map.Entry<String, Object>> properties = FluentIterable.from(Collections.singleton(env))
            .filter(AbstractEnvironment.class)
            .transformAndConcat(new Function<AbstractEnvironment, Iterable<?>>() {
                @Nullable
                @Override
                public Iterable<?> apply(@Nullable final AbstractEnvironment input) {
                    return (input != null) ? input.getPropertySources() : null;
                }
            })
            .filter(ResourcePropertySource.class)
            .transform(new Function<ResourcePropertySource, Map<String, Object>>() {
                @Nullable
                @Override
                public Map<String, Object> apply(@Nullable final ResourcePropertySource input) {
                    return (input != null) ? input.getSource() : null;
                }
            })
            .transformAndConcat(new Function<Map<String, Object>, Iterable<Map.Entry<String, Object>>>() {
                @Nullable
                @Override
                public Iterable<Map.Entry<String, Object>> apply(@Nullable final Map<String, Object> input) {
                    return (input != null) ? input.entrySet() : null;
                }
            })
            .filter(new Predicate<Map.Entry<String, Object>>() {
                @Override
                public boolean apply(@Nullable final Map.Entry<String, Object> input) {
                    return (input != null && input.getKey().startsWith("spark."));
                }
            });

    // Copy the matching properties into the Spark configuration
    for (final Map.Entry<String, Object> entry : properties) {
        conf.set(entry.getKey(), entry.getValue().toString());
    }
    return conf;
}
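The Guava FluentIterable chain above is written in the pre-Java-8 anonymous-class style. A minimal sketch of the same idea using plain loops is shown below; the class and method names (SparkConfSketch, sparkConfFrom) are illustrative and not part of Kylo, and the sketch iterates every EnumerablePropertySource rather than only ResourcePropertySource instances, which is a simplification.

import org.apache.spark.SparkConf;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.EnumerablePropertySource;
import org.springframework.core.env.Environment;
import org.springframework.core.env.PropertySource;

public class SparkConfSketch {

    /** Copies every "spark." property visible in the Spring environment onto a new SparkConf. */
    static SparkConf sparkConfFrom(Environment env) {
        SparkConf conf = new SparkConf().setAppName("SparkShellServer");
        if (env instanceof AbstractEnvironment) {
            for (PropertySource<?> source : ((AbstractEnvironment) env).getPropertySources()) {
                if (source instanceof EnumerablePropertySource) {
                    for (String name : ((EnumerablePropertySource<?>) source).getPropertyNames()) {
                        if (name.startsWith("spark.")) {
                            conf.set(name, String.valueOf(env.getProperty(name)));
                        }
                    }
                }
            }
        }
        return conf;
    }
}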
use of org.springframework.core.env.AbstractEnvironment in project spring-boot by spring-projects.
the class EnvironmentConverterTests method convertedEnvironmentHasSameActiveProfiles.
@Test
void convertedEnvironmentHasSameActiveProfiles() {
    AbstractEnvironment originalEnvironment = new MockEnvironment();
    originalEnvironment.setActiveProfiles("activeProfile1", "activeProfile2");
    StandardEnvironment convertedEnvironment = this.environmentConverter.convertEnvironmentIfNecessary(originalEnvironment, StandardEnvironment.class);
    assertThat(convertedEnvironment.getActiveProfiles()).containsExactly("activeProfile1", "activeProfile2");
}
use of org.springframework.core.env.AbstractEnvironment in project incubator-dubbo-spring-boot-project by apache.
the class DubboRelaxedBinding2AutoConfiguration method dubboScanBasePackagesPropertyResolver.
public PropertyResolver dubboScanBasePackagesPropertyResolver(ConfigurableEnvironment environment) {
    // Ad-hoc environment that exposes only the Dubbo scan sub-properties (DUBBO_SCAN_PREFIX) of the enclosing environment
    ConfigurableEnvironment propertyResolver = new AbstractEnvironment() {
        @Override
        protected void customizePropertySources(MutablePropertySources propertySources) {
            Map<String, Object> dubboScanProperties = getSubProperties(environment.getPropertySources(), DUBBO_SCAN_PREFIX);
            propertySources.addLast(new MapPropertySource("dubboScanProperties", dubboScanProperties));
        }
    };
    // Adapt the property sources so Spring Boot configuration property names can be resolved against this environment
    ConfigurationPropertySources.attach(propertyResolver);
    return propertyResolver;
}
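A minimal, self-contained sketch of the same pattern follows; the names (ScopedEnvironmentSketch, scopedEnvironment, "scopedProperties") are illustrative and not part of the Dubbo starter. It shows how an anonymous AbstractEnvironment subclass can expose an ad-hoc set of properties, which is what the method above does for the Dubbo scan sub-properties before attaching Spring Boot's configuration property support.

import java.util.Collections;
import java.util.Map;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;

public class ScopedEnvironmentSketch {

    /** Builds a throwaway environment that exposes only the supplied properties. */
    static ConfigurableEnvironment scopedEnvironment(Map<String, Object> properties) {
        return new AbstractEnvironment() {
            @Override
            protected void customizePropertySources(MutablePropertySources propertySources) {
                propertySources.addLast(new MapPropertySource("scopedProperties", properties));
            }
        };
    }

    public static void main(String[] args) {
        ConfigurableEnvironment env = scopedEnvironment(Collections.singletonMap("base-packages", "com.example"));
        System.out.println(env.getProperty("base-packages")); // prints "com.example"
    }
}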
use of org.springframework.core.env.AbstractEnvironment in project kylo by Teradata.
the class LivyProperties method postConstruct.
@PostConstruct
private void postConstruct() {
    logger.debug("PostConstruct called for LivyProperties");
    if (!Lists.newArrayList(env.getActiveProfiles()).contains("kylo-livy")) {
        throw new IllegalStateException("Attempting to instantiate LivyProperties bean when 'kylo-livy' is not an active profile");
    }
    if (!StringUtils.isNotEmpty(hostname)) {
        throw new LivyConfigurationException("Attempt to start when 'kylo-livy' is an active profile and property 'spark.livy.hostname' not defined, or invalid.");
    }
    if (port == null || port <= 0) {
        throw new LivyConfigurationException("Attempt to start when 'kylo-livy' is an active profile and property 'spark.livy.port' not defined, or invalid.");
    }
    logger.debug("determine the set of spark properties to pass to Livy");
    // Collect every "spark." property, except "spark.livy." and "spark.shell." entries, from the enumerable property sources
    MutablePropertySources propSrcs = ((AbstractEnvironment) env).getPropertySources();
    StreamSupport.stream(propSrcs.spliterator(), false)
        .filter(ps -> ps instanceof EnumerablePropertySource)
        .map(ps -> ((EnumerablePropertySource) ps).getPropertyNames())
        .flatMap(Arrays::<String>stream)
        .filter(propName -> propName.startsWith("spark.") && !(propName.startsWith("spark.livy.") || propName.startsWith("spark.shell.")))
        .forEach(propName -> sparkProperties.put(propName, env.getProperty(propName)));
    logger.debug("Validate session kinds are supportable");
    if (!(livySessionKind.equals(SessionKind.shared) || livySessionKind.equals(SessionKind.spark))) {
        throw new LivyConfigurationException(String.format("Session kind='%s' is not yet supported", livySessionKind));
    }
    logger.info("The following spark properties were found in kylo config files: '{}'", sparkProperties);
}
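For reference, a hypothetical configuration fragment that would satisfy these checks is shown below. The keys spark.livy.hostname and spark.livy.port are taken from the error messages above; the profile key and the values are illustrative assumptions, not Kylo defaults.

# illustrative values only
spring.profiles.active=kylo-livy
spark.livy.hostname=localhost
spark.livy.port=8998
spark.executor.memory=2g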
use of org.springframework.core.env.AbstractEnvironment in project Gaffer by gchq.
the class FactoryConfigTest method shouldUsePropertiesFromEnvironmentToSetUpGraphFactory.
@Test
public void shouldUsePropertiesFromEnvironmentToSetUpGraphFactory() throws InstantiationException, IllegalAccessException {
    // Given
    FactoryConfig factoryConfig = new FactoryConfig();
    AbstractEnvironment mockEnv = mock(AbstractEnvironment.class);
    Properties properties = new Properties();
    properties.setProperty(GRAPH_FACTORY_CLASS, MockGraphFactory.class.getName());
    MockPropertySource mockPropertySource = new MockPropertySource(properties);
    MutablePropertySources propertySources = new MutablePropertySources();
    propertySources.addFirst(mockPropertySource);
    when(mockEnv.getPropertySources()).thenReturn(propertySources);
    when(mockEnv.getProperty(GRAPH_FACTORY_CLASS)).then(invocation -> mockPropertySource.getProperty(invocation.getArgumentAt(0, String.class)));
    // called by spring normally
    factoryConfig.setEnvironment(mockEnv);
    // When
    // Called by spring
    factoryConfig.setToSystemProperties();
    // Then
    assertEquals(MockGraphFactory.class, factoryConfig.createGraphFactory().getClass());
}
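Note that invocation.getArgumentAt(0, String.class) is the Mockito 1.x API. On Mockito 2 and later, the equivalent stubbing (an adjustment for illustration, not part of the Gaffer test) would be:

when(mockEnv.getProperty(GRAPH_FACTORY_CLASS)).then(invocation -> mockPropertySource.getProperty(invocation.getArgument(0)));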