Usage example of org.apache.accumulo.core.conf.Property in the Apache Accumulo project: method getClientConfiguration() of class ClientOpts.
/**
 * Builds and memoizes the {@link ClientConfiguration} for these command-line options.
 * Precedence: explicit client config file (or the default search path), then the
 * {@code --ssl}/{@code --sasl} flags, then — when a site file is supplied — instance id,
 * zookeepers, and the SSL flag derived from the site XML.
 *
 * @return the cached or newly constructed client configuration
 * @throws IllegalArgumentException if the client configuration cannot be loaded
 */
public ClientConfiguration getClientConfiguration() throws IllegalArgumentException {
// Return the memoized result if a previous call already built it.
if (cachedClientConfig != null)
return cachedClientConfig;
ClientConfiguration clientConfig;
try {
// No explicit file given: fall back to the default client-config search locations.
if (clientConfigFile == null)
clientConfig = ClientConfiguration.loadDefault();
else
clientConfig = ClientConfiguration.fromFile(new File(clientConfigFile));
} catch (Exception e) {
// Wrap any load/parse failure in the exception type declared by this method.
throw new IllegalArgumentException(e);
}
// Command-line flags override whatever the configuration file specified.
if (sslEnabled)
clientConfig.setProperty(ClientProperty.INSTANCE_RPC_SSL_ENABLED, "true");
if (saslEnabled)
clientConfig.setProperty(ClientProperty.INSTANCE_RPC_SASL_ENABLED, "true");
if (siteFile != null) {
// A site file was given: derive zookeepers and the instance id from it. The anonymous
// configuration consults the site XML first and falls back to compiled-in defaults.
AccumuloConfiguration config = new AccumuloConfiguration() {
Configuration xml = new Configuration();
{
// Instance initializer: load the site XML resource into the Hadoop Configuration.
xml.addResource(new Path(siteFile));
}
@Override
public void getProperties(Map<String, String> props, Predicate<String> filter) {
// Defaults are inserted first so site-file entries overwrite them below.
for (Entry<String, String> prop : DefaultConfiguration.getInstance()) if (filter.test(prop.getKey()))
props.put(prop.getKey(), prop.getValue());
for (Entry<String, String> prop : xml) if (filter.test(prop.getKey()))
props.put(prop.getKey(), prop.getValue());
}
@Override
public String get(Property property) {
// Site-file value wins; otherwise return the compiled-in default.
String value = xml.get(property.getKey());
if (value != null)
return value;
return DefaultConfiguration.getInstance().get(property);
}
};
this.zookeepers = config.get(Property.INSTANCE_ZK_HOST);
// Read the instance id from the "instance_id" directory of the first configured volume.
String volDir = VolumeConfiguration.getVolumeUris(config)[0];
Path instanceDir = new Path(volDir, "instance_id");
String instanceIDFromFile = ZooUtil.getInstanceIDFromHdfs(instanceDir, config);
// The site file can also force SSL on, regardless of the command-line flag.
if (config.getBoolean(Property.INSTANCE_RPC_SSL_ENABLED))
clientConfig.setProperty(ClientProperty.INSTANCE_RPC_SSL_ENABLED, "true");
return cachedClientConfig = clientConfig.withInstance(UUID.fromString(instanceIDFromFile)).withZkHosts(zookeepers);
}
// No site file: use the instance name and zookeepers supplied via options/fields.
return cachedClientConfig = clientConfig.withInstance(instance).withZkHosts(zookeepers);
}
Usage example of org.apache.accumulo.core.conf.Property in the Apache Accumulo project: method addSummarizers() of class TableOperationsImpl.
/**
 * Adds summarizer configurations to a table. Configurations already present on the table are
 * skipped silently; a new configuration whose property id collides with a <em>different</em>
 * existing configuration is rejected, because setting its table properties would silently
 * clobber the existing summarizer's settings.
 *
 * @param tableName the table to add summarizers to
 * @param newConfigs summarizer configurations to add
 * @throws IllegalArgumentException if a property id is already in use by a different
 *         summarizer configuration on the table
 */
@Override
public void addSummarizers(String tableName, SummarizerConfiguration... newConfigs) throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
  HashSet<SummarizerConfiguration> currentConfigs = new HashSet<>(SummarizerConfiguration.fromTableProperties(getProperties(tableName)));
  HashSet<SummarizerConfiguration> newConfigSet = new HashSet<>(Arrays.asList(newConfigs));
  // Configurations already on the table are a no-op; drop them before the collision check.
  newConfigSet.removeAll(currentConfigs);
  Set<String> newIds = newConfigSet.stream().map(SummarizerConfiguration::getPropertyId).collect(toSet());
  // Fail fast on a property-id collision with a differing existing configuration.
  for (SummarizerConfiguration csc : currentConfigs) {
    if (newIds.contains(csc.getPropertyId())) {
      throw new IllegalArgumentException("Summarizer property id is in use by " + csc);
    }
  }
  // Apply the table properties for the remaining (genuinely new) configurations.
  for (Entry<String, String> entry : SummarizerConfiguration.toTableProperties(newConfigSet).entrySet()) {
    setProperty(tableName, entry.getKey(), entry.getValue());
  }
}
Usage example of org.apache.accumulo.core.conf.Property in the Apache Accumulo project: method convertClientConfig() of class ClientContext.
/**
 * A utility method for converting client configuration to a standard configuration object for use internally.
 *
 * <p>Lookup precedence for {@link #get(Property)}: Hadoop CredentialProvider (sensitive
 * properties only), then the client configuration, then compiled-in defaults — with a special
 * case reconstructing {@code GENERAL_KERBEROS_PRINCIPAL} from the client-side Kerberos server
 * primary.
 *
 * @param config
 * the original {@link ClientConfiguration}
 * @return the client configuration presented in the form of an {@link AccumuloConfiguration}
 */
public static AccumuloConfiguration convertClientConfig(final ClientConfiguration config) {
final AccumuloConfiguration defaults = DefaultConfiguration.getInstance();
return new AccumuloConfiguration() {
@Override
public String get(Property property) {
final String key = property.getKey();
// Attempt to load sensitive properties from a CredentialProvider, if configured
if (property.isSensitive()) {
org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
if (null != hadoopConf) {
try {
char[] value = CredentialProviderFactoryShim.getValueFromCredentialProvider(hadoopConf, key);
if (null != value) {
log.trace("Loaded sensitive value for {} from CredentialProvider", key);
return new String(value);
} else {
log.trace("Tried to load sensitive value for {} from CredentialProvider, but none was found", key);
}
} catch (IOException e) {
// Best-effort: a broken provider must not make the property unreadable.
log.warn("Failed to extract sensitive property ({}) from Hadoop CredentialProvider, falling back to base AccumuloConfiguration", key, e);
}
}
}
// Client configuration takes precedence over the compiled-in defaults.
if (config.containsKey(key))
return config.getString(key);
else {
// Reconstitute the server kerberos property from the client config
if (Property.GENERAL_KERBEROS_PRINCIPAL == property) {
if (config.containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())) {
// Avoid providing a realm since we don't know what it is...
return config.getString(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey()) + "/_HOST@" + SaslConnectionParams.getDefaultRealm();
}
}
return defaults.get(property);
}
}
@Override
public void getProperties(Map<String, String> props, Predicate<String> filter) {
// Defaults first, so client-config and provider values below overwrite them.
defaults.getProperties(props, filter);
Iterator<String> keyIter = config.getKeys();
while (keyIter.hasNext()) {
String key = keyIter.next().toString();
if (filter.test(key))
props.put(key, config.getString(key));
}
// Automatically reconstruct the server property when converting a client config.
if (props.containsKey(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey())) {
// The client-side key is removed; it is replaced by the server-side principal below.
final String serverPrimary = props.remove(ClientConfiguration.ClientProperty.KERBEROS_SERVER_PRIMARY.getKey());
if (filter.test(Property.GENERAL_KERBEROS_PRINCIPAL.getKey())) {
// Use the _HOST expansion. It should be unnecessary in "client land".
props.put(Property.GENERAL_KERBEROS_PRINCIPAL.getKey(), serverPrimary + "/_HOST@" + SaslConnectionParams.getDefaultRealm());
}
}
// Attempt to load sensitive properties from a CredentialProvider, if configured
org.apache.hadoop.conf.Configuration hadoopConf = getHadoopConfiguration();
if (null != hadoopConf) {
try {
for (String key : CredentialProviderFactoryShim.getKeys(hadoopConf)) {
// Only valid, sensitive Accumulo property keys are taken from the provider.
if (!Property.isValidPropertyKey(key) || !Property.isSensitive(key)) {
continue;
}
if (filter.test(key)) {
char[] value = CredentialProviderFactoryShim.getValueFromCredentialProvider(hadoopConf, key);
if (null != value) {
props.put(key, new String(value));
}
}
}
} catch (IOException e) {
log.warn("Failed to extract sensitive properties from Hadoop CredentialProvider, falling back to accumulo-site.xml", e);
}
}
}
// Returns a Hadoop Configuration pointing at the configured CredentialProvider paths,
// or null when no provider paths are configured in the client configuration.
private org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
String credProviderPaths = config.getString(Property.GENERAL_SECURITY_CREDENTIAL_PROVIDER_PATHS.getKey());
if (null != credProviderPaths && !credProviderPaths.isEmpty()) {
org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
hadoopConf.set(CredentialProviderFactoryShim.CREDENTIAL_PROVIDER_PATH, credProviderPaths);
return hadoopConf;
}
log.trace("Did not find credential provider configuration in ClientConfiguration");
return null;
}
};
}
Usage example of org.apache.accumulo.core.conf.Property in the Apache Accumulo project: test method testCryptoDoesntLeakSensitive() of class RFileTest.
@Test
public void testCryptoDoesntLeakSensitive() throws IOException {
  conf = setAndGetAccumuloConfig(CryptoTest.CRYPTO_ON_CONF);
  // Write an empty RFile with encryption enabled and capture the raw output bytes.
  TestRFile testFile = new TestRFile(conf);
  testFile.openWriter();
  testFile.closeWriter();
  byte[] encryptedBytes = testFile.baos.toByteArray();
  // If we get here, we have encrypted bytes
  // No sensitive property key may show up in plaintext inside the encrypted output.
  for (Property property : Property.values()) {
    if (!property.isSensitive()) {
      continue;
    }
    byte[] sensitiveKey = property.getKey().getBytes();
    assertEquals(-1, Bytes.indexOf(encryptedBytes, sensitiveKey));
  }
}
Usage example of org.apache.accumulo.core.conf.Property in the Apache Accumulo project: test method testCryptoModuleDoesntLeakSensitive() of class CryptoTest.
@Test
public void testCryptoModuleDoesntLeakSensitive() throws IOException {
  AccumuloConfiguration conf = setAndGetAccumuloConfig(CRYPTO_ON_CONF);
  CryptoModuleParameters params = CryptoModuleFactory.createParamsObjectFromAccumuloConfiguration(conf);
  // Route the module's plaintext side into a buffer so the encrypted bytes can be inspected.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  params.setPlaintextOutputStream(buffer);
  CryptoModule cryptoModule = CryptoModuleFactory.getCryptoModule(conf);
  cryptoModule.getEncryptingOutputStream(params);
  params.getEncryptedOutputStream().close();
  // If we get here, we have encrypted bytes
  byte[] encryptedBytes = buffer.toByteArray();
  // None of the sensitive property keys may appear in plaintext in the output.
  for (Property property : Property.values()) {
    if (!property.isSensitive()) {
      continue;
    }
    byte[] sensitiveKey = property.getKey().getBytes();
    assertEquals(-1, Bytes.indexOf(encryptedBytes, sensitiveKey));
  }
}
End of aggregated usage examples.