Use of org.apache.hadoop.conf.Configurable in the Apache Gora project:
the class DataStoreFactory, method createDataStore.
/**
 * Instantiates and initializes a new {@link DataStore}.
 *
 * @param dataStoreClass The datastore implementation class.
 * @param keyClass The key class.
 * @param persistent The value class.
 * @param conf {@link Configuration} to be used by the store; may be {@code null}.
 * @param properties The properties to be used by the store.
 * @param schemaName A default schema name that will be put on the properties.
 * @return A new, initialized store instance.
 * @throws GoraException if the store cannot be instantiated or initialized.
 */
public static <D extends DataStore<K, T>, K, T extends Persistent> D createDataStore(Class<D> dataStoreClass, Class<K> keyClass, Class<T> persistent, Configuration conf, Properties properties, String schemaName) throws GoraException {
    try {
        setDefaultSchemaName(properties, schemaName);
        D store = ReflectionUtils.newInstance(dataStoreClass);
        // Hand the Hadoop configuration to the store when it accepts one.
        if (conf != null && store instanceof Configurable) {
            ((Configurable) store).setConf(conf);
        }
        initializeDataStore(store, keyClass, persistent, properties);
        return store;
    } catch (GoraException ex) {
        // Already the exception type callers expect; propagate unchanged.
        throw ex;
    } catch (Exception ex) {
        // Wrap anything else (reflection/initialization failures) preserving the cause.
        throw new GoraException(ex);
    }
}
Use of org.apache.hadoop.conf.Configurable in the Apache Jena project:
the class AbstractCompressedNodeTupleInputFormatTests, method getOutputStream.
@Override
protected OutputStream getOutputStream(File f) throws IOException {
    // Obtain the compression codec and, when it supports configuration,
    // hand it the test configuration before producing any output.
    CompressionCodec codec = this.getCompressionCodec();
    if (codec instanceof Configurable) {
        Configurable configurableCodec = (Configurable) codec;
        configurableCodec.setConf(this.prepareConfiguration());
    }
    // Open the target file for (truncating) write and wrap it in the
    // codec's compressing output stream.
    FileOutputStream rawOutput = new FileOutputStream(f, false);
    return codec.createOutputStream(rawOutput);
}
Use of org.apache.hadoop.conf.Configurable in the Apache Jena project:
the class AbstractCompressedWholeFileQuadInputFormatTests, method getOutputStream.
@Override
protected OutputStream getOutputStream(File f) throws IOException {
    CompressionCodec codec = this.getCompressionCodec();
    // Codecs that implement Configurable need the test configuration applied
    // before they can create a compressing stream.
    if (codec instanceof Configurable) {
        ((Configurable) codec).setConf(this.prepareConfiguration());
    }
    // Truncate any existing file, then compress everything written to it.
    return codec.createOutputStream(new FileOutputStream(f, false));
}
Use of org.apache.hadoop.conf.Configurable in the Apache Jena project:
the class AbstractCompressedWholeFileTripleInputFormatTests, method getOutputStream.
@Override
protected OutputStream getOutputStream(File f) throws IOException {
    // Fetch the codec under test; configure it first when it is Configurable.
    CompressionCodec compressionCodec = this.getCompressionCodec();
    if (compressionCodec instanceof Configurable) {
        Configurable asConfigurable = (Configurable) compressionCodec;
        asConfigurable.setConf(this.prepareConfiguration());
    }
    // Write compressed output over a freshly truncated file.
    FileOutputStream target = new FileOutputStream(f, false);
    return compressionCodec.createOutputStream(target);
}
Use of org.apache.hadoop.conf.Configurable in the Apache Hive project:
the class HadoopThriftAuthBridge23, method getHadoopSaslProperties.
/**
 * Read and return Hadoop SASL configuration which can be configured using
 * "hadoop.rpc.protection".
 *
 * <p>Handles two Hadoop versions via reflection: on 2.4 and earlier the
 * properties are read from the static SaslRpcServer field; on 2.5 and later
 * they come from a SaslPropertiesResolver instance.
 *
 * @param conf the Hadoop {@link Configuration} the SASL properties are read from
 * @return Hadoop SASL configuration
 * @throws IllegalStateException if the properties cannot be obtained via reflection
 */
@SuppressWarnings("unchecked")
@Override
public Map<String, String> getHadoopSaslProperties(Configuration conf) {
if (SASL_PROPS_FIELD != null) {
// hadoop 2.4 and earlier way of finding the sasl property settings
// Initialize the SaslRpcServer to ensure QOP parameters are read from
// conf
SaslRpcServer.init(conf);
try {
// Static field lookup: the field holds the shared SASL properties map.
return (Map<String, String>) SASL_PROPS_FIELD.get(null);
} catch (Exception e) {
throw new IllegalStateException("Error finding hadoop SASL properties", e);
}
}
// 2.5 and later way of finding sasl property
try {
// NOTE(review): RES_GET_INSTANCE_METHOD presumably resolves to
// SaslPropertiesResolver.getInstance(conf) — confirm against the
// reflective setup elsewhere in this class.
Configurable saslPropertiesResolver = (Configurable) RES_GET_INSTANCE_METHOD.invoke(null, conf);
// setConf is called explicitly so the resolver reads QOP settings from conf.
saslPropertiesResolver.setConf(conf);
return (Map<String, String>) GET_DEFAULT_PROP_METHOD.invoke(saslPropertiesResolver);
} catch (Exception e) {
throw new IllegalStateException("Error finding hadoop SASL properties", e);
}
}
Aggregations