Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
Class TikaImpl, method addReadPermissions.
// add resources to (what is typically) a jar, but might not be (e.g. in tests/IDE)
@SuppressForbidden(reason = "adds access to jar resources")
static void addReadPermissions(Permissions perms, URL[] resources) {
    try {
        for (URL url : resources) {
            Path path = PathUtils.get(url.toURI());
            // resource itself
            perms.add(new FilePermission(path.toString(), "read,readlink"));
            // classes underneath
            perms.add(new FilePermission(path.toString() + System.getProperty("file.separator") + "-", "read,readlink"));
        }
    } catch (URISyntaxException bogus) {
        throw new RuntimeException(bogus);
    }
}
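A hedged sketch of how a caller might consume the permissions built above, for example to confine a parsing call to read-only access on its own jar resources. The restrictedContext helper is illustrative, not part of the Elasticsearch source, and assumes java.security.AccessControlContext, CodeSource, Permissions, ProtectionDomain and java.security.cert.Certificate are imported.
// Hypothetical caller (not from the Elasticsearch source): wrap the computed permissions in an
// AccessControlContext so work run via AccessController.doPrivileged(action, context) is limited
// to read-only access on the given resources, e.g.
// AccessController.doPrivileged((PrivilegedAction<String>) () -> parse(bytes), restrictedContext(jars));
static AccessControlContext restrictedContext(URL[] resources) {
    Permissions perms = new Permissions();
    addReadPermissions(perms, resources);
    perms.setReadOnly();
    ProtectionDomain domain = new ProtectionDomain(new CodeSource(null, (Certificate[]) null), perms);
    return new AccessControlContext(new ProtectionDomain[] { domain });
}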
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
Class HdfsPlugin, method evilHadoopInit.
@SuppressForbidden(reason = "Needs a security hack for hadoop on windows, until HADOOP-XXXX is fixed")
private static Void evilHadoopInit() {
    // hack: on Windows, Shell's clinit has a similar problem that on unix,
    // but here we can workaround it for now by setting hadoop home
    // on unix: we still want to set this to something we control, because
    // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom
    // TODO: remove THIS when hadoop is fixed
    Path hadoopHome = null;
    String oldValue = null;
    try {
        hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
        oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
        Class.forName("org.apache.hadoop.security.UserGroupInformation");
        Class.forName("org.apache.hadoop.util.StringUtils");
        Class.forName("org.apache.hadoop.util.ShutdownHookManager");
        Class.forName("org.apache.hadoop.conf.Configuration");
        Class.forName("org.apache.hadoop.hdfs.protocol.HdfsConstants");
        Class.forName("org.apache.hadoop.hdfs.protocol.datatransfer.PipelineAck");
    } catch (ClassNotFoundException | IOException e) {
        throw new RuntimeException(e);
    } finally {
        // try to clean up the hack
        if (oldValue == null) {
            System.clearProperty("hadoop.home.dir");
        } else {
            System.setProperty("hadoop.home.dir", oldValue);
        }
        try {
            // try to clean up our temp dir too if we can
            if (hadoopHome != null) {
                Files.delete(hadoopHome);
            }
        } catch (IOException thisIsBestEffort) {
        }
    }
    return null;
}
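As a hedged illustration of where a method like this might be triggered (not verbatim from the plugin), a static initializer can run it once under the plugin's own protection domain; the Void return type exists so the method reference fits PrivilegedAction<Void>. Assumes java.security.AccessController and PrivilegedAction are imported.
// Hypothetical call-site: run the class-forcing hack once, with this class's own permissions
// rather than the caller's, so the property and temp-directory manipulation above is not
// rejected on account of a less-privileged caller on the stack.
static {
    AccessController.doPrivileged((PrivilegedAction<Void>) HdfsPlugin::evilHadoopInit);
}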
Use of org.elasticsearch.common.SuppressForbidden in project elasticsearch by elastic.
Class HdfsRepository, method createContext.
// create hadoop filecontext
@SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)")
private static FileContext createContext(URI uri, Settings repositorySettings) {
    Configuration cfg = new Configuration(repositorySettings.getAsBoolean("load_defaults", true));
    cfg.setClassLoader(HdfsRepository.class.getClassLoader());
    cfg.reloadConfiguration();
    Map<String, String> map = repositorySettings.getByPrefix("conf.").getAsMap();
    for (Entry<String, String> entry : map.entrySet()) {
        cfg.set(entry.getKey(), entry.getValue());
    }
    // create a hadoop user. if we want some auth, it must be done different anyway, and tested.
    Subject subject;
    try {
        Class<?> clazz = Class.forName("org.apache.hadoop.security.User");
        Constructor<?> ctor = clazz.getConstructor(String.class);
        ctor.setAccessible(true);
        Principal principal = (Principal) ctor.newInstance(System.getProperty("user.name"));
        subject = new Subject(false, Collections.singleton(principal), Collections.emptySet(), Collections.emptySet());
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException(e);
    }
    // disable FS cache
    cfg.setBoolean("fs.hdfs.impl.disable.cache", true);
    // create the filecontext with our user
    return Subject.doAs(subject, (PrivilegedAction<FileContext>) () -> {
        try {
            AbstractFileSystem fs = AbstractFileSystem.get(uri, cfg);
            return FileContext.getFileContext(fs, cfg);
        } catch (UnsupportedFileSystemException e) {
            throw new RuntimeException(e);
        }
    });
}
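A hedged usage sketch, not taken from the repository class: once the FileContext is created, ordinary Hadoop calls such as util().exists and mkdir can run against it. The setting keys "uri" and "path" and the openRepository name are illustrative assumptions; it assumes org.apache.hadoop.fs.FileContext, org.apache.hadoop.fs.permission.FsPermission, java.net.URI and java.io.IOException are imported.
// Hypothetical helper (illustrative setting keys): resolve the repository URI and base path from
// the settings, build the context, and create the base directory if it is missing.
static FileContext openRepository(Settings repositorySettings) throws IOException {
    URI uri = URI.create(repositorySettings.get("uri"));
    FileContext fileContext = createContext(uri, repositorySettings);
    org.apache.hadoop.fs.Path base = new org.apache.hadoop.fs.Path(repositorySettings.get("path", "/"));
    if (fileContext.util().exists(base) == false) {
        fileContext.mkdir(base, FsPermission.getDirDefault(), true); // also creates missing parents
    }
    return fileContext;
}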