Use of org.apache.hadoop.hive.ql.exec.AddToClassPathAction in project hive by apache.
The class FunctionLocalizer, method refreshClassloader.
public void refreshClassloader() throws IOException {
  if (recentlyLocalizedJars.isEmpty()) {
    return;
  }
  String[] jars = recentlyLocalizedJars.toArray(new String[0]);
  recentlyLocalizedJars.clear();
  ClassLoader updatedCl = null;
  try {
    AddToClassPathAction addAction = new AddToClassPathAction(executorClassloader, Arrays.asList(jars));
    updatedCl = AccessController.doPrivileged(addAction);
    if (LOG.isInfoEnabled()) {
      LOG.info("Added " + jars.length + " jars to classpath");
    }
  } catch (Throwable t) {
    // TODO: we could fall back to trying one by one and only ignore the failed ones.
    logRefreshError("Unable to localize jars: ", jars, t);
    return; // logRefreshError always throws.
  }
  if (updatedCl != executorClassloader) {
    throw new AssertionError("Classloader was replaced despite using UDFClassLoader: new "
        + updatedCl + ", old " + executorClassloader);
  }
  String[] classNames = recentlyLocalizedClasses.toArray(new String[0]);
  recentlyLocalizedClasses.clear();
  try {
    for (String className : classNames) {
      allowedUdfClasses.put(Class.forName(className, false, executorClassloader), Boolean.TRUE);
    }
  } catch (Throwable t) {
    // TODO: we could fall back to trying one by one and only ignore the failed ones.
    logRefreshError("Unable to instantiate localized classes: ", classNames, t);
    return; // logRefreshError always throws.
  }
}
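Every snippet on this page follows the same two-step pattern: build an AddToClassPathAction over a target classloader and a jar list, then run it under AccessController.doPrivileged so the classpath mutation happens with the library's own privileges rather than the caller's. Below is a minimal self-contained sketch of that pattern; AddJarsAction is a hypothetical stand-in, not Hive's class.

import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Paths;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.List;

// Hypothetical stand-in for AddToClassPathAction: wraps loader creation
// in a PrivilegedAction so it can run under doPrivileged.
final class AddJarsAction implements PrivilegedAction<ClassLoader> {
  private final ClassLoader parent;
  private final List<String> jarPaths;

  AddJarsAction(ClassLoader parent, List<String> jarPaths) {
    this.parent = parent;
    this.jarPaths = jarPaths;
  }

  @Override
  public ClassLoader run() {
    URL[] urls = new URL[jarPaths.size()];
    for (int i = 0; i < urls.length; i++) {
      try {
        urls[i] = Paths.get(jarPaths.get(i)).toUri().toURL();
      } catch (MalformedURLException e) {
        throw new IllegalArgumentException("Bad jar path: " + jarPaths.get(i), e);
      }
    }
    // Unlike Hive's UDFClassLoader, this sketch always returns a new loader.
    return new URLClassLoader(urls, parent);
  }

  public static void main(String[] args) {
    ClassLoader updated = AccessController.doPrivileged(
        new AddJarsAction(Thread.currentThread().getContextClassLoader(),
            List.of("/tmp/example.jar"))); // placeholder path
    System.out.println("New loader: " + updated);
  }
}

Note that the real AddToClassPathAction mutates a Hive UDFClassLoader in place when it is handed one, which is exactly why refreshClassloader above can assert that the returned loader is still executorClassloader.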
Use of org.apache.hadoop.hive.ql.exec.AddToClassPathAction in project hive by apache.
The class ResourceMaps, method loadReloadableAuxJars.
/**
 * Reload the jars under the path specified in hive.reloadable.aux.jars.path property.
 *
 * @throws IOException
 */
public void loadReloadableAuxJars() throws IOException {
  LOG.info("Reloading auxiliary JAR files");
  final String renewableJarPath = sessionConf.getVar(ConfVars.HIVERELOADABLEJARS);
  // do nothing if this property is not specified or empty
  if (StringUtils.isBlank(renewableJarPath)) {
    LOG.warn("Configuration {} not specified", ConfVars.HIVERELOADABLEJARS);
    return;
  }
  // load jars under the hive.reloadable.aux.jars.path
  final Set<String> jarPaths = FileUtils.getJarFilesByPath(renewableJarPath, sessionConf);
  LOG.info("Auxiliary JAR files discovered for reload: {}", jarPaths);
  // remove the previous renewable jars
  if (!preReloadableAuxJars.isEmpty()) {
    Utilities.removeFromClassPath(preReloadableAuxJars.toArray(new String[0]));
  }
  if (!jarPaths.isEmpty()) {
    AddToClassPathAction addAction = new AddToClassPathAction(
        SessionState.get().getConf().getClassLoader(), jarPaths);
    final ClassLoader currentCLoader = AccessController.doPrivileged(addAction);
    sessionConf.setClassLoader(currentCLoader);
    Thread.currentThread().setContextClassLoader(currentCLoader);
  }
  preReloadableAuxJars.clear();
  preReloadableAuxJars.addAll(jarPaths);
}
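For context, this method backs Hive's reload mechanism: hive.reloadable.aux.jars.path points at a directory, new jars are dropped there, and each reload swaps the tracked jar set wholesale (remove the previous set, add the current directory contents). The following is a hedged sketch of driving it from a session; the property name and SessionState API are real, but the path and surrounding scaffolding are illustrative.

import java.io.IOException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ReloadAuxJarsExample {
  public static void main(String[] args) throws IOException {
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVERELOADABLEJARS, "/opt/hive/aux-jars");
    SessionState ss = SessionState.start(conf);
    // ... copy new jars into /opt/hive/aux-jars, then:
    ss.loadReloadableAuxJars(); // removes last reload's jars, adds the current directory contents
  }
}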
Use of org.apache.hadoop.hive.ql.exec.AddToClassPathAction in project hive by apache.
The class ResourceMaps, method registerJars.
static void registerJars(List<String> newJars) throws IllegalArgumentException {
  LogHelper console = getConsole();
  try {
    AddToClassPathAction addAction = new AddToClassPathAction(
        Thread.currentThread().getContextClassLoader(), newJars);
    final ClassLoader newLoader = AccessController.doPrivileged(addAction);
    Thread.currentThread().setContextClassLoader(newLoader);
    SessionState.get().getConf().setClassLoader(newLoader);
    console.printInfo("Added " + newJars + " to class path");
  } catch (Exception e) {
    String message = "Unable to register " + newJars;
    throw new IllegalArgumentException(message, e);
  }
}
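registerJars is the session-side worker behind ADD JAR: on success both the thread context classloader and the session configuration see the new loader; on any failure it rethrows as IllegalArgumentException wrapping the cause. A small hedged usage sketch follows; the call site and jar paths are placeholders, not Hive code.

// Hypothetical call site; in Hive proper this is reached through the
// session's resource handling rather than called directly.
List<String> jars = Arrays.asList("/tmp/my-udfs.jar", "/tmp/serde-extras.jar");
registerJars(jars);
// From here on, Class.forName against the context classloader can resolve
// classes packaged in the two jars.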
Use of org.apache.hadoop.hive.ql.exec.AddToClassPathAction in project hive by apache.
The class ResourceMaps, method loadAuxJars.
/**
 * Load the jars under the path specified in hive.aux.jars.path property. Add
 * the jars to the classpath so the local task can refer to them.
 *
 * @throws IOException
 */
public void loadAuxJars() throws IOException {
  String[] jarPaths = StringUtils.split(sessionConf.getAuxJars(), ',');
  if (ArrayUtils.isEmpty(jarPaths)) {
    return;
  }
  AddToClassPathAction addAction = new AddToClassPathAction(
      SessionState.get().getConf().getClassLoader(), Arrays.asList(jarPaths));
  final ClassLoader currentCLoader = AccessController.doPrivileged(addAction);
  sessionConf.setClassLoader(currentCLoader);
  Thread.currentThread().setContextClassLoader(currentCLoader);
}
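The contrast with loadReloadableAuxJars is worth noting: hive.aux.jars.path is parsed once as a comma-separated list of explicit jar files, and jars are only ever added, never removed. A tiny sketch of the parsing step in isolation, assuming the commons-lang3 StringUtils that the snippet appears to use:

// commons-lang3 split treats adjacent separators as one and drops empty
// tokens, so a stray trailing comma is harmless.
String auxJars = "/opt/hive/a.jar,/opt/hive/b.jar,";
String[] jarPaths = org.apache.commons.lang3.StringUtils.split(auxJars, ',');
// jarPaths -> ["/opt/hive/a.jar", "/opt/hive/b.jar"]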
Use of org.apache.hadoop.hive.ql.exec.AddToClassPathAction in project hive by apache.
The class ExecDriver, method main.
@SuppressWarnings("unchecked")
public static void main(String[] args) throws IOException, HiveException {
  String planFileName = null;
  String jobConfFileName = null;
  boolean noLog = false;
  String files = null;
  String libjars = null;
  boolean localtask = false;
  try {
    for (int i = 0; i < args.length; i++) {
      if (args[i].equals("-plan")) {
        planFileName = args[++i];
      } else if (args[i].equals("-jobconffile")) {
        jobConfFileName = args[++i];
      } else if (args[i].equals("-nolog")) {
        noLog = true;
      } else if (args[i].equals("-files")) {
        files = args[++i];
      } else if (args[i].equals("-libjars")) {
        libjars = args[++i];
      } else if (args[i].equals("-localtask")) {
        localtask = true;
      }
    }
  } catch (IndexOutOfBoundsException e) {
    System.err.println("Missing argument to option");
    printUsage();
  }
  JobConf conf;
  if (localtask) {
    conf = new JobConf(MapredLocalTask.class);
  } else {
    conf = new JobConf(ExecDriver.class);
  }
  if (jobConfFileName != null) {
    conf.addResource(new Path(jobConfFileName));
  }
  // Initialize the resources from command line
  if (files != null) {
    conf.set("tmpfiles", files);
  }
  if (libjars != null) {
    conf.set("tmpjars", libjars);
  }
  if (UserGroupInformation.isSecurityEnabled()) {
    String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
    if (hadoopAuthToken != null) {
      conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
    }
  }
  boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
  String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
  if (queryId.isEmpty()) {
    queryId = "unknown-" + System.currentTimeMillis();
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, queryId);
  }
  System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
  LogUtils.registerLoggingContext(conf);
  if (noLog) {
    // If started from main(), and noLog is on, we should not output
    // any logs. To turn the log on, please set -Dtest.silent=false
    org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
    NullAppender appender = NullAppender.createNullAppender();
    appender.addToLogger(logger.getName(), Level.ERROR);
    appender.start();
  } else {
    setupChildLog4j(conf);
  }
  Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName());
  LogHelper console = new LogHelper(LOG, isSilent);
  if (planFileName == null) {
    console.printError("Must specify Plan File Name");
    printUsage();
  }
  // print out the location of the log file for the user so that it's easy
  // to find the reason for local mode execution failures
  for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
      .getAppenders().values()) {
    if (appender instanceof FileAppender) {
      console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
    } else if (appender instanceof RollingFileAppender) {
      console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
    }
  }
  // the plan file should always be in local directory
  Path p = new Path(planFileName);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream pathData = fs.open(p);
  // libjars are not added to the classpath of the child process, so we add them here explicitly
  try {
    // see also - code in CliDriver.java
    ClassLoader loader = conf.getClassLoader();
    if (StringUtils.isNotBlank(libjars)) {
      AddToClassPathAction addAction = new AddToClassPathAction(
          loader, Arrays.asList(StringUtils.split(libjars, ",")));
      loader = AccessController.doPrivileged(addAction);
    }
    conf.setClassLoader(loader);
    // Also set this to the Thread ContextClassLoader, so new threads will inherit
    // this class loader, and propagate into newly created Configurations by those new threads.
    Thread.currentThread().setContextClassLoader(loader);
  } catch (Exception e) {
    throw new HiveException(e.getMessage(), e);
  }
  int ret;
  if (localtask) {
    memoryMXBean = ManagementFactory.getMemoryMXBean();
    MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class);
    MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
    ed.initialize(null, null, new TaskQueue(), null);
    ret = ed.executeInProcess();
  } else {
    MapredWork plan = SerializationUtilities.deserializePlan(pathData, MapredWork.class);
    ExecDriver ed = new ExecDriver(plan, conf, isSilent);
    ed.setTaskQueue(new TaskQueue());
    ret = ed.execute();
  }
  if (ret != 0) {
    System.exit(ret);
  }
}
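Putting the option parser together, a direct invocation for a local task would look roughly like the sketch below. The paths are placeholders; in practice Hive builds this command line itself when it spawns a child JVM, and note that main calls System.exit on a nonzero return.

// Hypothetical test harness around ExecDriver.main; flags mirror the parser above.
String[] args = {
    "-plan", "/tmp/plan.xml",           // serialized MapredLocalWork
    "-jobconffile", "/tmp/jobconf.xml", // extra JobConf resource
    "-libjars", "/tmp/udfs.jar",        // added to the classpath via AddToClassPathAction
    "-localtask",                       // deserialize MapredLocalWork and run in-process
    "-nolog"                            // route logging to a NullAppender
};
ExecDriver.main(args);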