Use of org.apache.hadoop.io.ReadaheadPool in the Apache Hive project.
Class HivePreWarmProcessor, method run:
@Override
public void run(Map<String, LogicalInput> inputs, Map<String, LogicalOutput> outputs) throws Exception {
  if (prewarmed) {
    /* container reuse */
    return;
  }
  for (LogicalInput input : inputs.values()) {
    input.start();
  }
  for (LogicalOutput output : outputs.values()) {
    output.start();
  }
  /* these are things that go through singleton initialization on most queries */
  FileSystem fs = FileSystem.get(conf);
  Mac mac = Mac.getInstance("HmacSHA1");
  ReadaheadPool rpool = ReadaheadPool.getInstance();
  ShimLoader.getHadoopShims();
  URL hiveurl = new URL("jar:" + DagUtils.getInstance().getExecJarPathLocal() + "!/");
  JarURLConnection hiveconn = (JarURLConnection) hiveurl.openConnection();
  JarFile hivejar = hiveconn.getJarFile();
  try {
    Enumeration<JarEntry> classes = hivejar.entries();
    while (classes.hasMoreElements()) {
      JarEntry je = classes.nextElement();
      if (je.getName().endsWith(".class")) {
        String klass = je.getName().replace(".class", "").replaceAll("/", "\\.");
        if (klass.indexOf("ql.exec") != -1 || klass.indexOf("ql.io") != -1) {
          /* several Hive classes depend on the metastore APIs, which are not included
           * in hive-exec.jar. These are the relatively safe ones - operators & io classes.
           */
          if (klass.indexOf("vector") != -1 || klass.indexOf("Operator") != -1) {
            JavaUtils.loadClass(klass);
          }
        }
      }
    }
  } finally {
    hivejar.close();
  }
  prewarmed = true;
}
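The prewarm above only calls ReadaheadPool.getInstance() so that the pool's singleton (and the native posix_fadvise support behind it) is initialized before the first real query runs in the container. At read time the pool is normally driven through its readaheadStream method. The sketch below is a minimal, hypothetical illustration of that pattern, not code from Hive: PrefetchingReader and READAHEAD_BYTES are invented names, and the readaheadStream signature shown is the one from Hadoop's ReadaheadPool as I recall it, which may differ across Hadoop versions.

import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.io.ReadaheadPool;
import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;

/* Hypothetical reader that asks the shared ReadaheadPool to prefetch ahead of
 * the current read position. Names here are illustrative only. */
public class PrefetchingReader implements AutoCloseable {
  private static final long READAHEAD_BYTES = 4L * 1024 * 1024; // assumed readahead window

  private final ReadaheadPool pool = ReadaheadPool.getInstance(); // same singleton the prewarm initializes
  private final FileInputStream in;
  private final FileDescriptor fd;
  private final String identifier;
  private ReadaheadRequest lastRequest; // handed back so the pool can cancel/replace it
  private long position;

  public PrefetchingReader(String path) throws IOException {
    this.in = new FileInputStream(path);
    this.fd = in.getFD();
    this.identifier = path;
  }

  public int read(byte[] buf) throws IOException {
    // Advisory request to read ahead of the current position; effectively a
    // no-op when native posix_fadvise support is not available.
    lastRequest = pool.readaheadStream(
        identifier, fd, position, READAHEAD_BYTES, Long.MAX_VALUE, lastRequest);
    int n = in.read(buf);
    if (n > 0) {
      position += n;
    }
    return n;
  }

  @Override
  public void close() throws IOException {
    if (lastRequest != null) {
      lastRequest.cancel();
    }
    in.close();
  }
}

Passing the previous ReadaheadRequest back into readaheadStream lets the pool cancel a stale request before issuing the next one, which is why the sketch keeps lastRequest as a field.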