Example usage of com.esotericsoftware.kryo.Kryo in the Apache Hive project: the Utilities class, method getBaseWork.
/**
 * Returns the Map or Reduce plan for the given plan name, deserializing it either from a
 * string stored in the configuration (RPC mode) or from a plan file on the file system.
 * Side effect: the BaseWork returned is also placed in the gWorkMap cache, so subsequent
 * calls for the same path return the cached instance.
 *
 * @param conf job configuration holding the execution engine, plan path/string, and
 *             mapper/reducer class names used to pick the concrete work type
 * @param name plan name (e.g. MAP_PLAN_NAME, REDUCE_PLAN_NAME, or a merge plan name)
 * @return BaseWork based on the name supplied; will return null if name is null, no plan
 *         path exists, or the plan string is absent in RPC mode
 * @throws RuntimeException if the configuration files are not proper or if plan can not be loaded
 */
private static BaseWork getBaseWork(Configuration conf, String name) {
  Path path = null;
  InputStream in = null;
  Kryo kryo = SerializationUtilities.borrowKryo();
  try {
    String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
    if (engine.equals("spark")) {
      // TODO Add jar into current thread context classloader as it may be invoked by Spark driver inside
      // threads, should be unnecessary while SPARK-5377 is resolved.
      String addedJars = conf.get(HIVE_ADDED_JARS);
      if (addedJars != null && !addedJars.isEmpty()) {
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        ClassLoader newLoader = addToClassPath(loader, addedJars.split(";"));
        Thread.currentThread().setContextClassLoader(newLoader);
        // Kryo must resolve classes from the added jars as well.
        kryo.setClassLoader(newLoader);
      }
    }
    path = getPlanPath(conf, name);
    LOG.info("PLAN PATH = {}", path);
    if (path == null) {
      // Map/reduce plan may not be generated
      return null;
    }
    BaseWork gWork = gWorkMap.get(conf).get(path);
    if (gWork == null) {
      Path localPath = path;
      LOG.debug("local path = {}", localPath);
      final long serializedSize;
      final String planMode;
      if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
        // RPC mode: the plan travels inside the configuration as a base64-encoded,
        // deflate-compressed string keyed by the plan path.
        LOG.debug("Loading plan from string: {}", path.toUri().getPath());
        String planString = conf.getRaw(path.toUri().getPath());
        if (planString == null) {
          LOG.info("Could not find plan string in conf");
          return null;
        }
        serializedSize = planString.length();
        planMode = "RPC";
        byte[] planBytes = Base64.decodeBase64(planString);
        in = new ByteArrayInputStream(planBytes);
        in = new InflaterInputStream(in);
      } else {
        // File mode: open the serialized plan directly from the file system.
        LOG.debug("Open file to read in plan: {}", localPath);
        FileSystem fs = localPath.getFileSystem(conf);
        in = fs.open(localPath);
        serializedSize = fs.getFileStatus(localPath).getLen();
        planMode = "FILE";
      }
      if (MAP_PLAN_NAME.equals(name)) {
        // The configured mapper class tells us which concrete work subtype was serialized.
        String mapperClass = conf.get(MAPRED_MAPPER_CLASS);
        if (ExecMapper.class.getName().equals(mapperClass)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
        } else if (MergeFileMapper.class.getName().equals(mapperClass)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, MergeFileWork.class);
        } else if (ColumnTruncateMapper.class.getName().equals(mapperClass)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, ColumnTruncateWork.class);
        } else if (PartialScanMapper.class.getName().equals(mapperClass)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, PartialScanWork.class);
        } else {
          throw new RuntimeException("unable to determine work from configuration ."
              + MAPRED_MAPPER_CLASS + " was " + mapperClass);
        }
      } else if (REDUCE_PLAN_NAME.equals(name)) {
        String reducerClass = conf.get(MAPRED_REDUCER_CLASS);
        if (ExecReducer.class.getName().equals(reducerClass)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
        } else {
          throw new RuntimeException("unable to determine work from configuration ."
              + MAPRED_REDUCER_CLASS + " was " + reducerClass);
        }
      } else if (name.contains(MERGE_PLAN_NAME)) {
        if (name.startsWith(MAPNAME)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, MapWork.class);
        } else if (name.startsWith(REDUCENAME)) {
          gWork = SerializationUtilities.deserializePlan(kryo, in, ReduceWork.class);
        } else {
          throw new RuntimeException("Unknown work type: " + name);
        }
      } else {
        // Previously an unrecognized name left gWork null and caused an NPE on
        // gWork.getName() below; fail with an explicit message instead.
        throw new RuntimeException("Unknown plan name: " + name);
      }
      LOG.info("Deserialized plan (via {}) - name: {} size: {}", planMode,
          gWork.getName(), humanReadableByteCount(serializedSize));
      gWorkMap.get(conf).put(path, gWork);
    } else if (LOG.isDebugEnabled()) {
      LOG.debug("Found plan in cache for name: " + name);
    }
    return gWork;
  } catch (FileNotFoundException fnf) {
    // Expected in some cases, e.g. there is no reduce work for a map-only job.
    LOG.debug("No plan file found: " + path + "; " + fnf.getMessage());
    return null;
  } catch (Exception e) {
    String msg = "Failed to load plan: " + path;
    // Reuse msg rather than rebuilding the identical string for the log call.
    LOG.error(msg, e);
    throw new RuntimeException(msg, e);
  } finally {
    SerializationUtilities.releaseKryo(kryo);
    if (in != null) {
      try {
        in.close();
      } catch (IOException ignored) {
        // Best-effort close: the plan was already read, or another error is propagating.
      }
    }
  }
}
Example usage of com.esotericsoftware.kryo.Kryo in the Apache Hive project: the ObjectContainer class, method add.
/**
 * Serializes the given row onto the backing output stream and counts it as a row on disk.
 *
 * @param row the row to append to this container
 */
public void add(ROW row) {
  final Kryo serializer = SerializationUtilities.borrowKryo();
  try {
    serializer.writeClassAndObject(output, row);
  } finally {
    // Always return the borrowed instance to the pool, even if serialization fails.
    SerializationUtilities.releaseKryo(serializer);
  }
  rowsOnDisk++;
}
Example usage of com.esotericsoftware.kryo.Kryo in the Apache Hive project: the ObjectContainer class, method next.
/**
 * Returns the next row from this container.
 * If the in-memory read buffer is exhausted, refills it by deserializing up to
 * IN_MEMORY_NUM_ROWS rows from the spill file (switching the file from write mode to
 * read mode on first use). Callers must ensure hasNext() is true before calling.
 *
 * @return the next row
 * @throws IllegalStateException if there is no next row
 * @throws RuntimeException if loading rows from disk fails (the container is cleared first)
 */
public ROW next() {
Preconditions.checkState(hasNext());
if (!readBufferUsed) {
try {
if (input == null && output != null) {
// Close output stream if open
output.close();
output = null;
// Open the spill file for reading; if wrapping it in an Input fails,
// make sure the raw FileInputStream does not leak.
FileInputStream fis = null;
try {
fis = new FileInputStream(tmpFile);
input = new Input(fis);
} finally {
if (input == null && fis != null) {
fis.close();
}
}
}
if (input != null) {
// Load next batch from disk; read at most a full buffer's worth.
if (rowsOnDisk >= IN_MEMORY_NUM_ROWS) {
rowsInReadBuffer = IN_MEMORY_NUM_ROWS;
} else {
rowsInReadBuffer = rowsOnDisk;
}
// Deserialize the batch with a pooled Kryo instance, returned in finally.
Kryo kryo = SerializationUtilities.borrowKryo();
try {
for (int i = 0; i < rowsInReadBuffer; i++) {
readBuffer[i] = (ROW) kryo.readClassAndObject(input);
}
} finally {
SerializationUtilities.releaseKryo(kryo);
}
// Close the input eagerly once the spill file is fully consumed.
if (input.eof()) {
input.close();
input = null;
}
readBufferUsed = true;
readCursor = 0;
rowsOnDisk -= rowsInReadBuffer;
}
} catch (Exception e) {
// Clean up the cache
clear();
throw new RuntimeException("Failed to load rows from disk", e);
}
}
// Serve from the read buffer; when the cursor passes the last loaded row,
// reset the buffer state so the next call triggers another disk load.
ROW row = readBuffer[readCursor];
if (++readCursor >= rowsInReadBuffer) {
readBufferUsed = false;
rowsInReadBuffer = 0;
readCursor = 0;
}
return row;
}
Example usage of com.esotericsoftware.kryo.Kryo in the Apache Hive project: the SerializationUtilities class, method deserializePlan.
/**
 * Deserializes a plan of the given class from the input stream using a pooled Kryo
 * instance, which is always returned to the pool.
 *
 * @param in          stream containing the serialized plan
 * @param planClass   concrete class of the plan to deserialize
 * @param cloningPlan whether this deserialization is part of a plan-cloning operation
 * @return the deserialized plan object
 */
private static <T> T deserializePlan(InputStream in, Class<T> planClass, boolean cloningPlan) {
  final Kryo pooledKryo = borrowKryo();
  try {
    // Returning from inside try is safe: the finally block still releases the instance.
    return deserializePlan(pooledKryo, in, planClass, cloningPlan);
  } finally {
    releaseKryo(pooledKryo);
  }
}
Example usage of com.esotericsoftware.kryo.Kryo in the Apache Hive project: the ExternalCache class, method getSerializedSargForMetastore.
/**
 * Serializes the search argument (SARG) for the metastore, translating its column
 * references to table column indexes first. The result is cached per isOriginal value
 * in sargIsOriginal / sargNotIsOriginal.
 *
 * @param isOriginal whether the file is in original (non-ACID) layout, which changes
 *                   the root column offset used for translation
 * @return the serialized SARG, or null if there is no SARG
 */
private ByteBuffer getSerializedSargForMetastore(boolean isOriginal) {
  if (sarg == null) {
    return null;
  }
  ByteBuffer serializedSarg = isOriginal ? sargIsOriginal : sargNotIsOriginal;
  if (serializedSarg != null) {
    return serializedSarg;
  }
  SearchArgument sarg2 = sarg;
  Kryo kryo = SerializationUtilities.borrowKryo();
  try {
    if ((isOriginal ? sargNotIsOriginal : sargIsOriginal) == null) {
      // In case we need it for the other case: copy before the destructive
      // translateSargToTableColIndexes call so the shared sarg stays untouched.
      sarg2 = kryo.copy(sarg2);
    }
    translateSargToTableColIndexes(sarg2, conf, OrcInputFormat.getRootColumn(isOriginal));
    ExternalCache.Baos baos = new Baos();
    Output output = new Output(baos);
    try {
      kryo.writeObject(output, sarg2);
      output.flush();
    } finally {
      // Close the Output so its internal buffer is released even on failure
      // (previously it was only flushed and never closed).
      output.close();
    }
    serializedSarg = baos.get();
    if (isOriginal) {
      sargIsOriginal = serializedSarg;
    } else {
      sargNotIsOriginal = serializedSarg;
    }
  } finally {
    SerializationUtilities.releaseKryo(kryo);
  }
  return serializedSarg;
}
Aggregations