Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class Utilities, method createEmptyBuckets.
/**
 * Check the existence of buckets according to the bucket specification and create empty
 * bucket files where needed.
 *
 * @param hconf the Hadoop configuration
 * @param paths a list of empty buckets to create
 * @param conf the definition of the FileSink
 * @param reporter the MapReduce reporter object
 * @throws HiveException
 * @throws IOException
 */
private static void createEmptyBuckets(Configuration hconf, List<Path> paths,
    FileSinkDesc conf, Reporter reporter) throws HiveException, IOException {
  JobConf jc;
  if (hconf instanceof JobConf) {
    jc = new JobConf(hconf);
  } else {
    // test code path (currently identical to the branch above)
    jc = new JobConf(hconf);
  }
  HiveOutputFormat<?, ?> hiveOutputFormat = null;
  Class<? extends Writable> outputClass = null;
  boolean isCompressed = conf.getCompressed();
  TableDesc tableInfo = conf.getTableInfo();
  try {
    Serializer serializer = (Serializer) tableInfo.getDeserializerClass().newInstance();
    serializer.initialize(null, tableInfo.getProperties());
    outputClass = serializer.getSerializedClass();
    hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(hconf, conf.getTableInfo());
  } catch (SerDeException | InstantiationException | IllegalAccessException e) {
    throw new HiveException(e);
  }
  for (Path path : paths) {
    RecordWriter writer = HiveFileFormatUtils.getRecordWriter(jc, hiveOutputFormat,
        outputClass, isCompressed, tableInfo.getProperties(), path, reporter);
    writer.close(false);
    LOG.info("Created empty bucket for enforcing bucketing at " + path);
  }
}
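Note that the loop above opens and immediately closes a RecordWriter from the table's HiveOutputFormat, so each empty bucket is a well-formed file of the table's storage format (with any format headers), not just a zero-length file. For comparison, a minimal, self-contained sketch of the bare "create a placeholder per missing bucket" idea using only the HDFS API; the class and method names here are hypothetical, not part of Hive:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class BucketStubs {
  // Create a zero-length placeholder file for every missing bucket path.
  static void ensureBucketsExist(Configuration conf, List<Path> paths) throws IOException {
    for (Path path : paths) {
      FileSystem fs = path.getFileSystem(conf);
      if (!fs.exists(path)) {
        fs.create(path, false).close(); // overwrite=false: fail if it already exists
      }
    }
  }
}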
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class ScriptOperator, method close.
@Override
public void close(boolean abort) throws HiveException {
  boolean new_abort = abort;
  if (!abort) {
    if (scriptError != null) {
      throw new HiveException(ErrorMsg.SCRIPT_GENERIC_ERROR.getErrorCodedMsg(), scriptError);
    }
    // everything ok. try normal shutdown
    try {
      try {
        if (scriptOutWriter != null) {
          scriptOutWriter.close();
        }
      } catch (IOException e) {
        if (isBrokenPipeException(e) && allowPartialConsumption()) {
          LOG.warn("Got broken pipe: ignoring exception");
        } else {
          if (isBrokenPipeException(e)) {
            displayBrokenPipeInfo();
          }
          throw e;
        }
      }
      int exitVal = 0;
      if (scriptPid != null) {
        exitVal = scriptPid.waitFor();
      }
      if (exitVal != 0) {
        LOG.error("Script failed with code " + exitVal);
        new_abort = true;
      }
    } catch (IOException e) {
      LOG.error("Got ioexception: " + e.getMessage());
      e.printStackTrace();
      new_abort = true;
    } catch (InterruptedException e) {
      // ignore
    }
  } else {
    // An error has already occurred; still try to get the
    // error code of the child process if possible.
    try {
      // Interrupt the current thread after 1 second
      final Thread mythread = Thread.currentThread();
      Timer timer = new Timer(true);
      timer.schedule(new TimerTask() {
        @Override
        public void run() {
          mythread.interrupt();
        }
      }, 1000);
      // Wait for the child process to finish
      int exitVal = 0;
      if (scriptPid != null) {
        exitVal = scriptPid.waitFor();
      }
      // Cancel the timer
      timer.cancel();
      // Output the exit code
      LOG.error("Script exited with code " + exitVal);
    } catch (InterruptedException e) {
      // Ignore
      LOG.error("Script has not exited yet. It will be killed.");
    }
  }
  // try these best effort
  try {
    if (outThread != null) {
      outThread.join(0);
    }
  } catch (Exception e) {
    LOG.warn("Exception in closing outThread: " + StringUtils.stringifyException(e));
  }
  try {
    if (errThread != null) {
      errThread.join(0);
    }
  } catch (Exception e) {
    LOG.warn("Exception in closing errThread: " + StringUtils.stringifyException(e));
  }
  try {
    if (scriptPid != null) {
      scriptPid.destroy();
    }
  } catch (Exception e) {
    LOG.warn("Exception in destroying scriptPid: " + StringUtils.stringifyException(e));
  }
  super.close(new_abort);
  if (new_abort && !abort) {
    throw new HiveException(ErrorMsg.SCRIPT_CLOSING_ERROR.getErrorCodedMsg());
  }
}
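The Timer-based self-interrupt in the abort branch bounds how long close waits for the child process. On Java 8 and later, the same bound can be expressed directly with Process.waitFor(timeout, unit); a minimal sketch of that alternative, with an illustrative helper name:

import java.util.concurrent.TimeUnit;

final class BoundedWait {
  // Returns the exit code if the child finishes within one second,
  // or null if it is still running (the caller may then destroy() it).
  static Integer exitCodeOrNull(Process child) throws InterruptedException {
    if (child.waitFor(1, TimeUnit.SECONDS)) {
      return child.exitValue();
    }
    return null;
  }
}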
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class RCFileMergeOperator, method closeOp.
@Override
public void closeOp(boolean abort) throws HiveException {
  // close writer
  if (outWriter == null) {
    return;
  }
  try {
    outWriter.close();
  } catch (IOException e) {
    throw new HiveException("Unable to close RCFileMergeOperator", e);
  }
  outWriter = null;
  super.closeOp(abort);
}
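The null check plus the null-out after a successful close makes closeOp safe to call twice. Note that if close() throws, outWriter stays set, so a later call would attempt the close again. A variation (illustrative, not Hive's code) that instead drops the reference even on failure, so the close is never retried:

import java.io.Closeable;
import java.io.IOException;

final class OnceClosable {
  private Closeable writer; // set elsewhere

  void closeWriterOnce() throws IOException {
    if (writer == null) {
      return; // already closed, or never opened
    }
    try {
      writer.close();
    } finally {
      writer = null; // drop the reference even when close() throws
    }
  }
}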
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class ObjectCache, method retrieve.
@SuppressWarnings("unchecked")
@Override
public <T> T retrieve(String key, Callable<T> fn) throws HiveException {
  T value;
  try {
    value = (T) registry.get(key);
    if (value == null) {
      value = fn.call();
      LOG.info("Caching key: " + key);
      registry.cacheForVertex(key, value);
    } else {
      LOG.info("Found " + key + " in cache with value: " + value);
    }
  } catch (Exception e) {
    throw new HiveException(e);
  }
  return value;
}
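The lookup above is a plain check-then-act rather than an atomic computeIfAbsent, so two concurrent callers could both run fn for the same key. A self-contained sketch of the same get-or-compute contract over a ConcurrentHashMap; the class and field names here are illustrative, not Hive's:

import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;

final class SimpleObjectCache {
  private final ConcurrentHashMap<String, Object> registry = new ConcurrentHashMap<>();

  @SuppressWarnings("unchecked")
  <T> T retrieve(String key, Callable<T> fn) throws Exception {
    Object cached = registry.get(key);
    if (cached == null) {
      cached = fn.call();        // compute on miss
      registry.put(key, cached); // publish for later lookups
    }
    return (T) cached;
  }
}

computeIfAbsent would close the race, but Callable's checked exception would then have to be smuggled out of the mapping function, which is why the simple two-step form is common.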
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class SparkReduceRecordHandler, method processVectors.
/**
 * Feed all values for the current reduce key into the vectorized reducer in batches.
 *
 * @param values iterator over the serialized values for the current key
 * @param tag the tag identifying which input the values belong to
 * @return true if it is not done and can take more inputs
 * @throws HiveException
 */
private <E> boolean processVectors(Iterator<E> values, byte tag) throws HiveException {
  VectorizedRowBatch batch = batches[tag];
  batch.reset();
  buffer.reset();
  /* deserialize key into columns */
  VectorizedBatchUtil.addRowToBatchFrom(keyObject, keyStructInspector, 0, 0, batch, buffer);
  for (int i = 0; i < keysColumnOffset; i++) {
    VectorizedBatchUtil.setRepeatingColumn(batch, i);
  }
  int rowIdx = 0;
  try {
    while (values.hasNext()) {
      /* deserialize value into columns */
      BytesWritable valueWritable = (BytesWritable) values.next();
      Object valueObj = deserializeValue(valueWritable, tag);
      VectorizedBatchUtil.addRowToBatchFrom(valueObj, valueStructInspectors[tag], rowIdx,
          keysColumnOffset, batch, buffer);
      rowIdx++;
      if (rowIdx >= BATCH_SIZE) {
        VectorizedBatchUtil.setBatchSize(batch, rowIdx);
        reducer.process(batch, tag);
        rowIdx = 0;
        if (isLogInfoEnabled) {
          logMemoryInfo();
        }
      }
    }
    if (rowIdx > 0) {
      VectorizedBatchUtil.setBatchSize(batch, rowIdx);
      reducer.process(batch, tag);
    }
    if (isLogInfoEnabled) {
      logMemoryInfo();
    }
  } catch (Exception e) {
    String rowString = null;
    try {
      rowString = batch.toString();
    } catch (Exception e2) {
      rowString = "[Error getting row data with exception "
          + StringUtils.stringifyException(e2) + " ]";
    }
    throw new HiveException("Error while processing vector batch (tag=" + tag + ") "
        + rowString, e);
  }
  // give me more
  return true;
}
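Stripped of the vectorization specifics, the loop is the standard flush-on-full batching pattern: accumulate up to BATCH_SIZE rows, process, reset, then flush any trailing partial batch. A generic, self-contained sketch of the same pattern; the BatchSink interface and all names are hypothetical stand-ins:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

final class Batcher {
  interface BatchSink<T> {
    void process(List<T> batch) throws Exception;
  }

  static <T> void drain(Iterator<T> values, int batchSize, BatchSink<T> sink) throws Exception {
    List<T> batch = new ArrayList<>(batchSize);
    while (values.hasNext()) {
      batch.add(values.next());
      if (batch.size() >= batchSize) {
        sink.process(batch); // flush a full batch
        batch.clear();       // reuse the buffer, like batch.reset() above
      }
    }
    if (!batch.isEmpty()) {
      sink.process(batch);   // flush the trailing partial batch
    }
  }
}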