Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class VectorUDAFBloomFilter, method evaluateOutput:
@Override
public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
  try {
    Aggregation bfAgg = (Aggregation) agg;
    // Serialize the accumulated bloom filter into the reusable byte stream.
    byteStream.reset();
    BloomFilter.serialize(byteStream, bfAgg.bf);
    byte[] bytes = byteStream.toByteArray();
    bw.set(bytes, 0, bytes.length);
    return bw;
  } catch (IOException err) {
    // Translate the checked IOException into Hive's own exception type.
    throw new HiveException("Error encountered while serializing bloomfilter", err);
  }
}
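For context, here is a minimal round-trip sketch of the serialization used above. It assumes the hive-storage-api org.apache.hive.common.util.BloomFilter with its static serialize/deserialize helpers; BloomFilterRoundTrip is a hypothetical class name, not part of Hive. A consumer of the bytes produced by evaluateOutput can rebuild the filter and probe it, wrapping any IOException in HiveException the same way:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hive.common.util.BloomFilter;

public class BloomFilterRoundTrip {
  public static void main(String[] args) throws HiveException {
    try {
      // Build a small filter and record a couple of keys.
      BloomFilter bf = new BloomFilter(1000);
      bf.addLong(42L);
      bf.addLong(7L);
      // Serialize the same way evaluateOutput does, then read it back.
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      BloomFilter.serialize(out, bf);
      BloomFilter copy = BloomFilter.deserialize(new ByteArrayInputStream(out.toByteArray()));
      System.out.println(copy.testLong(42L)); // true
      System.out.println(copy.testLong(99L)); // false with high probability
    } catch (IOException err) {
      // Same wrapping as the aggregator above.
      throw new HiveException("Error encountered while serializing bloomfilter", err);
    }
  }
}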
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class HiveFileFormatUtils, method getHiveRecordWriter:
public static RecordWriter getHiveRecordWriter(JobConf jc, TableDesc tableInfo,
    Class<? extends Writable> outputClass, FileSinkDesc conf, Path outPath,
    Reporter reporter) throws HiveException {
  HiveOutputFormat<?, ?> hiveOutputFormat = getHiveOutputFormat(jc, tableInfo);
  try {
    boolean isCompressed = conf.getCompressed();
    JobConf jc_output = jc;
    if (isCompressed) {
      // Work on a copy so the caller's JobConf is not mutated.
      jc_output = new JobConf(jc);
      String codecStr = conf.getCompressCodec();
      if (codecStr != null && !codecStr.trim().equals("")) {
        Class<? extends CompressionCodec> codec =
            (Class<? extends CompressionCodec>) JavaUtils.loadClass(codecStr);
        FileOutputFormat.setOutputCompressorClass(jc_output, codec);
      }
      String type = conf.getCompressType();
      if (type != null && !type.trim().equals("")) {
        CompressionType style = CompressionType.valueOf(type);
        SequenceFileOutputFormat.setOutputCompressionType(jc, style);
      }
    }
    return getRecordWriter(jc_output, hiveOutputFormat, outputClass, isCompressed,
        tableInfo.getProperties(), outPath, reporter);
  } catch (Exception e) {
    // Any failure during writer setup surfaces as a HiveException.
    throw new HiveException(e);
  }
}
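The compression setup in the middle of this method is plain Hadoop configuration. Here is a standalone sketch of just that pattern, using only stock org.apache.hadoop.mapred APIs; CompressionConfig and withCompression are hypothetical names for illustration:

import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

public class CompressionConfig {
  @SuppressWarnings("unchecked")
  public static JobConf withCompression(JobConf jc, String codecClassName, String type)
      throws ClassNotFoundException {
    // Clone first so the caller's JobConf stays untouched, as getHiveRecordWriter does.
    JobConf jcOutput = new JobConf(jc);
    if (codecClassName != null && !codecClassName.trim().isEmpty()) {
      Class<? extends CompressionCodec> codec =
          (Class<? extends CompressionCodec>) Class.forName(codecClassName);
      FileOutputFormat.setOutputCompressorClass(jcOutput, codec);
    }
    if (type != null && !type.trim().isEmpty()) {
      // valueOf accepts NONE, RECORD, or BLOCK and throws IllegalArgumentException otherwise.
      SequenceFileOutputFormat.setOutputCompressionType(jcOutput, CompressionType.valueOf(type));
    }
    return jcOutput;
  }
}

One detail worth noticing in the Hive method above: the SequenceFile compression type is set on the original jc rather than on the jc_output copy that receives the codec class.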
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class MergeFileMapper, method configure:
@Override
public void configure(JobConf job) {
  jc = job;
  MapWork mapWork = Utilities.getMapWork(job);
  try {
    if (mapWork instanceof MergeFileWork) {
      MergeFileWork mfWork = (MergeFileWork) mapWork;
      // A merge task has exactly one alias; take its top operator.
      String alias = mfWork.getAliasToWork().keySet().iterator().next();
      op = mfWork.getAliasToWork().get(alias);
      if (op instanceof AbstractFileMergeOperator) {
        mergeOp = (AbstractFileMergeOperator) op;
        mergeOp.initializeOp(jc);
        row = new Object[2];
        abort = false;
      } else {
        abort = true;
        throw new RuntimeException("Merge file work's top operator should be an"
            + " instance of AbstractFileMergeOperator");
      }
    } else {
      abort = true;
      throw new RuntimeException("Map work should be a merge file work.");
    }
  } catch (HiveException e) {
    // configure() cannot throw checked exceptions, so rethrow unchecked.
    abort = true;
    throw new RuntimeException(e);
  }
}
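The try/catch here follows a common pattern in the old mapred API: configure(JobConf) declares no checked exceptions, so a checked HiveException from operator setup must be rethrown unchecked. A minimal sketch of just that pattern; ConfigureSketch and initializeOperators are hypothetical stand-ins for MergeFileMapper's actual setup:

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;

public class ConfigureSketch extends MapReduceBase {
  private boolean abort;

  // Hypothetical stand-in for resolving the MapWork and initializing the merge operator.
  private void initializeOperators(JobConf job) throws HiveException {
    // ... operator setup that may fail with a HiveException ...
  }

  @Override
  public void configure(JobConf job) {
    try {
      initializeOperators(job);
      abort = false;
    } catch (HiveException e) {
      // configure(JobConf) declares no checked exceptions, so the checked
      // HiveException has to be rethrown unchecked, as MergeFileMapper does.
      abort = true;
      throw new RuntimeException(e);
    }
  }
}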
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class GenericUDFReflect, method evaluate:
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // Try to find the class.
  // Skip class loading if the class name didn't change.
  boolean classNameChanged = false;
  Object newClassName = arguments[0].get();
  // The class name/method name is a constant Java String or constant Text (StringWritable).
  if (className == null
      || ObjectInspectorUtils.compare(className, classNameOI, newClassName, inputClassNameOI) != 0) {
    className = ObjectInspectorUtils.copyToStandardObject(newClassName, inputClassNameOI);
    String classNameString = classNameOI.getPrimitiveJavaObject(className);
    try {
      c = JavaUtils.loadClass(classNameString);
    } catch (ClassNotFoundException ex) {
      throw new HiveException("UDFReflect evaluate ", ex);
    }
    try {
      o = null;
      o = ReflectionUtils.newInstance(c, null);
    } catch (Exception e) {
      // Instantiation failures are ignored; static methods work with a null receiver.
    }
    classNameChanged = true;
  }
  // Try to find the method.
  // Skip method lookup if neither the method name nor the class name changed.
  Object newMethodName = arguments[1].get();
  if (methodName == null
      || ObjectInspectorUtils.compare(methodName, methodNameOI, newMethodName, inputMethodNameOI) != 0
      || classNameChanged) {
    methodName = ObjectInspectorUtils.copyToStandardObject(newMethodName, inputMethodNameOI);
    String methodNameString = methodNameOI.getPrimitiveJavaObject(methodName);
    try {
      m = findMethod(c, methodNameString, String.class, false);
    } catch (Exception e) {
      throw new HiveException("UDFReflect getMethod ", e);
    }
  }
  Object[] parameterJavaValues = setupParameters(arguments, 2);
  try {
    return String.valueOf(m.invoke(o, parameterJavaValues));
  } catch (Exception e1) {
    System.err.println("UDFReflect evaluate " + e1 + " method = " + m
        + " args = " + Arrays.asList(parameterJavaValues));
  }
  return null;
}
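This UDF backs Hive's reflect()/java_method() builtins, e.g. SELECT reflect('java.lang.Math', 'max', 2, 3). The interesting part is the caching: the Class, instance, and Method are only re-resolved when the class or method name actually changes between rows. A stripped-down sketch of that pattern with plain JDK reflection; CachedReflector and findByName are hypothetical, and the lookup is simplified (the real UDF also matches parameter types via findMethod):

import java.lang.reflect.Method;

public class CachedReflector {
  private String cachedClassName;
  private String cachedMethodName;
  private Class<?> c;
  private Method m;
  private Object o;

  public Object invoke(String className, String methodName, Object... args) throws Exception {
    boolean classChanged = !className.equals(cachedClassName);
    if (classChanged) {
      c = Class.forName(className);
      try {
        o = c.getDeclaredConstructor().newInstance();
      } catch (Exception ignored) {
        o = null; // static methods still work with a null receiver
      }
      cachedClassName = className;
    }
    // Re-resolve the method if either name changed since the last call.
    if (classChanged || !methodName.equals(cachedMethodName)) {
      m = findByName(methodName);
      cachedMethodName = methodName;
    }
    return String.valueOf(m.invoke(o, args));
  }

  // Naive lookup by name only; GenericUDFReflect also matches parameter types.
  private Method findByName(String name) throws NoSuchMethodException {
    for (Method candidate : c.getMethods()) {
      if (candidate.getName().equals(name)) {
        return candidate;
      }
    }
    throw new NoSuchMethodException(name);
  }
}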
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class GenericUDFMacro, method initialize:
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkNotNull(colTypes, "colTypes");
  checkNotNull(arguments, "arguments");
  checkNotNull(bodyDesc, "bodyDesc");
  if (colTypes.size() != arguments.length) {
    throw new UDFArgumentLengthException("The macro " + macroName + " accepts exactly "
        + colTypes.size() + " arguments.");
  }
  try {
    body = ExprNodeEvaluatorFactory.get(bodyDesc);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
  // Build one converter per argument so the caller's inspectors are adapted
  // to the column types the macro was declared with.
  converters = new ObjectInspectorConverters.Converter[arguments.length];
  ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
  for (int index = 0; index < arguments.length; ++index) {
    ObjectInspector objectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
    colObjectInspectors.add(objectInspector);
    converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
  }
  evaluatedArguments = new ArrayList<Object>(arguments.length);
  ObjectInspector structOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
  try {
    return body.initialize(structOI);
  } catch (HiveException ex) {
    // initialize() may only throw UDFArgumentException, so re-wrap.
    throw new UDFArgumentException(ex);
  }
}
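The two try/catch blocks show the standard translation at UDF setup time: initialize() is declared to throw only UDFArgumentException, so any HiveException raised while building or initializing the body evaluator is re-wrapped. A minimal sketch of the pattern in isolation; InitTranslation is a hypothetical class, and bodyDesc/structOI stand in for the macro's fields:

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

public class InitTranslation {
  static ObjectInspector initBody(ExprNodeDesc bodyDesc, ObjectInspector structOI)
      throws UDFArgumentException {
    try {
      ExprNodeEvaluator body = ExprNodeEvaluatorFactory.get(bodyDesc);
      return body.initialize(structOI);
    } catch (HiveException ex) {
      // Same conversion as GenericUDFMacro: surface setup failures as
      // argument errors, the only checked exception initialize() may throw.
      throw new UDFArgumentException(ex);
    }
  }
}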