Example 56 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class VectorUDAFBloomFilter, the method evaluateOutput:

@Override
public Object evaluateOutput(AggregationBuffer agg) throws HiveException {
    try {
        Aggregation bfAgg = (Aggregation) agg;
        // Reuse the shared byte stream: reset it, then serialize this group's filter.
        byteStream.reset();
        BloomFilter.serialize(byteStream, bfAgg.bf);
        byte[] bytes = byteStream.toByteArray();
        // Hand the serialized bytes back in a reused BytesWritable.
        bw.set(bytes, 0, bytes.length);
        return bw;
    } catch (IOException err) {
        throw new HiveException("Error encountered while serializing bloomfilter", err);
    }
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IOException(java.io.IOException)
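
The BytesWritable built above can be turned back into a filter with the matching deserialize call. A minimal round-trip sketch follows, assuming the static BloomFilter.deserialize(InputStream) counterpart of the serialize call used in the example; the helper name roundTrip is illustrative.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hive.common.util.BloomFilter;

// Rebuild a BloomFilter from the serialized bytes returned by evaluateOutput.
static BloomFilter roundTrip(BytesWritable bw) throws IOException {
    // getBytes() returns the backing array, which may be longer than the
    // valid data, so pass the logical length explicitly.
    ByteArrayInputStream in = new ByteArrayInputStream(bw.getBytes(), 0, bw.getLength());
    return BloomFilter.deserialize(in);
}

The restored filter should then answer the same membership queries (for example via testLong) as the one that was serialized.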

Example 57 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class HiveFileFormatUtils, the method getHiveRecordWriter:

public static RecordWriter getHiveRecordWriter(JobConf jc, TableDesc tableInfo,
        Class<? extends Writable> outputClass, FileSinkDesc conf, Path outPath,
        Reporter reporter) throws HiveException {
    HiveOutputFormat<?, ?> hiveOutputFormat = getHiveOutputFormat(jc, tableInfo);
    try {
        boolean isCompressed = conf.getCompressed();
        JobConf jc_output = jc;
        if (isCompressed) {
            jc_output = new JobConf(jc);
            String codecStr = conf.getCompressCodec();
            if (codecStr != null && !codecStr.trim().equals("")) {
                Class<? extends CompressionCodec> codec = (Class<? extends CompressionCodec>) JavaUtils.loadClass(codecStr);
                FileOutputFormat.setOutputCompressorClass(jc_output, codec);
            }
            String type = conf.getCompressType();
            if (type != null && !type.trim().equals("")) {
                CompressionType style = CompressionType.valueOf(type);
                // Set the type on the copied conf (jc_output), not the caller's jc,
                // so the incoming JobConf is left unmodified.
                SequenceFileOutputFormat.setOutputCompressionType(jc_output, style);
            }
        }
        return getRecordWriter(jc_output, hiveOutputFormat, outputClass, isCompressed, tableInfo.getProperties(), outPath, reporter);
    } catch (Exception e) {
        throw new HiveException(e);
    }
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec) JobConf(org.apache.hadoop.mapred.JobConf) CompressionType(org.apache.hadoop.io.SequenceFile.CompressionType) FileSystemNotFoundException(java.nio.file.FileSystemNotFoundException) IOException(java.io.IOException)
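
A hedged caller sketch for this helper. The TableDesc and FileSinkDesc are taken as parameters because in practice they come from the compiled query plan; the output path, the single Text row, and the method name writeOneRow are illustrative.

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;

// Write a single row through whatever output format the table declares.
static void writeOneRow(JobConf jc, TableDesc tableInfo, FileSinkDesc sinkDesc)
        throws HiveException, IOException {
    Path outPath = new Path("/tmp/hive-out/part-00000");  // illustrative path
    RecordWriter writer = HiveFileFormatUtils.getHiveRecordWriter(
        jc, tableInfo, Text.class, sinkDesc, outPath, Reporter.NULL);
    writer.write(new Text("example row"));
    // close(false) means "not aborted": flush and finalize the output file.
    writer.close(false);
}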

Example 58 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class MergeFileMapper, the method configure:

@Override
public void configure(JobConf job) {
    jc = job;
    // The deserialized plan for this task; a merge task must carry a MergeFileWork.
    MapWork mapWork = Utilities.getMapWork(job);
    try {
        if (mapWork instanceof MergeFileWork) {
            MergeFileWork mfWork = (MergeFileWork) mapWork;
            // A merge task has a single alias; its operator is the tree root.
            String alias = mfWork.getAliasToWork().keySet().iterator().next();
            op = mfWork.getAliasToWork().get(alias);
            if (op instanceof AbstractFileMergeOperator) {
                mergeOp = (AbstractFileMergeOperator) op;
                mergeOp.initializeOp(jc);
                // Two slots for each record's key and value.
                row = new Object[2];
                abort = false;
            } else {
                abort = true;
                throw new RuntimeException("Merge file work's top operator should be an" + " instance of AbstractFileMergeOperator");
            }
        } else {
            abort = true;
            throw new RuntimeException("Map work should be a merge file work.");
        }
    } catch (HiveException e) {
        abort = true;
        throw new RuntimeException(e);
    }
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) MapWork(org.apache.hadoop.hive.ql.plan.MapWork) AbstractFileMergeOperator(org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator)
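
The two-slot row array prepared in configure is what carries each record into the merge operator. Below is a plausible map() counterpart for the same MergeFileMapper class (OutputCollector and Reporter from org.apache.hadoop.mapred), sketched on the assumption that records are pushed through the standard Operator.process(Object, int) entry point; it is a reconstruction, not necessarily the exact upstream method.

@Override
public void map(Object key, Object value, OutputCollector<Object, Object> output,
        Reporter reporter) throws IOException {
    row[0] = key;
    row[1] = value;
    try {
        // Tag 0: the merge operator has a single parent in this pipeline.
        mergeOp.process(row, 0);
    } catch (HiveException e) {
        abort = true;
        // map() cannot throw HiveException, so wrap it, mirroring configure().
        throw new IOException(e);
    }
}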

Example 59 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class GenericUDFReflect, the method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Try to find the class.
    // Skip class loading if the class name didn't change.
    boolean classNameChanged = false;
    Object newClassName = arguments[0].get();
    // Compare via ObjectInspectorUtils.compare() to avoid object conversion in the
    // common case where the class name/method name is a constant Java String or a
    // constant Text (StringWritable).
    if (className == null || ObjectInspectorUtils.compare(className, classNameOI, newClassName, inputClassNameOI) != 0) {
        className = ObjectInspectorUtils.copyToStandardObject(newClassName, inputClassNameOI);
        String classNameString = classNameOI.getPrimitiveJavaObject(className);
        try {
            c = JavaUtils.loadClass(classNameString);
        } catch (ClassNotFoundException ex) {
            throw new HiveException("UDFReflect evaluate ", ex);
        }
        try {
            o = null;
            o = ReflectionUtils.newInstance(c, null);
        } catch (Exception e) {
            // Ignored: a null instance still works when the target method is static.
        }
        classNameChanged = true;
    }
    // Try to find the method
    // Skip method finding if the method name didn't change, and class name didn't change.
    Object newMethodName = arguments[1].get();
    if (methodName == null || ObjectInspectorUtils.compare(methodName, methodNameOI, newMethodName, inputMethodNameOI) != 0 || classNameChanged) {
        methodName = ObjectInspectorUtils.copyToStandardObject(newMethodName, inputMethodNameOI);
        String methodNameString = methodNameOI.getPrimitiveJavaObject(methodName);
        try {
            m = findMethod(c, methodNameString, String.class, false);
        } catch (Exception e) {
            throw new HiveException("UDFReflect getMethod ", e);
        }
    }
    Object[] parameterJavaValues = setupParameters(arguments, 2);
    try {
        return String.valueOf(m.invoke(o, parameterJavaValues));
    } catch (Exception e1) {
        // Invocation failures are logged, not rethrown; the UDF then returns NULL.
        System.err.println("UDFReflect evaluate " + e1 + " method = " + m + " args = " + Arrays.asList(parameterJavaValues));
    }
    return null;
}
Also used: HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
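
This method backs Hive's reflect() UDF, as in SELECT reflect('java.lang.Math', 'max', 2, 3). Below is a hedged sketch of driving the UDF directly, the way a unit test might; the argument values and the demoReflect name are illustrative, while DeferredJavaObject and the initialize/evaluate contract are the standard GenericUDF API.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

static void demoReflect() throws HiveException {
    GenericUDFReflect udf = new GenericUDFReflect();
    ObjectInspector stringOI = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    // Three string arguments: class name, method name, one String parameter.
    udf.initialize(new ObjectInspector[] { stringOI, stringOI, stringOI });
    Object result = udf.evaluate(new GenericUDF.DeferredObject[] {
        new GenericUDF.DeferredJavaObject("java.lang.String"),
        new GenericUDF.DeferredJavaObject("valueOf"),
        new GenericUDF.DeferredJavaObject("42") });
    // Expected: the string "42", i.e. String.valueOf("42").
}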

Example 60 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class GenericUDFMacro, the method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkNotNull(colTypes, "colTypes");
    checkNotNull(arguments, "arguments");
    checkNotNull(bodyDesc, "bodyDesc");
    if (colTypes.size() != arguments.length) {
        throw new UDFArgumentLengthException("The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
    }
    try {
        body = ExprNodeEvaluatorFactory.get(bodyDesc);
    } catch (HiveException ex) {
        throw new UDFArgumentException(ex);
    }
    converters = new ObjectInspectorConverters.Converter[arguments.length];
    ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
    // Adapt each caller-supplied inspector to the writable inspector the macro
    // body expects for that column.
    for (int index = 0; index < arguments.length; ++index) {
        ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
        colObjectInspectors.add(objectInspector);
        converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
    }
    evaluatedArguments = new ArrayList<Object>(arguments.length);
    ObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
    try {
        return body.initialize(structOI);
    } catch (HiveException ex) {
        throw new UDFArgumentException(ex);
    }
}
Also used: UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) ObjectInspectorConverters(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters) ArrayList(java.util.ArrayList)
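
At the SQL layer this is the machinery behind CREATE TEMPORARY MACRO. The sketch below constructs and initializes a macro by hand; the constant body stands in for the expression tree a real macro gets from the parser, and the four-argument constructor is inferred from the fields used above, so treat both as assumptions.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Roughly equivalent to: CREATE TEMPORARY MACRO always_one(x INT) 1
static ObjectInspector demoMacro() throws UDFArgumentException {
    List<String> colNames = Arrays.asList("x");
    List<TypeInfo> colTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.intTypeInfo);
    // A constant body; real macros carry the expression tree built by the parser.
    ExprNodeDesc body = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 1);
    GenericUDFMacro macro = new GenericUDFMacro("always_one", body, colNames, colTypes);
    // One int argument, matching colTypes; returns the body's output inspector.
    return macro.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableIntObjectInspector });
}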

Aggregations

Types co-occurring with HiveException across the indexed examples, with usage counts:

HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 451
IOException (java.io.IOException): 172
ArrayList (java.util.ArrayList): 81
Path (org.apache.hadoop.fs.Path): 68
Table (org.apache.hadoop.hive.ql.metadata.Table): 65
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 46
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 45
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 45
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 42
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 39
FileSystem (org.apache.hadoop.fs.FileSystem): 31
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 29
LinkedHashMap (java.util.LinkedHashMap): 28
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 28
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 28
FileNotFoundException (java.io.FileNotFoundException): 27
URISyntaxException (java.net.URISyntaxException): 27
HashMap (java.util.HashMap): 26
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 23
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 23