Example 6 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class TestVectorSerDeRow, the method deserializeAndVerify:

void deserializeAndVerify(Output output, DeserializeRead deserializeRead, VectorRandomRowSource source, Object[] expectedRow) throws HiveException, IOException {
    deserializeRead.set(output.getData(), 0, output.getLength());
    PrimitiveCategory[] primitiveCategories = source.primitiveCategories();
    for (int i = 0; i < primitiveCategories.length; i++) {
        Object expected = expectedRow[i];
        PrimitiveCategory primitiveCategory = primitiveCategories[i];
        PrimitiveTypeInfo primitiveTypeInfo = source.primitiveTypeInfos()[i];
        if (!deserializeRead.readNextField()) {
            throw new HiveException("Unexpected NULL when reading primitiveCategory " + primitiveCategory + " expected (" + expected.getClass().getName() + ", " + expected.toString() + ") " + " deserializeRead " + deserializeRead.getClass().getName());
        }
        switch(primitiveCategory) {
            case BOOLEAN:
                {
                    Boolean value = deserializeRead.currentBoolean;
                    BooleanWritable expectedWritable = (BooleanWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case BYTE:
                {
                    Byte value = deserializeRead.currentByte;
                    ByteWritable expectedWritable = (ByteWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                    }
                }
                break;
            case SHORT:
                {
                    Short value = deserializeRead.currentShort;
                    ShortWritable expectedWritable = (ShortWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case INT:
                {
                    Integer value = deserializeRead.currentInt;
                    IntWritable expectedWritable = (IntWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case LONG:
                {
                    Long value = deserializeRead.currentLong;
                    LongWritable expectedWritable = (LongWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DATE:
                {
                    DateWritable value = deserializeRead.currentDateWritable;
                    DateWritable expectedWritable = (DateWritable) expected;
                    if (!value.equals(expectedWritable)) {
                        TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                    }
                }
                break;
            case FLOAT:
                {
                    Float value = deserializeRead.currentFloat;
                    FloatWritable expectedWritable = (FloatWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DOUBLE:
                {
                    Double value = deserializeRead.currentDouble;
                    DoubleWritable expectedWritable = (DoubleWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case STRING:
            case CHAR:
            case VARCHAR:
            case BINARY:
                {
                    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                    Text text = new Text(stringBytes);
                    String string = text.toString();
                    switch(primitiveCategory) {
                        case STRING:
                            {
                                Text expectedWritable = (Text) expected;
                                if (!string.equals(expectedWritable.toString())) {
                                    TestCase.fail("String field mismatch (expected '" + expectedWritable.toString() + "' found '" + string + "')");
                                }
                            }
                            break;
                        case CHAR:
                            {
                                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                                HiveCharWritable expectedWritable = (HiveCharWritable) expected;
                                if (!hiveChar.equals(expectedWritable.getHiveChar())) {
                                    TestCase.fail("Char field mismatch (expected '" + expectedWritable.getHiveChar() + "' found '" + hiveChar + "')");
                                }
                            }
                            break;
                        case VARCHAR:
                            {
                                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                                HiveVarcharWritable expectedWritable = (HiveVarcharWritable) expected;
                                if (!hiveVarchar.equals(expectedWritable.getHiveVarchar())) {
                                    TestCase.fail("Varchar field mismatch (expected '" + expectedWritable.getHiveVarchar() + "' found '" + hiveVarchar + "')");
                                }
                            }
                            break;
                        case BINARY:
                            {
                                BytesWritable expectedWritable = (BytesWritable) expected;
                                if (stringBytes.length != expectedWritable.getLength()) {
                                    TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                }
                                byte[] expectedBytes = expectedWritable.getBytes();
                                for (int b = 0; b < stringBytes.length; b++) {
                                    if (stringBytes[b] != expectedBytes[b]) {
                                        TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                    }
                                }
                            }
                            break;
                        default:
                            throw new HiveException("Unexpected primitive category " + primitiveCategory);
                    }
                }
                break;
            case DECIMAL:
                {
                    HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                    if (value == null) {
                        TestCase.fail("Decimal field evaluated to NULL");
                    }
                    HiveDecimalWritable expectedWritable = (HiveDecimalWritable) expected;
                    if (!value.equals(expectedWritable.getHiveDecimal())) {
                        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                        int precision = decimalTypeInfo.getPrecision();
                        int scale = decimalTypeInfo.getScale();
                        TestCase.fail("Decimal field mismatch (expected " + expectedWritable.getHiveDecimal() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                    }
                }
                break;
            case TIMESTAMP:
                {
                    Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                    TimestampWritable expectedWritable = (TimestampWritable) expected;
                    if (!value.equals(expectedWritable.getTimestamp())) {
                        TestCase.fail("Timestamp field mismatch (expected " + expectedWritable.getTimestamp() + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_YEAR_MONTH:
                {
                    HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                    HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) expected;
                    HiveIntervalYearMonth expectedValue = expectedWritable.getHiveIntervalYearMonth();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_DAY_TIME:
                {
                    HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                    HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) expected;
                    HiveIntervalDayTime expectedValue = expectedWritable.getHiveIntervalDayTime();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            default:
                throw new HiveException("Unexpected primitive category " + primitiveCategory);
        }
    }
    TestCase.assertTrue(deserializeRead.isEndOfInputReached());
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable)
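One detail worth calling out in the comparisons above: each currentX field of DeserializeRead is a primitive that gets boxed into the local variable (for example Long value = deserializeRead.currentLong), and value.equals(expectedWritable.get()) works because the primitive returned by get() is autoboxed to the same wrapper type before the comparison. A self-contained illustration of that idiom with a LongWritable (standard Hadoop API, nothing Hive-specific):

import org.apache.hadoop.io.LongWritable;

public class WritableCompareDemo {
    public static void main(String[] args) {
        // Stands in for deserializeRead.currentLong in the test helper above.
        long current = 42L;
        // Boxed into a wrapper, exactly as the helper does with its local variable.
        Long value = current;
        LongWritable expectedWritable = new LongWritable(42L);
        // get() returns a primitive long, which is autoboxed to Long for equals(Object);
        // Long.equals compares the underlying values, so this prints true.
        System.out.println(value.equals(expectedWritable.get()));
    }
}

If the wrapper types differed (say, comparing a Long against an IntWritable's boxed Integer), equals would return false even for equal values, which is why each case above boxes into the wrapper that matches its Writable.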

Example 7 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class TestVectorGroupByOperator, the method testMemoryPressureFlush:

@Test
public void testMemoryPressureFlush() throws HiveException {
    List<String> mapColumnNames = new ArrayList<String>();
    mapColumnNames.add("Key");
    mapColumnNames.add("Value");
    VectorizationContext ctx = new VectorizationContext("name", mapColumnNames);
    GroupByDesc desc = buildKeyGroupByDesc(ctx, "max", "Value", TypeInfoFactory.longTypeInfo, "Key", TypeInfoFactory.longTypeInfo);
    // Set the memory threshold so that we get 100 KB before we need to flush.
    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
    long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
    float threshold = 100.0f * 1024.0f / maxMemory;
    desc.setMemoryThreshold(threshold);
    CompilationOpContext cCtx = new CompilationOpContext();
    Operator<? extends OperatorDesc> groupByOp = OperatorFactory.get(cCtx, desc);
    VectorGroupByOperator vgo = (VectorGroupByOperator) Vectorizer.vectorizeGroupByOperator(groupByOp, ctx);
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(cCtx, vgo);
    vgo.initialize(hconf, null);
    this.outputRowCount = 0;
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

        @Override
        public void inspectRow(Object row, int tag) throws HiveException {
            ++outputRowCount;
        }
    });
    Iterable<Object> it = new Iterable<Object>() {

        @Override
        public Iterator<Object> iterator() {
            return new Iterator<Object>() {

                long value = 0;

                @Override
                public boolean hasNext() {
                    return true;
                }

                @Override
                public Object next() {
                    return ++value;
                }

                @Override
                public void remove() {
                }
            };
        }
    };
    FakeVectorRowBatchFromObjectIterables data = new FakeVectorRowBatchFromObjectIterables(100, new String[] { "long", "long" }, it, it);
    // The 'it' data source will produce data without ever ending.
    // We want to see that memory pressure kicks in and some
    // entries in the VGBY are flushed.
    long countRowsProduced = 0;
    for (VectorizedRowBatch unit : data) {
        countRowsProduced += 100;
        vgo.process(unit, 0);
        if (0 < outputRowCount) {
            break;
        }
        // Set an upper bound on how much we're willing to push before it should flush:
        // we've set the memory threshold at 100 KB and each key is distinct,
        // so it should not go beyond 100K/16 (key + data) entries.
        assertTrue(countRowsProduced < 100 * 1024 / 16);
    }
    assertTrue(0 < outputRowCount);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ArrayList(java.util.ArrayList) MemoryMXBean(java.lang.management.MemoryMXBean) CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext) Iterator(java.util.Iterator) FakeVectorRowBatchFromObjectIterables(org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromObjectIterables) VectorGroupByDesc(org.apache.hadoop.hive.ql.plan.VectorGroupByDesc) GroupByDesc(org.apache.hadoop.hive.ql.plan.GroupByDesc) FakeCaptureOutputOperator(org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator) Test(org.junit.Test)
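The only subtle arithmetic in the test above is converting a fixed byte budget into the fraction-of-heap value that gets passed to desc.setMemoryThreshold. The same calculation in a standalone sketch, using only the standard java.lang.management API (the 100 KB budget is simply the number the test picks):

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;

public class ThresholdFractionDemo {
    public static void main(String[] args) {
        MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
        long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
        // 100 KB expressed as a fraction of the maximum heap, the unit the
        // test passes to GroupByDesc.setMemoryThreshold.
        float threshold = 100.0f * 1024.0f / maxMemory;
        System.out.printf("max heap = %d bytes, threshold fraction = %.8f%n", maxMemory, threshold);
    }
}

With a 1 GB maximum heap the fraction comes out to roughly 0.000095, which is consistent with the test's own bound of fewer than 100 * 1024 / 16 rows produced before the first flush.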

Example 8 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class TestVectorUDFAdaptor, the method testMultiArgumentUDF:

@Test
public void testMultiArgumentUDF() {
    // create a syntax tree for a function call "testudf(col0, col1, col2)"
    ExprNodeGenericFuncDesc funcDesc;
    TypeInfo typeInfoStr = TypeInfoFactory.stringTypeInfo;
    TypeInfo typeInfoLong = TypeInfoFactory.longTypeInfo;
    TypeInfo typeInfoDbl = TypeInfoFactory.doubleTypeInfo;
    GenericUDFBridge genericUDFBridge = new GenericUDFBridge("testudf", false, ConcatTextLongDoubleUDF.class.getName());
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(typeInfoStr, "col0", "tablename", false));
    children.add(new ExprNodeColumnDesc(typeInfoLong, "col1", "tablename", false));
    children.add(new ExprNodeColumnDesc(typeInfoDbl, "col2", "tablename", false));
    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[3];
    for (int i = 0; i < 3; i++) {
        argDescs[i] = new VectorUDFArgDesc();
        argDescs[i].setVariable(i);
    }
    funcDesc = new ExprNodeGenericFuncDesc(typeInfoStr, genericUDFBridge, genericUDFBridge.getUdfName(), children);
    // create the adaptor for this function call to work in vector mode
    VectorUDFAdaptor vudf = null;
    try {
        vudf = new VectorUDFAdaptor(funcDesc, 3, "String", argDescs);
    } catch (HiveException e) {
        // We should never get here.
        assertTrue(false);
        throw new RuntimeException(e);
    }
    // with no nulls
    VectorizedRowBatch b = getBatchStrDblLongWithStrOut();
    vudf.evaluate(b);
    byte[] result = null;
    byte[] result2 = null;
    try {
        result = "red:1:1.0".getBytes("UTF-8");
        result2 = "blue:0:0.0".getBytes("UTF-8");
    } catch (Exception e) {
        // Cannot happen: the UTF-8 charset is always supported.
    }
    BytesColumnVector out = (BytesColumnVector) b.cols[3];
    int cmp = StringExpr.compare(result, 0, result.length, out.vector[1], out.start[1], out.length[1]);
    assertEquals(0, cmp);
    assertTrue(out.noNulls);
    // with nulls
    b = getBatchStrDblLongWithStrOut();
    b.cols[1].noNulls = false;
    vudf.evaluate(b);
    out = (BytesColumnVector) b.cols[3];
    assertFalse(out.noNulls);
    assertTrue(out.isNull[1]);
    // with all input columns repeating
    b = getBatchStrDblLongWithStrOut();
    b.cols[0].isRepeating = true;
    b.cols[1].isRepeating = true;
    b.cols[2].isRepeating = true;
    vudf.evaluate(b);
    out = (BytesColumnVector) b.cols[3];
    assertTrue(out.isRepeating);
    cmp = StringExpr.compare(result2, 0, result2.length, out.vector[0], out.start[0], out.length[0]);
    assertEquals(0, cmp);
    assertTrue(out.noNulls);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenericUDFBridge(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) BytesColumnVector(org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) ConcatTextLongDoubleUDF(org.apache.hadoop.hive.ql.exec.vector.udf.legacy.ConcatTextLongDoubleUDF) Test(org.junit.Test)
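For context, VectorUDFAdaptor bridges a legacy row-mode UDF into vectorized execution. The sketch below shows what a UDF in the style of ConcatTextLongDoubleUDF might look like, assuming the classic org.apache.hadoop.hive.ql.exec.UDF base class with an evaluate method; the ':' separator and the null handling are assumptions inferred from the expected "red:1:1.0" output and the null-propagation assertions above, not the actual test class.

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.DoubleWritable;   // assumption: Hadoop writables for the arguments
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

// Sketch of a row-mode ("legacy") UDF that the adaptor could wrap; not the real test class.
public class ConcatTextLongDoubleSketchUDF extends UDF {
    public Text evaluate(Text t, LongWritable l, DoubleWritable d) {
        if (t == null || l == null || d == null) {
            // A null argument yields a null result, which the adaptor surfaces as isNull[i] = true.
            return null;
        }
        return new Text(t.toString() + ":" + l.get() + ":" + d.get());
    }
}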

Example 9 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class TestVectorUDFAdaptor, the method testLongUDF:

@Test
public void testLongUDF() {
    // create a syntax tree for a simple function call "longudf(col0)"
    ExprNodeGenericFuncDesc funcDesc;
    TypeInfo typeInfo = TypeInfoFactory.longTypeInfo;
    GenericUDFBridge genericUDFBridge = new GenericUDFBridge("longudf", false, LongUDF.class.getName());
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(typeInfo, "col0", "tablename", false);
    children.add(colDesc);
    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[1];
    argDescs[0] = new VectorUDFArgDesc();
    argDescs[0].setVariable(0);
    funcDesc = new ExprNodeGenericFuncDesc(typeInfo, genericUDFBridge, genericUDFBridge.getUdfName(), children);
    // create the adaptor for this function call to work in vector mode
    VectorUDFAdaptor vudf = null;
    try {
        vudf = new VectorUDFAdaptor(funcDesc, 1, "Long", argDescs);
    } catch (HiveException e) {
        // We should never get here.
        assertTrue(false);
    }
    VectorizedRowBatch b = getBatchLongInLongOut();
    vudf.evaluate(b);
    // verify output
    LongColumnVector out = (LongColumnVector) b.cols[1];
    assertEquals(1000, out.vector[0]);
    assertEquals(1001, out.vector[1]);
    assertEquals(1002, out.vector[2]);
    assertTrue(out.noNulls);
    assertFalse(out.isRepeating);
    // with nulls
    b = getBatchLongInLongOut();
    out = (LongColumnVector) b.cols[1];
    b.cols[0].noNulls = false;
    vudf.evaluate(b);
    assertFalse(out.noNulls);
    assertEquals(1000, out.vector[0]);
    assertEquals(1001, out.vector[1]);
    assertTrue(out.isNull[2]);
    assertFalse(out.isRepeating);
    // with repeating
    b = getBatchLongInLongOut();
    out = (LongColumnVector) b.cols[1];
    b.cols[0].isRepeating = true;
    vudf.evaluate(b);
    // The implementation may or may not set the output's isRepeating flag.
    // That is implementation-defined.
    assertTrue(b.cols[1].isRepeating && out.vector[0] == 1000 || !b.cols[1].isRepeating && out.vector[2] == 1000);
    assertEquals(3, b.size);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenericUDFBridge(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) LongUDF(org.apache.hadoop.hive.ql.exec.vector.udf.legacy.LongUDF) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) Test(org.junit.Test)

Example 10 with HiveException

use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From the class GenericUDFReflect, the method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Try to find the class
    // Skip class loading if the class name didn't change
    boolean classNameChanged = false;
    Object newClassName = arguments[0].get();
    // The class name/method name argument is typically a constant Java String or a constant Text (StringWritable).
    if (className == null || ObjectInspectorUtils.compare(className, classNameOI, newClassName, inputClassNameOI) != 0) {
        className = ObjectInspectorUtils.copyToStandardObject(newClassName, inputClassNameOI);
        String classNameString = classNameOI.getPrimitiveJavaObject(className);
        try {
            c = JavaUtils.loadClass(classNameString);
        } catch (ClassNotFoundException ex) {
            throw new HiveException("UDFReflect evaluate ", ex);
        }
        try {
            o = null;
            o = ReflectionUtils.newInstance(c, null);
        } catch (Exception e) {
            // Ignored: instantiation may fail (e.g. no accessible default constructor);
            // a static target method can still be invoked below with a null receiver.
        }
        classNameChanged = true;
    }
    // Try to find the method
    // Skip method finding if the method name didn't change, and class name didn't change.
    Object newMethodName = arguments[1].get();
    if (methodName == null || ObjectInspectorUtils.compare(methodName, methodNameOI, newMethodName, inputMethodNameOI) != 0 || classNameChanged) {
        methodName = ObjectInspectorUtils.copyToStandardObject(newMethodName, inputMethodNameOI);
        String methodNameString = methodNameOI.getPrimitiveJavaObject(methodName);
        try {
            m = findMethod(c, methodNameString, String.class, false);
        } catch (Exception e) {
            throw new HiveException("UDFReflect getMethod ", e);
        }
    }
    Object[] parameterJavaValues = setupParameters(arguments, 2);
    try {
        return String.valueOf(m.invoke(o, parameterJavaValues));
    } catch (Exception e1) {
        System.err.println("UDFReflect evaluate " + e1 + " method = " + m + " args = " + Arrays.asList(parameterJavaValues));
    }
    return null;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
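The point of the bookkeeping above is that class loading and method lookup are skipped whenever the class-name and method-name arguments are unchanged from the previous row. The same caching idea, reduced to plain Java reflection and restricted to static methods for brevity; this is a sketch of the pattern, not the UDF's actual code path.

import java.lang.reflect.Method;

// Minimal "reload only on change" cache in the spirit of GenericUDFReflect.
class ReflectCache {
    private String cachedClassName;
    private String cachedMethodName;
    private Class<?> clazz;
    private Method method;

    Object invokeStatic(String className, String methodName, Object... args) throws Exception {
        if (!className.equals(cachedClassName)) {
            clazz = Class.forName(className);   // the UDF uses JavaUtils.loadClass(...) instead
            cachedClassName = className;
            cachedMethodName = null;            // a new class invalidates the cached method
        }
        if (!methodName.equals(cachedMethodName)) {
            Class<?>[] types = new Class<?>[args.length];
            for (int i = 0; i < args.length; i++) {
                types[i] = args[i].getClass(); // exact-match lookup, simpler than the UDF's findMethod helper
            }
            method = clazz.getMethod(methodName, types);
            cachedMethodName = methodName;
        }
        return method.invoke(null, args);       // a null receiver is fine for static methods
    }

    public static void main(String[] args) throws Exception {
        ReflectCache cache = new ReflectCache();
        // Resolves System.getProperty(String) once, then reuses the cached Method.
        System.out.println(cache.invokeStatic("java.lang.System", "getProperty", "java.version"));
        System.out.println(cache.invokeStatic("java.lang.System", "getProperty", "java.vendor"));
    }
}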

Aggregations

HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 364
IOException (java.io.IOException): 144
ArrayList (java.util.ArrayList): 64
Table (org.apache.hadoop.hive.ql.metadata.Table): 60
Path (org.apache.hadoop.fs.Path): 55
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 42
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 41
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 36
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 35
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 35
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 31
FileNotFoundException (java.io.FileNotFoundException): 26
FileSystem (org.apache.hadoop.fs.FileSystem): 26
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 26
URISyntaxException (java.net.URISyntaxException): 25
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 25
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 24
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 23
HashMap (java.util.HashMap): 21
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 21