Example 11 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From class GenericUDFMacro, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkNotNull(colTypes, "colTypes");
    checkNotNull(arguments, "arguments");
    checkNotNull(bodyDesc, "bodyDesc");
    if (colTypes.size() != arguments.length) {
        throw new UDFArgumentLengthException("The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
    }
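    // Compile the macro body expression into an evaluator.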
    try {
        body = ExprNodeEvaluatorFactory.get(bodyDesc);
    } catch (HiveException ex) {
        throw new UDFArgumentException(ex);
    }
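    // Convert each call-site argument to the writable representation of the macro's declared column type.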
    converters = new ObjectInspectorConverters.Converter[arguments.length];
    ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
    for (int index = 0; index < arguments.length; ++index) {
        ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
        colObjectInspectors.add(objectInspector);
        converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
    }
    evaluatedArguments = new ArrayList<Object>(arguments.length);
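    // The macro body evaluates against a struct of the macro's column names and types.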
    ObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
    try {
        return body.initialize(structOI);
    } catch (HiveException ex) {
        throw new UDFArgumentException(ex);
    }
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), ObjectInspectorConverters (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters), ArrayList (java.util.ArrayList)
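
The converter array built in initialize is what lets arguments supplied at the call site reach the macro body in the writable representation its column types declare. Below is a minimal standalone sketch of that conversion step; the int inspectors and the class name ConverterSketch are illustrative choices, not part of the Hive source.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConverterSketch {
    public static void main(String[] args) {
        // Source inspector: a Java int, as a caller might supply it.
        ObjectInspector javaIntOI = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
        // Target inspector: the standard writable int the macro body expects.
        ObjectInspector writableIntOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
        ObjectInspectorConverters.Converter conv =
                ObjectInspectorConverters.getConverter(javaIntOI, writableIntOI);
        // Converts the Java Integer 42 into an IntWritable holding 42.
        System.out.println(conv.convert(42));
    }
}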

Example 12 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From class GenericUDFInBloomFilter, method evaluate:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Return if either of the arguments is null
    if (arguments[0].get() == null || arguments[1].get() == null) {
        return null;
    }
    if (!initializedBloomFilter) {
        // Setup the bloom filter once
        try {
            BytesWritable bw = (BytesWritable) arguments[1].get();
            byte[] bytes = new byte[bw.getLength()];
            System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
            bloomFilter = BloomFilter.deserialize(new ByteArrayInputStream(bytes));
        } catch (IOException e) {
            throw new HiveException(e);
        }
        initializedBloomFilter = true;
    }
    // Check if the value is in bloom filter
    switch(((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
        case BOOLEAN:
            boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vBoolean ? 1 : 0);
        case BYTE:
            byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vByte);
        case SHORT:
            short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vShort);
        case INT:
            int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vInt);
        case LONG:
            long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testLong(vLong);
        case FLOAT:
            float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testDouble(vFloat);
        case DOUBLE:
            double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
            return bloomFilter.testDouble(vDouble);
        case DECIMAL:
            HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            int startIdx = vDecimal.toBytes(scratchBuffer);
            return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
        case DATE:
            DateWritable vDate = ((DateObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testLong(vDate.getDays());
        case TIMESTAMP:
            Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).getPrimitiveJavaObject(arguments[0].get());
            return bloomFilter.testLong(vTimeStamp.getTime());
        case CHAR:
            Text vChar = ((HiveCharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
            return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
        case VARCHAR:
            Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get()).getTextValue();
            return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
        case STRING:
            Text vString = ((StringObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
        case BINARY:
            BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector).getPrimitiveWritableObject(arguments[0].get());
            return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
        default:
            throw new UDFArgumentTypeException(0, "Bad primitive category " + ((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory());
    }
}
Also used: HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), Timestamp (java.sql.Timestamp), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable), DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), BytesWritable (org.apache.hadoop.io.BytesWritable), Text (org.apache.hadoop.io.Text), IOException (java.io.IOException), ByteArrayInputStream (java.io.ByteArrayInputStream)
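
The one-time setup in evaluate deserializes a bloom filter that some upstream operator serialized into a BytesWritable. A minimal sketch of that round trip, assuming the static serialize/deserialize pair on org.apache.hive.common.util.BloomFilter; the entry count and keys are made up for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hive.common.util.BloomFilter;

public class BloomFilterRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // Build a small filter and add two long keys.
        BloomFilter bf = new BloomFilter(1000);
        bf.addLong(42L);
        bf.addLong(7L);
        // Serialize, then deserialize from the raw bytes -- the same
        // round trip the UDF performs on its second argument.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BloomFilter.serialize(out, bf);
        BloomFilter restored = BloomFilter.deserialize(new ByteArrayInputStream(out.toByteArray()));
        System.out.println(restored.testLong(42L)); // true
        System.out.println(restored.testLong(99L)); // false, with high probability
    }
}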

Example 13 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From class HiveHBaseTableInputFormat, method getTimestampVal:

private long getTimestampVal(IndexSearchCondition sc) throws IOException {
    long timestamp;
    try {
        ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(sc.getConstantDesc());
        ObjectInspector inspector = eval.initialize(null);
        Object value = eval.evaluate(null);
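        // A long constant can be read directly; anything else is coerced through Hive's timestamp conversion.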
        if (inspector instanceof LongObjectInspector) {
            timestamp = ((LongObjectInspector) inspector).get(value);
        } else {
            PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
            timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
        }
    } catch (HiveException e) {
        throw new IOException(e);
    }
    return timestamp;
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), LongObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), ExprNodeConstantEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator), IOException (java.io.IOException)
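
The evaluator pattern works here because a constant expression needs no input row, so both initialize and evaluate can be passed null. A standalone sketch of the same pattern, with a made-up long literal standing in for sc.getConstantDesc():

import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;

public class ConstantEvalSketch {
    public static void main(String[] args) throws HiveException {
        // A literal long constant; in the input format it comes from the search condition.
        ExprNodeConstantDesc constant = new ExprNodeConstantDesc(1500000000000L);
        ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(constant);
        // Constants need no row context, hence the nulls.
        ObjectInspector inspector = eval.initialize(null);
        Object value = eval.evaluate(null);
        if (inspector instanceof LongObjectInspector) {
            System.out.println(((LongObjectInspector) inspector).get(value));
        }
    }
}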

Example 14 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From class HCatTable, method storageHandler:

/**
   * Setter for StorageHandler class.
   */
public HCatTable storageHandler(String storageHandler) throws HCatException {
    this.tblProps.put(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE, storageHandler);
    LOG.warn("HiveStorageHandlers can't be reliably instantiated on the client-side. " + "Attempting to derive Input/OutputFormat settings from StorageHandler, on best effort: ");
    try {
        HiveStorageHandler sh = HiveUtils.getStorageHandler(getConf(), storageHandler);
        this.sd.setInputFormat(sh.getInputFormatClass().getName());
        this.sd.setOutputFormat(sh.getOutputFormatClass().getName());
        this.sd.getSerdeInfo().setSerializationLib(sh.getSerDeClass().getName());
    } catch (HiveException e) {
        LOG.warn("Could not derive Input/OutputFormat and SerDe settings from storageHandler. " + "These values need to be set explicitly.", e);
    }
    return this;
}
Also used: HiveStorageHandler (org.apache.hadoop.hive.ql.metadata.HiveStorageHandler), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
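
Since storageHandler is a fluent setter, it slots into HCatTable's builder-style client API. A hedged usage sketch follows; the database and table names are illustrative, and whether the formats are actually derived depends on the handler class (here HBaseStorageHandler) being loadable on the client classpath.

import org.apache.hive.hcatalog.api.HCatTable;
import org.apache.hive.hcatalog.common.HCatException;

public class StorageHandlerSketch {
    public static void main(String[] args) throws HCatException {
        // The setter records the handler in the table properties either way;
        // input/output format and SerDe are derived only on a best-effort basis.
        HCatTable table = new HCatTable("default", "demo_table")
                .storageHandler("org.apache.hadoop.hive.hbase.HBaseStorageHandler");
        System.out.println(table.getTableName());
    }
}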

Example 15 with HiveException

Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.

From class SQLAuthorizationUtils, method getPrivilegesFromMetaStore:

/**
   * Get the privileges this user (userName argument) has on the object
   * (hivePrivObject argument). If isAdmin is true, adds an admin privilege as
   * well.
   *
   * @param metastoreClient
   *          client used to query privileges from the metastore
   * @param userName
   *          user whose privileges are being looked up
   * @param hivePrivObject
   *          object the privileges apply to
   * @param curRoles
   *          current active roles for user
   * @param isAdmin
   *          if user can run as admin user
   * @return the privileges granted to the user on the object
   * @throws HiveAuthzPluginException
   */
static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient, String userName, HivePrivilegeObject hivePrivObject, List<String> curRoles, boolean isAdmin) throws HiveAuthzPluginException {
    // get privileges for this user and its role on this object
    PrincipalPrivilegeSet thrifPrivs = null;
    try {
        HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
        if (objectRef.getObjectType() == null) {
            objectRef.setObjectType(HiveObjectType.GLOBAL);
        }
        thrifPrivs = metastoreClient.get_privilege_set(objectRef, userName, null);
    } catch (MetaException e) {
        throwGetPrivErr(e, hivePrivObject, userName);
    } catch (TException e) {
        throwGetPrivErr(e, hivePrivObject, userName);
    } catch (HiveException e) {
        throwGetPrivErr(e, hivePrivObject, userName);
    }
    filterPrivsByCurrentRoles(thrifPrivs, curRoles);
    // convert to RequiredPrivileges
    RequiredPrivileges privs = getRequiredPrivsFromThrift(thrifPrivs);
    // add owner privilege if user is owner of the object
    if (isOwner(metastoreClient, userName, curRoles, hivePrivObject)) {
        privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
    }
    if (isAdmin) {
        privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
    }
    return privs;
}
Also used: TException (org.apache.thrift.TException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), PrincipalPrivilegeSet (org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet), HiveObjectRef (org.apache.hadoop.hive.metastore.api.HiveObjectRef), MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
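
The heart of the example is the get_privilege_set metastore call. A minimal sketch of just that call, assuming an already-connected IMetaStoreClient; the table coordinates and the helper name lookupTablePrivs are hypothetical.

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.thrift.TException;

public class PrivilegeLookupSketch {
    static PrincipalPrivilegeSet lookupTablePrivs(IMetaStoreClient client, String userName) throws TException {
        // Describe the object: a table in the default database (illustrative names).
        HiveObjectRef ref = new HiveObjectRef();
        ref.setObjectType(HiveObjectType.TABLE);
        ref.setDbName("default");
        ref.setObjectName("demo_table");
        // null group names, as in the original call.
        return client.get_privilege_set(ref, userName, null);
    }
}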

Aggregations

HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 364
IOException (java.io.IOException): 144
ArrayList (java.util.ArrayList): 64
Table (org.apache.hadoop.hive.ql.metadata.Table): 60
Path (org.apache.hadoop.fs.Path): 55
SerDeException (org.apache.hadoop.hive.serde2.SerDeException): 42
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 41
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 36
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 35
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 35
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 31
FileNotFoundException (java.io.FileNotFoundException): 26
FileSystem (org.apache.hadoop.fs.FileSystem): 26
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 26
URISyntaxException (java.net.URISyntaxException): 25
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 25
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 24
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 23
HashMap (java.util.HashMap): 21
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 21