Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class GenericUDFMacro, method initialize().
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  checkNotNull(colTypes, "colTypes");
  checkNotNull(arguments, "arguments");
  checkNotNull(bodyDesc, "bodyDesc");
  if (colTypes.size() != arguments.length) {
    throw new UDFArgumentLengthException(
        "The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
  }
  try {
    body = ExprNodeEvaluatorFactory.get(bodyDesc);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
  // Build one converter per argument so that incoming values can be normalized
  // to the standard writable object inspectors derived from the macro's column types.
  converters = new ObjectInspectorConverters.Converter[arguments.length];
  ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
  for (int index = 0; index < arguments.length; ++index) {
    ObjectInspector objectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
    colObjectInspectors.add(objectInspector);
    converters[index] = ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
  }
  evaluatedArguments = new ArrayList<Object>(arguments.length);
  // The macro body is initialized against a struct of the macro's named columns.
  ObjectInspector structOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(colNames, colObjectInspectors);
  try {
    return body.initialize(structOI);
  } catch (HiveException ex) {
    throw new UDFArgumentException(ex);
  }
}
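For context, here is a minimal sketch of the companion evaluate() method, showing how the converters and evaluatedArguments prepared above are typically consumed at runtime. This is an illustrative reconstruction, not verbatim Hive source:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  evaluatedArguments.clear();
  // Normalize each incoming value to the writable inspector chosen in initialize().
  for (int index = 0; index < arguments.length; ++index) {
    evaluatedArguments.add(converters[index].convert(arguments[index].get()));
  }
  // The macro body evaluates the converted values as a single struct row.
  return body.evaluate(evaluatedArguments);
}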
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class GenericUDFInBloomFilter, method evaluate().
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // Return if either of the arguments is null
  if (arguments[0].get() == null || arguments[1].get() == null) {
    return null;
  }
  if (!initializedBloomFilter) {
    // Set up the bloom filter once, copying the serialized bytes out of the
    // BytesWritable before deserializing.
    try {
      BytesWritable bw = (BytesWritable) arguments[1].get();
      byte[] bytes = new byte[bw.getLength()];
      System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength());
      bloomFilter = BloomFilter.deserialize(new ByteArrayInputStream(bytes));
    } catch (IOException e) {
      throw new HiveException(e);
    }
    initializedBloomFilter = true;
  }
  // Check if the value is in the bloom filter
  switch (((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory()) {
    case BOOLEAN:
      boolean vBoolean = ((BooleanObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testLong(vBoolean ? 1 : 0);
    case BYTE:
      byte vByte = ((ByteObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testLong(vByte);
    case SHORT:
      short vShort = ((ShortObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testLong(vShort);
    case INT:
      int vInt = ((IntObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testLong(vInt);
    case LONG:
      long vLong = ((LongObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testLong(vLong);
    case FLOAT:
      float vFloat = ((FloatObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testDouble(vFloat);
    case DOUBLE:
      double vDouble = ((DoubleObjectInspector) valObjectInspector).get(arguments[0].get());
      return bloomFilter.testDouble(vDouble);
    case DECIMAL:
      HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get());
      int startIdx = vDecimal.toBytes(scratchBuffer);
      return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
    case DATE:
      DateWritable vDate = ((DateObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get());
      return bloomFilter.testLong(vDate.getDays());
    case TIMESTAMP:
      Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector)
          .getPrimitiveJavaObject(arguments[0].get());
      return bloomFilter.testLong(vTimeStamp.getTime());
    case CHAR:
      Text vChar = ((HiveCharObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
      return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength());
    case VARCHAR:
      Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get()).getTextValue();
      return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength());
    case STRING:
      Text vString = ((StringObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get());
      return bloomFilter.testBytes(vString.getBytes(), 0, vString.getLength());
    case BINARY:
      BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector)
          .getPrimitiveWritableObject(arguments[0].get());
      return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength());
    default:
      throw new UDFArgumentTypeException(0, "Bad primitive category "
          + ((PrimitiveObjectInspector) valObjectInspector).getTypeInfo().getPrimitiveCategory());
  }
}
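To see where the serialized filter in arguments[1] comes from, here is a minimal round-trip sketch, assuming the BloomFilter(expectedEntries, fpp) constructor and the static serialize/deserialize helpers on org.apache.hive.common.util.BloomFilter; the capacity and false-positive rate are illustrative choices, not values from the source:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hive.common.util.BloomFilter;

public class BloomFilterRoundTrip {
  public static void main(String[] args) throws IOException {
    // 10,000 expected entries at a 5% false-positive rate (illustrative values).
    BloomFilter filter = new BloomFilter(10000, 0.05);
    filter.addLong(42L);

    // Serialize to the same byte layout that evaluate() above deserializes.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BloomFilter.serialize(out, filter);

    BloomFilter restored = BloomFilter.deserialize(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(restored.testLong(42L)); // true
    System.out.println(restored.testLong(7L)); // false with high probability
  }
}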
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class HiveHBaseTableInputFormat, method getTimestampVal().
private long getTimestampVal(IndexSearchCondition sc) throws IOException {
  long timestamp;
  try {
    ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(sc.getConstantDesc());
    ObjectInspector inspector = eval.initialize(null);
    Object value = eval.evaluate(null);
    if (inspector instanceof LongObjectInspector) {
      timestamp = ((LongObjectInspector) inspector).get(value);
    } else {
      PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
      timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
    }
  } catch (HiveException e) {
    throw new IOException(e);
  }
  return timestamp;
}
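The same constant-evaluation pattern can be exercised standalone. A minimal sketch, assuming ExprNodeConstantDesc's (TypeInfo, Object) constructor and TypeInfoFactory.longTypeInfo; the literal epoch value is illustrative. Because the expression is a constant, passing null as the row to initialize() and evaluate() is safe:

ExprNodeConstantDesc constDesc =
    new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 1262304000000L);
ExprNodeConstantEvaluator eval = new ExprNodeConstantEvaluator(constDesc);
ObjectInspector inspector = eval.initialize(null); // no input schema needed for a constant
Object value = eval.evaluate(null); // no row needed either
long timestamp = ((LongObjectInspector) inspector).get(value);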
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class HCatTable, method storageHandler().
/**
 * Setter for StorageHandler class.
 */
public HCatTable storageHandler(String storageHandler) throws HCatException {
  this.tblProps.put(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
      storageHandler);
  LOG.warn("HiveStorageHandlers can't be reliably instantiated on the client-side. "
      + "Attempting to derive Input/OutputFormat settings from StorageHandler, on best effort: ");
  try {
    HiveStorageHandler sh = HiveUtils.getStorageHandler(getConf(), storageHandler);
    this.sd.setInputFormat(sh.getInputFormatClass().getName());
    this.sd.setOutputFormat(sh.getOutputFormatClass().getName());
    this.sd.getSerdeInfo().setSerializationLib(sh.getSerDeClass().getName());
  } catch (HiveException e) {
    LOG.warn("Could not derive Input/OutputFormat and SerDe settings from storageHandler. "
        + "These values need to be set explicitly.", e);
  }
  return this;
}
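A hedged usage sketch of this fluent setter, assuming HCatTable's two-argument (dbName, tableName) constructor; the database and table names are placeholders, and the handler class is Hive's standard HBase storage handler:

// storageHandler(...) throws HCatException, so the enclosing method must declare or handle it.
HCatTable table = new HCatTable("default", "events") // placeholder names
    .storageHandler("org.apache.hadoop.hive.hbase.HBaseStorageHandler");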
Use of org.apache.hadoop.hive.ql.metadata.HiveException in project hive by apache.
The class SQLAuthorizationUtils, method getPrivilegesFromMetaStore().
/**
 * Get the privileges this user (userName argument) has on the object
 * (hivePrivObject argument). If isAdmin is true, an admin privilege is
 * added as well.
 *
 * @param metastoreClient metastore client used to look up privileges
 * @param userName user whose privileges are being checked
 * @param hivePrivObject object on which the privileges are checked
 * @param curRoles current active roles for the user
 * @param isAdmin whether the user can run as the admin user
 * @return the privileges the user holds on the object
 * @throws HiveAuthzPluginException
 */
static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient,
    String userName, HivePrivilegeObject hivePrivObject, List<String> curRoles,
    boolean isAdmin) throws HiveAuthzPluginException {
  // get privileges for this user and its roles on this object
  PrincipalPrivilegeSet thriftPrivs = null;
  try {
    HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
    if (objectRef.getObjectType() == null) {
      objectRef.setObjectType(HiveObjectType.GLOBAL);
    }
    thriftPrivs = metastoreClient.get_privilege_set(objectRef, userName, null);
  } catch (TException | HiveException e) {
    // MetaException is a TException subtype, so this multi-catch covers every
    // exception type the calls above can throw.
    throwGetPrivErr(e, hivePrivObject, userName);
  }
  filterPrivsByCurrentRoles(thriftPrivs, curRoles);
  // convert to RequiredPrivileges
  RequiredPrivileges privs = getRequiredPrivsFromThrift(thriftPrivs);
  // add owner privilege if the user is the owner of the object
  if (isOwner(metastoreClient, userName, curRoles, hivePrivObject)) {
    privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV);
  }
  if (isAdmin) {
    privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
  }
  return privs;
}
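The throwGetPrivErr helper is not shown in this excerpt. A plausible reconstruction based on how it is called above, wrapping the cause in the HiveAuthzPluginException the method declares; the exact message format is an assumption:

private static void throwGetPrivErr(Exception e, HivePrivilegeObject hivePrivObject,
    String userName) throws HiveAuthzPluginException {
  // Hypothetical reconstruction: report the object and user, preserve the cause.
  String msg = "Error getting privileges on " + hivePrivObject + " for " + userName
      + ": " + e.getMessage();
  throw new HiveAuthzPluginException(msg, e);
}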