Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project:
class TestFunctions, method addNoSuchCatalog.
@Test(expected = NoSuchObjectException.class)
public void addNoSuchCatalog() throws TException {
  // Creating a function in a catalog that does not exist must fail with
  // NoSuchObjectException (asserted via the @Test annotation above).
  final String name = "test_function";
  FunctionBuilder builder = new FunctionBuilder()
      .setName(name)
      .setCatName("nosuch")
      .setDbName(DEFAULT_DATABASE_NAME)
      .setClass(TEST_FUNCTION_CLASS)
      .setFunctionType(FunctionType.JAVA)
      .setOwnerType(PrincipalType.ROLE)
      .setOwner("owner")
      .setCreateTime(100);
  builder.addResourceUri(new ResourceUri(ResourceType.JAR, "hdfs:///tmp/jar1.jar"));
  builder.addResourceUri(new ResourceUri(ResourceType.FILE, "hdfs:///tmp/file1.txt"));
  builder.addResourceUri(new ResourceUri(ResourceType.ARCHIVE, "hdfs:///tmp/archive1.tgz"));
  builder.create(client, metaStore.getConf());
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project:
class HBaseUtils, method deserializeFunction.
/**
 * Deserialize a function whose database and function name are already known
 * (e.g. because they were part of the storage key).
 * @param dbName name of the database the function belongs to
 * @param functionName name of the function
 * @param value serialized (protobuf) representation of the function
 * @return the reconstructed Function object
 * @throws InvalidProtocolBufferException if {@code value} is not a valid serialized function
 */
static Function deserializeFunction(String dbName, String functionName, byte[] value) throws InvalidProtocolBufferException {
  HbaseMetastoreProto.Function proto = HbaseMetastoreProto.Function.parseFrom(value);
  Function result = new Function();
  result.setDbName(dbName);
  result.setFunctionName(functionName);
  if (proto.hasClassName()) {
    result.setClassName(proto.getClassName());
  }
  if (proto.hasOwnerName()) {
    result.setOwnerName(proto.getOwnerName());
  }
  if (proto.hasOwnerType()) {
    result.setOwnerType(convertPrincipalTypes(proto.getOwnerType()));
  }
  // Proto stores the create time as a long; the thrift API takes an int.
  result.setCreateTime((int) proto.getCreateTime());
  if (proto.hasFunctionType()) {
    result.setFunctionType(convertFunctionTypes(proto.getFunctionType()));
  }
  for (HbaseMetastoreProto.Function.ResourceUri uri : proto.getResourceUrisList()) {
    result.addToResourceUris(new ResourceUri(convertResourceTypes(uri.getResourceType()), uri.getUri()));
  }
  return result;
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project:
class FunctionSemanticAnalyzer, method getResourceList.
/**
 * Extracts the resource URI list (JAR/FILE/ARCHIVE entries) declared in a
 * CREATE FUNCTION statement's AST.
 * @param ast the statement's AST node
 * @return the declared resources, or null when no TOK_RESOURCE_LIST child exists
 * @throws SemanticException if the resource list is malformed
 */
private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
  ASTNode listNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
  if (listNode == null) {
    return null;
  }
  List<ResourceUri> resources = new ArrayList<ResourceUri>();
  for (int i = 0; i < listNode.getChildCount(); ++i) {
    // Each child should have the shape ^(TOK_RESOURCE_URI $resType $resPath).
    ASTNode child = (ASTNode) listNode.getChild(i);
    if (child.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
      throw new SemanticException("Expected token type TOK_RESOURCE_URI but found " + child.getToken().toString());
    }
    if (child.getChildCount() != 2) {
      throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found " + child.getChildCount());
    }
    ResourceType type = getResourceType((ASTNode) child.getChild(0));
    String path = PlanUtils.stripQuotes(((ASTNode) child.getChild(1)).getText());
    resources.add(new ResourceUri(type, path));
  }
  return resources;
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project:
class FunctionTask, method checkLocalFunctionResources.
/**
 * Rejects function resources that point at the local filesystem when the
 * configured warehouse filesystem is not local — such files would not be
 * reachable from other nodes, so disallow them in this case.
 * @param db Hive session handle, used only for its configuration
 * @param resources the function's declared resource URIs; may be null or empty
 * @throws HiveException if a local-filesystem resource is found, or the check fails
 */
private void checkLocalFunctionResources(Hive db, List<ResourceUri> resources) throws HiveException {
  if (resources == null || resources.isEmpty()) {
    return;
  }
  try {
    String localFsScheme = FileSystem.getLocal(db.getConf()).getUri().getScheme();
    String configuredFsScheme = FileSystem.get(db.getConf()).getUri().getScheme();
    if (configuredFsScheme.equals(localFsScheme)) {
      // Configured warehouse FS is local, don't need to bother checking.
      return;
    }
    for (ResourceUri resource : resources) {
      String resUri = resource.getUri();
      if (ResourceDownloader.isFileUri(resUri)) {
        throw new HiveException("Hive warehouse is non-local, but " + resource.getUri() + " specifies file on local filesystem. " + "Resources on non-local warehouse should specify a non-local scheme/path");
      }
    }
  } catch (HiveException e) {
    // Re-throw our own rejection unchanged.
    throw e;
  } catch (Exception e) {
    LOG.error("Exception caught in checkLocalFunctionResources", e);
    throw new HiveException(e);
  }
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project:
class FunctionTask, method createPermanentFunction.
// todo authorization
/**
 * Creates a permanent (metastore-backed) function: validates its resources,
 * registers it with the local FunctionRegistry, then persists it to the metastore.
 * @param db Hive session handle
 * @param createFunctionDesc descriptor with the function name, class, and resources
 * @return 0 on success, 1 if registration in the FunctionRegistry failed
 * @throws HiveException on metastore or resource-validation errors
 * @throws IOException declared for resource handling — TODO confirm actual throwers
 */
private int createPermanentFunction(Hive db, CreateFunctionDesc createFunctionDesc) throws HiveException, IOException {
  String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
  String dbName = qualifiedNameParts[0];
  String funcName = qualifiedNameParts[1];
  String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
  String className = createFunctionDesc.getClassName();
  List<ResourceUri> resources = createFunctionDesc.getResources();
  // For permanent functions, check for any resources from local filesystem.
  checkLocalFunctionResources(db, resources);
  FunctionInfo registered = null;
  try {
    registered = FunctionRegistry.registerPermanentFunction(registeredName, className, true, toFunctionResource(resources));
  } catch (RuntimeException ex) {
    // Unwrap to the root cause so the real failure reason is surfaced;
    // previously the cause chain was walked and then silently discarded.
    Throwable t = ex;
    while (t.getCause() != null) {
      t = t.getCause();
    }
    LOG.error("Failed to register " + registeredName, t);
  }
  if (registered == null) {
    console.printError("Failed to register " + registeredName + " using class " + createFunctionDesc.getClassName());
    return 1;
  }
  // Add to metastore
  Function func = new Function(funcName, dbName, className, SessionState.get().getUserName(), PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), org.apache.hadoop.hive.metastore.api.FunctionType.JAVA, resources);
  db.createFunction(func);
  return 0;
}
Aggregations