use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.
The class FunctionTask, method toFunctionResource:
/**
 * Converts metastore {@code ResourceUri} entries into session-level
 * {@code FunctionResource} objects, preserving order.
 *
 * @param resources the metastore resource list; may be null
 * @return an array of converted resources, or null when the input is null
 * @throws HiveException if a resource type cannot be mapped
 */
public static FunctionResource[] toFunctionResource(List<ResourceUri> resources) throws HiveException {
  if (resources == null) {
    return null;
  }
  FunctionResource[] converted = new FunctionResource[resources.size()];
  int idx = 0;
  for (ResourceUri uri : resources) {
    // Map the metastore resource type onto the session-state equivalent.
    SessionState.ResourceType sessionType = getResourceType(uri.getResourceType());
    converted[idx++] = new FunctionResource(sessionType, uri.getUri());
  }
  return converted;
}
use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.
The class FunctionLocalizer, method localizeOneResource:
// Downloads (or reuses) the local copy of a single function resource and records it
// in the per-function result set. Local copies are ref-counted per source URI so the
// same jar/file is shared across functions that reference it.
// NOTE(review): on download failure this logs and returns silently (best-effort);
// the caller presumably tolerates a partially-localized function — confirm.
private void localizeOneResource(String fqfn, URI srcUri, ResourceType rt, FnResources result) throws URISyntaxException, IOException {
// Fast path: an already-localized, still-referenced copy of this URI exists.
RefCountedResource rcr = localFiles.get(srcUri);
if (rcr != null && rcr.refCount > 0) {
logFilesUsed("Reusing", fqfn, srcUri, rcr);
++rcr.refCount;
result.addResources(rcr);
return;
}
// Slow path: download the resource (possibly expanding to multiple files, e.g. via Ivy).
rcr = new RefCountedResource();
List<URI> localUris = resourceDownloader.downloadExternal(srcUri, fqfn, false);
if (localUris == null || localUris.isEmpty()) {
// Best-effort: failure is logged but not propagated; rcr is NOT cached in localFiles.
LOG.error("Cannot download " + srcUri + " for " + fqfn);
return;
}
rcr.resources = new ArrayList<>();
for (URI uri : localUris) {
// Reuse the same type for all. Only Ivy can return more than one, probably all jars.
String path = uri.getPath();
rcr.resources.add(new FunctionResource(rt, path));
if (rt == ResourceType.JAR) {
// Track new jars so they can be added to the classloader later.
recentlyLocalizedJars.add(path);
}
}
// First reference to the freshly downloaded copy; publish it in the cache.
++rcr.refCount;
logFilesUsed("Using", fqfn, srcUri, rcr);
localFiles.put(srcUri, rcr);
result.addResources(rcr);
}
use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.
The class Registry, method registerToSessionRegistry:
// should be called after session registry is checked
// should be called after session registry is checked
/**
 * Registers a metastore-backed (PERSISTENT) function into the session registry,
 * first localizing its resources and loading its UDF class.
 *
 * @param qualifiedName fully qualified function name (db.func)
 * @param function the function info retrieved from the metastore
 * @return the registered FunctionInfo, or null if the resources could not be
 *         loaded or the class is not a valid UDF
 * @throws SemanticException if the UDF class cannot be loaded
 */
private FunctionInfo registerToSessionRegistry(String qualifiedName, FunctionInfo function) throws SemanticException {
  FunctionInfo ret = null;
  ClassLoader prev = Utilities.getSessionSpecifiedClassLoader();
  try {
    // Found UDF in metastore - now add it to the function registry
    // At this point we should add any relevant jars that would be needed for the UDF.
    FunctionResource[] resources = function.getResources();
    try {
      FunctionUtils.addFunctionResources(resources);
    } catch (Exception e) {
      // Best-effort: resource failure is logged and the function is simply not registered.
      LOG.error("Unable to load resources for " + qualifiedName + ":" + e, e);
      return null;
    }
    // Re-read the loader: adding resources may have installed a new session classloader.
    ClassLoader loader = Utilities.getSessionSpecifiedClassLoader();
    Class<?> udfClass = Class.forName(function.getClassName(), true, loader);
    // Make sure the FunctionInfo is listed as PERSISTENT (rather than TEMPORARY)
    // when it is registered to the system registry.
    ret = SessionState.getRegistryForWrite().registerFunction(qualifiedName, FunctionType.PERSISTENT, udfClass, resources);
    if (ret == null) {
      LOG.error(function.getClassName() + " is not a valid UDF class and was not registered.");
    }
    if (SessionState.get().isHiveServerQuery()) {
      SessionState.getRegistryForWrite().addToUDFLoaders(loader);
    }
  } catch (ClassNotFoundException e) {
    // Lookup of UDF class failed. Log with the full stack trace and chain the cause
    // into the thrown exception instead of flattening it to a string (bug fix:
    // the original dropped the cause, losing the stack for diagnosis).
    LOG.error("Unable to load UDF class: " + e, e);
    Utilities.restoreSessionSpecifiedClassLoader(prev);
    throw new SemanticException("Unable to load UDF class: " + e + "\nPlease ensure that the JAR file containing this class has been properly installed " + "in the auxiliary directory or was added with ADD JAR command.", e);
  } finally {
    // NOTE(review): ret may be null here; assumes shareStateWith tolerates null — confirm.
    function.shareStateWith(ret);
  }
  return ret;
}
use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.
The class DescFunctionOperation, method printExtendedInfoIfRequested:
/**
 * Writes the extended DESCRIBE FUNCTION details (class, type, resources) to the
 * output stream. Does nothing unless EXTENDED was requested.
 *
 * @param outStream destination stream for the description lines
 * @param functionInfo resolved function info; may be null
 * @param funcClass implementing class; may be null
 * @throws IOException if writing to the stream fails
 */
private void printExtendedInfoIfRequested(DataOutputStream outStream, FunctionInfo functionInfo, Class<?> funcClass) throws IOException {
  if (!desc.isExtended()) {
    return;
  }
  if (funcClass != null) {
    outStream.writeBytes("Function class:" + funcClass.getName() + "\n");
  }
  if (functionInfo == null) {
    return;
  }
  outStream.writeBytes("Function type:" + functionInfo.getFunctionType() + "\n");
  FunctionResource[] resources = functionInfo.getResources();
  if (resources == null) {
    return;
  }
  for (FunctionResource resource : resources) {
    outStream.writeBytes("Resource:" + resource.getResourceURI() + "\n");
  }
}
use of org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource in project hive by apache.
The class FunctionUtils, method toFunctionResource:
/**
 * Builds the {@code FunctionResource} array corresponding to a list of
 * metastore {@code ResourceUri} entries.
 *
 * @param resources metastore resource list; a null input yields a null result
 * @return converted resources in the same order as the input
 * @throws HiveException if a resource type is not recognized
 */
public static FunctionResource[] toFunctionResource(List<ResourceUri> resources) throws HiveException {
  if (resources == null) {
    return null;
  }
  final int count = resources.size();
  FunctionResource[] out = new FunctionResource[count];
  for (int pos = 0; pos < count; pos++) {
    ResourceUri entry = resources.get(pos);
    out[pos] = new FunctionResource(getResourceType(entry.getResourceType()), entry.getUri());
  }
  return out;
}
Aggregations