Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
The class LlapServiceDriver, method downloadPermanentFunctions.
private Set<String> downloadPermanentFunctions(Configuration conf, Path udfDir)
    throws HiveException, URISyntaxException, IOException {
  Map<String, String> udfs = new HashMap<String, String>();
  HiveConf hiveConf = new HiveConf();
  // disable expensive operations on the metastore
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_INIT_METADATA_COUNT_ENABLED, false);
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, false);
  // performance problem: ObjectStore does its own new HiveConf()
  Hive hive = Hive.getWithFastCheck(hiveConf, false);
  ResourceDownloader resourceDownloader =
      new ResourceDownloader(conf, udfDir.toUri().normalize().getPath());
  List<Function> fns = hive.getAllFunctions();
  Set<URI> srcUris = new HashSet<>();
  for (Function fn : fns) {
    String fqfn = fn.getDbName() + "." + fn.getFunctionName();
    if (udfs.containsKey(fn.getClassName())) {
      LOG.warn("Duplicate function names found for " + fn.getClassName() + " with " + fqfn
          + " and " + udfs.get(fn.getClassName()));
    }
    udfs.put(fn.getClassName(), fqfn);
    List<ResourceUri> resources = fn.getResourceUris();
    if (resources == null || resources.isEmpty()) {
      LOG.warn("Missing resources for " + fqfn);
      continue;
    }
    for (ResourceUri resource : resources) {
      srcUris.add(ResourceDownloader.createURI(resource.getUri()));
    }
  }
  for (URI srcUri : srcUris) {
    List<URI> localUris = resourceDownloader.downloadExternal(srcUri, null, false);
    for (URI dst : localUris) {
      LOG.warn("Downloaded " + dst + " from " + srcUri);
    }
  }
  return udfs.keySet();
}
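As a minimal standalone sketch of the conversion step in the loop above (not part of the Hive source): a single metastore ResourceUri is turned into a java.net.URI with ResourceDownloader.createURI before being downloaded. The jar path and the ResourceDownloader package location shown here are assumptions and may differ by Hive version.

import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.util.ResourceDownloader;

public class ResourceUriConversionSketch {
  public static void main(String[] args) throws URISyntaxException {
    // A metastore-level resource reference for a UDF jar (the path is illustrative).
    ResourceUri resource = new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar");
    // The same conversion the loop above performs before downloading:
    // the stored string becomes a java.net.URI via ResourceDownloader.createURI.
    URI srcUri = ResourceDownloader.createURI(resource.getUri());
    System.out.println(srcUri);
  }
}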
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
The class FunctionTask, method toFunctionResource.
public static FunctionResource[] toFunctionResource(List<ResourceUri> resources)
    throws HiveException {
  if (resources == null) {
    return null;
  }
  FunctionResource[] converted = new FunctionResource[resources.size()];
  for (int i = 0; i < converted.length; i++) {
    ResourceUri resource = resources.get(i);
    SessionState.ResourceType type = getResourceType(resource.getResourceType());
    converted[i] = new FunctionResource(type, resource.getUri());
  }
  return converted;
}
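A hedged usage sketch of the conversion above (not taken from the Hive source): build the thrift-level ResourceUri list that a permanent UDF carries and hand it to FunctionTask.toFunctionResource. The import of FunctionResource as a nested class of FunctionInfo, and the hdfs paths, are assumptions that may vary by Hive version.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
import org.apache.hadoop.hive.ql.exec.FunctionTask;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ToFunctionResourceSketch {
  public static void main(String[] args) throws HiveException {
    // Thrift-level resource list, as stored with a permanent function in the metastore.
    List<ResourceUri> uris = new ArrayList<ResourceUri>();
    uris.add(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar"));
    uris.add(new ResourceUri(ResourceType.FILE, "hdfs:///udfs/lookup.txt"));
    // Convert to the session-level FunctionResource objects used when the UDF is registered.
    FunctionResource[] converted = FunctionTask.toFunctionResource(uris);
    System.out.println("Converted " + converted.length + " resources");
  }
}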
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
The class HBaseUtils, method serializeFunction.
/**
 * Serialize a function
 * @param func function to serialize
 * @return two byte arrays, first contains the key, the second the value.
 */
static byte[][] serializeFunction(Function func) {
  byte[][] result = new byte[2][];
  result[0] = buildKey(func.getDbName(), func.getFunctionName());
  HbaseMetastoreProto.Function.Builder builder = HbaseMetastoreProto.Function.newBuilder();
  if (func.getClassName() != null)
    builder.setClassName(func.getClassName());
  if (func.getOwnerName() != null)
    builder.setOwnerName(func.getOwnerName());
  if (func.getOwnerType() != null) {
    builder.setOwnerType(convertPrincipalTypes(func.getOwnerType()));
  }
  builder.setCreateTime(func.getCreateTime());
  if (func.getFunctionType() != null) {
    builder.setFunctionType(convertFunctionTypes(func.getFunctionType()));
  }
  if (func.getResourceUris() != null) {
    for (ResourceUri uri : func.getResourceUris()) {
      builder.addResourceUris(HbaseMetastoreProto.Function.ResourceUri.newBuilder()
          .setResourceType(convertResourceTypes(uri.getResourceType()))
          .setUri(uri.getUri()));
    }
  }
  result[1] = builder.build().toByteArray();
  return result;
}
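For context, a small sketch (not from HBaseUtils itself) of the thrift Function that serializeFunction consumes, carrying one ResourceUri. Since serializeFunction is package-private, a real caller would have to live in the same package; the db name, class name, and jar path here are illustrative only.

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;

public class SerializeFunctionInputSketch {
  public static void main(String[] args) {
    // A thrift Function carrying a single JAR resource, mirroring what serializeFunction
    // expects above; function name, db, class name, and jar path are illustrative.
    Function func = new Function("myfunc", "default", "com.example.MyUdf", "me",
        PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA,
        Arrays.asList(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar")));
    // byte[][] kv = HBaseUtils.serializeFunction(func);  // only callable from the same package
    System.out.println(func.getResourceUris().get(0).getUri());
  }
}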
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
The class TestHBaseStore, method alterFunction.
@Test
public void alterFunction() throws Exception {
  String funcName = "alterfunc";
  int now = (int) (System.currentTimeMillis() / 1000);
  List<ResourceUri> uris = new ArrayList<ResourceUri>();
  uris.add(new ResourceUri(ResourceType.FILE, "whatever"));
  Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now,
      FunctionType.JAVA, uris);
  store.createFunction(func);
  Function f = store.getFunction(DB, funcName);
  Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
  func.addToResourceUris(new ResourceUri(ResourceType.ARCHIVE, "file"));
  store.alterFunction(DB, funcName, func);
  f = store.getFunction(DB, funcName);
  Assert.assertEquals(2, f.getResourceUrisSize());
  Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
  Assert.assertEquals(ResourceType.ARCHIVE, f.getResourceUris().get(1).getResourceType());
}
Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.
The class TestHBaseStore, method createFunction.
@Test
public void createFunction() throws Exception {
  String funcName = "createfunc";
  int now = (int) (System.currentTimeMillis() / 1000);
  Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now,
      FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere")));
  store.createFunction(func);
  Function f = store.getFunction(DB, funcName);
  Assert.assertEquals(DB, f.getDbName());
  Assert.assertEquals(funcName, f.getFunctionName());
  Assert.assertEquals("o.a.h.h.myfunc", f.getClassName());
  Assert.assertEquals("me", f.getOwnerName());
  Assert.assertEquals(PrincipalType.USER, f.getOwnerType());
  Assert.assertTrue(now <= f.getCreateTime());
  Assert.assertEquals(FunctionType.JAVA, f.getFunctionType());
  Assert.assertEquals(1, f.getResourceUrisSize());
  Assert.assertEquals(ResourceType.JAR, f.getResourceUris().get(0).getResourceType());
  Assert.assertEquals("file:/tmp/somewhere", f.getResourceUris().get(0).getUri());
}
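Tying the entries together, a continuation-style sketch (not part of TestHBaseStore): the ResourceUri list read back from the store can be fed to FunctionTask.toFunctionResource from the earlier entry. It assumes the store and DB fixtures from the tests above and that the ql classes FunctionTask and FunctionResource are on the classpath.

// Continuation of the test above; 'store' and DB come from the TestHBaseStore fixture,
// and FunctionTask/FunctionResource are assumed available (they live in the ql module).
Function fetched = store.getFunction(DB, "createfunc");
FunctionResource[] resources = FunctionTask.toFunctionResource(fetched.getResourceUris());
Assert.assertEquals(1, resources.length);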