Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class LlapServiceDriver, the method downloadPermanentFunctions:
private Set<String> downloadPermanentFunctions(Configuration conf, Path udfDir) throws HiveException, URISyntaxException, IOException {
  Map<String, String> udfs = new HashMap<String, String>();
  HiveConf hiveConf = new HiveConf();
  // disable expensive operations on the metastore
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_INIT_METADATA_COUNT_ENABLED, false);
  hiveConf.setBoolVar(HiveConf.ConfVars.METASTORE_METRICS, false);
  // performance problem: ObjectStore does its own new HiveConf()
  Hive hive = Hive.getWithFastCheck(hiveConf, false);
  ResourceDownloader resourceDownloader = new ResourceDownloader(conf, udfDir.toUri().normalize().getPath());
  List<Function> fns = hive.getAllFunctions();
  Set<URI> srcUris = new HashSet<>();
  for (Function fn : fns) {
    String fqfn = fn.getDbName() + "." + fn.getFunctionName();
    if (udfs.containsKey(fn.getClassName())) {
      LOG.warn("Duplicate function names found for " + fn.getClassName() + " with " + fqfn + " and " + udfs.get(fn.getClassName()));
    }
    udfs.put(fn.getClassName(), fqfn);
    List<ResourceUri> resources = fn.getResourceUris();
    if (resources == null || resources.isEmpty()) {
      LOG.warn("Missing resources for " + fqfn);
      continue;
    }
    for (ResourceUri resource : resources) {
      srcUris.add(ResourceDownloader.createURI(resource.getUri()));
    }
  }
  for (URI srcUri : srcUris) {
    List<URI> localUris = resourceDownloader.downloadExternal(srcUri, null, false);
    for (URI dst : localUris) {
      LOG.warn("Downloaded " + dst + " from " + srcUri);
    }
  }
  return udfs.keySet();
}
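For reference, a minimal sketch of what one of the Function objects iterated above might look like, built with the same Thrift constructor used in the TestHBaseStore example further down; all database, function, class, and path names here are made-up illustration values, not anything from the Hive source:

// Illustrative only: hypothetical db/function/class names and resource path.
List<ResourceUri> uris = new ArrayList<ResourceUri>();
uris.add(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/myudf.jar"));
Function fn = new Function("myudf", "mydb", "com.example.MyUdf", "me",
    PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA, uris);
String fqfn = fn.getDbName() + "." + fn.getFunctionName();                    // "mydb.myudf"
URI src = ResourceDownloader.createURI(fn.getResourceUris().get(0).getUri()); // may throw URISyntaxException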
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class HBaseImport, the method copyOneFunction:
private void copyOneFunction(String dbName, String funcName) throws MetaException, InvalidObjectException {
  Function func = rdbmsStore.get().getFunction(dbName, funcName);
  screen("Copying function " + dbName + "." + funcName);
  hbaseStore.get().createFunction(func);
}
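A caller in HBaseImport would invoke this once per function; a minimal sketch of such a loop, assuming the list of function names for the database has already been collected (the copyFunctions helper is hypothetical, not part of the original class):

// Hypothetical wrapper around the copyOneFunction method shown above.
private void copyFunctions(String dbName, List<String> funcNames) throws MetaException, InvalidObjectException {
  for (String funcName : funcNames) {
    copyOneFunction(dbName, funcName);
  }
}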
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class HBaseUtils, the method serializeFunction:
/**
* Serialize a function
* @param func function to serialize
* @return two byte arrays; the first contains the key, the second the value.
*/
static byte[][] serializeFunction(Function func) {
  byte[][] result = new byte[2][];
  result[0] = buildKey(func.getDbName(), func.getFunctionName());
  HbaseMetastoreProto.Function.Builder builder = HbaseMetastoreProto.Function.newBuilder();
  if (func.getClassName() != null) {
    builder.setClassName(func.getClassName());
  }
  if (func.getOwnerName() != null) {
    builder.setOwnerName(func.getOwnerName());
  }
  if (func.getOwnerType() != null) {
    builder.setOwnerType(convertPrincipalTypes(func.getOwnerType()));
  }
  builder.setCreateTime(func.getCreateTime());
  if (func.getFunctionType() != null) {
    builder.setFunctionType(convertFunctionTypes(func.getFunctionType()));
  }
  if (func.getResourceUris() != null) {
    for (ResourceUri uri : func.getResourceUris()) {
      builder.addResourceUris(HbaseMetastoreProto.Function.ResourceUri.newBuilder().setResourceType(convertResourceTypes(uri.getResourceType())).setUri(uri.getUri()));
    }
  }
  result[1] = builder.build().toByteArray();
  return result;
}
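As the Javadoc above states, the result is a key/value pair; a minimal sketch of how a caller might unpack it before writing to HBase, assuming only the two-element layout documented above (the actual HBase put is omitted):

byte[][] kv = serializeFunction(func); // func is a populated metastore Function
byte[] key = kv[0];                    // row key built from db name + function name
byte[] value = kv[1];                  // protobuf-serialized function body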
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class HBaseStore, the method getAllFunctions:
@Override
public List<Function> getAllFunctions() throws MetaException {
  boolean commit = false;
  openTransaction();
  try {
    List<Function> funcs = getHBase().scanFunctions(null, ".*");
    commit = true;
    return funcs;
  } catch (IOException e) {
    LOG.error("Unable to get functions: " + e);
    throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
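A minimal sketch of consuming this method, using only the Function getters already seen in the snippets above (the store variable and the log wording are illustrative assumptions):

for (Function fn : store.getAllFunctions()) {
  // fully qualified name plus implementing class, as in downloadPermanentFunctions above
  LOG.info("Found function " + fn.getDbName() + "." + fn.getFunctionName() + " -> " + fn.getClassName());
}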
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestHBaseStore, the test alterFunction:
@Test
public void alterFunction() throws Exception {
  String funcName = "alterfunc";
  int now = (int) (System.currentTimeMillis() / 1000);
  List<ResourceUri> uris = new ArrayList<ResourceUri>();
  uris.add(new ResourceUri(ResourceType.FILE, "whatever"));
  Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now, FunctionType.JAVA, uris);
  store.createFunction(func);
  Function f = store.getFunction(DB, funcName);
  Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
  func.addToResourceUris(new ResourceUri(ResourceType.ARCHIVE, "file"));
  store.alterFunction(DB, funcName, func);
  f = store.getFunction(DB, funcName);
  Assert.assertEquals(2, f.getResourceUrisSize());
  Assert.assertEquals(ResourceType.FILE, f.getResourceUris().get(0).getResourceType());
  Assert.assertEquals(ResourceType.ARCHIVE, f.getResourceUris().get(1).getResourceType());
}
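The same round trip could also be checked for the scalar fields that serializeFunction persists; a purely illustrative extension of the test above, reusing its f object and only getters already shown in serializeFunction (not part of the original test):

Assert.assertEquals("o.a.h.h.myfunc", f.getClassName());
Assert.assertEquals("me", f.getOwnerName());
Assert.assertEquals(PrincipalType.USER, f.getOwnerType());
Assert.assertEquals(FunctionType.JAVA, f.getFunctionType());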