Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class HBaseReadWrite, the method printFunction:
/**
 * Print out a function.
 * @param key key under which the function is stored; must include the dbname
 * @return string dump of the function
 * @throws IOException
 * @throws TException
 */
String printFunction(String key) throws IOException, TException {
  byte[] k = HBaseUtils.buildKey(key);
  byte[] serialized = read(FUNC_TABLE, k, CATALOG_CF, CATALOG_COL);
  if (serialized == null) return noSuch(key, "function");
  Function func = HBaseUtils.deserializeFunction(k, serialized);
  return dumpThriftObject(func);
}
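A minimal calling sketch, hedged: printFunction is package-private, so the caller would live alongside HBaseReadWrite, and both the handle and the key below are illustrative assumptions rather than values from the snippet above.

// Keys in FUNC_TABLE are qualified by database name, so callers pass
// "dbname.funcname". Returns a thrift dump of the function, or the
// noSuch(...) message when the key is absent.
String dumpMyUdf(HBaseReadWrite hrw) throws IOException, TException {
  return hrw.printFunction("default.my_udf");
}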
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class HBaseStore, the method getFunctions:
@Override
public List<String> getFunctions(String dbName, String pattern) throws MetaException {
  boolean commit = false;
  openTransaction();
  try {
    // Convert the Hive-style pattern to a regex and scan the function table.
    List<Function> funcs = getHBase().scanFunctions(dbName, likeToRegex(pattern));
    List<String> funcNames = new ArrayList<String>(funcs.size());
    for (Function func : funcs) funcNames.add(func.getFunctionName());
    commit = true;
    return funcNames;
  } catch (IOException e) {
    LOG.error("Unable to get functions: " + e);
    throw new MetaException("Unable to read from or write to hbase " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
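getFunctions converts the Hive-style pattern to a regex before handing it to scanFunctions. The project's likeToRegex is not shown in this snippet; a plausible stand-in, consistent with the tests below where '*' is a wildcard and '|' separates alternatives, might look like this:

// Hypothetical stand-in for likeToRegex (an assumption, not the project's
// code): '|' already means alternation in Java regex, so only '*' needs
// rewriting, and a null pattern matches everything.
private String likeToRegex(String pattern) {
  if (pattern == null) return ".*";
  return pattern.replace("*", ".*");
}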
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class FunctionTask, the method createPermanentFunction:
// TODO: authorization
private int createPermanentFunction(Hive db, CreateFunctionDesc createFunctionDesc)
    throws HiveException, IOException {
  String[] qualifiedNameParts =
      FunctionUtils.getQualifiedFunctionNameParts(createFunctionDesc.getFunctionName());
  String dbName = qualifiedNameParts[0];
  String funcName = qualifiedNameParts[1];
  String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
  String className = createFunctionDesc.getClassName();
  List<ResourceUri> resources = createFunctionDesc.getResources();
  // For permanent functions, check for any resources from the local filesystem.
  checkLocalFunctionResources(db, createFunctionDesc.getResources());
  FunctionInfo registered = null;
  try {
    registered = FunctionRegistry.registerPermanentFunction(
        registeredName, className, true, toFunctionResource(resources));
  } catch (RuntimeException ex) {
    // Unwind to the root cause; a failed registration leaves 'registered'
    // null and is reported via the check below.
    Throwable t = ex;
    while (t.getCause() != null) {
      t = t.getCause();
    }
  }
  if (registered == null) {
    console.printError("Failed to register " + registeredName
        + " using class " + createFunctionDesc.getClassName());
    return 1;
  }
  // Add to metastore
  Function func = new Function(funcName, dbName, className,
      SessionState.get().getUserName(), PrincipalType.USER,
      (int) (System.currentTimeMillis() / 1000),
      org.apache.hadoop.hive.metastore.api.FunctionType.JAVA, resources);
  db.createFunction(func);
  return 0;
}
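For reference, the positional arguments of the thrift Function constructor used above, filled with purely illustrative values:

Function func = new Function(
    "my_udf",                                   // unqualified function name
    "default",                                  // database the function belongs to
    "com.example.MyUdf",                        // implementing class (illustrative)
    "hiveuser",                                 // owner name (illustrative)
    PrincipalType.USER,                         // owner type
    (int) (System.currentTimeMillis() / 1000),  // create time, seconds since epoch
    FunctionType.JAVA,                          // the only type used in these snippets
    Arrays.asList(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar")));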
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestHBaseImport, the method setupObjectStore:
private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames,
    String[] tokenIds, String[] tokens, String[] masterKeys, int now,
    boolean putConstraintsOnTables)
    throws MetaException, InvalidObjectException, NoSuchObjectException {
  if (roles != null) {
    for (int i = 0; i < roles.length; i++) {
      rdbms.addRole(roles[i], "me");
    }
  }
  for (int i = 0; i < dbNames.length; i++) {
    rdbms.createDatabase(
        new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
    // One unpartitioned table per database.
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output",
        false, 0, serde, null, null, emptyParameters);
    rdbms.createTable(new Table(tableNames[0], dbNames[i], "me", now, now, 0, sd, null,
        emptyParameters, null, null, null));
    if (putConstraintsOnTables) {
      rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i],
          tableNames[0], "col1", 0, dbNames[i] + "_" + pkNames[0], true, false, true)));
    }
    // One partitioned table per database, with a primary and a foreign key when requested.
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("region", "string", ""));
    rdbms.createTable(new Table(tableNames[1], dbNames[i], "me", now, now, 0, sd,
        partCols, emptyParameters, null, null, null));
    if (putConstraintsOnTables) {
      rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i],
          tableNames[1], "col1", 0, dbNames[i] + "_" + pkNames[1], true, false, true)));
      rdbms.addForeignKeys(Collections.singletonList(new SQLForeignKey(dbNames[i],
          tableNames[0], "col1", dbNames[i], tableNames[1], "col1", 0, 1, 2,
          dbNames[i] + "_" + fkNames[1], dbNames[i] + "_" + pkNames[0], true, false, true)));
    }
    for (int j = 0; j < partVals.length; j++) {
      StorageDescriptor psd = new StorageDescriptor(sd);
      psd.setLocation("file:/tmp/region=" + partVals[j]);
      Partition part = new Partition(Arrays.asList(partVals[j]), dbNames[i],
          tableNames[1], now, now, psd, emptyParameters);
      rdbms.addPartition(part);
    }
    for (String funcName : funcNames) {
      LOG.debug("Creating new function " + dbNames[i] + "." + funcName);
      // The cast must wrap the division; casting the raw milliseconds to int
      // first would overflow before the value is scaled to seconds.
      rdbms.createFunction(new Function(funcName, dbNames[i], "classname", "ownername",
          PrincipalType.USER, (int) (System.currentTimeMillis() / 1000),
          FunctionType.JAVA,
          Arrays.asList(new ResourceUri(ResourceType.JAR, "uri"))));
    }
    for (String indexName : indexNames) {
      LOG.debug("Creating new index " + dbNames[i] + "." + tableNames[0] + "." + indexName);
      String indexTableName = tableNames[0] + "__" + indexName + "__";
      rdbms.createTable(new Table(indexTableName, dbNames[i], "me", now, now, 0, sd,
          partCols, emptyParameters, null, null, null));
      rdbms.addIndex(new Index(indexName, null, dbNames[i], tableNames[0], now, now,
          indexTableName, sd, emptyParameters, false));
    }
  }
  if (tokenIds != null) {
    for (int i = 0; i < tokenIds.length; i++) rdbms.addToken(tokenIds[i], tokens[i]);
  }
  if (masterKeys != null) {
    for (int i = 0; i < masterKeys.length; i++) {
      masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
    }
  }
}
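A hedged example of how a test might drive this setup; every argument value below is illustrative, not taken from the test class:

// Two databases with constraints enabled; no roles, tokens, or master keys.
int now = (int) (System.currentTimeMillis() / 1000);
setupObjectStore(rdbms, null, new String[] {"db1", "db2"}, null, null, null, now, true);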
Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.
From the class TestHBaseStoreIntegration, the method getFuncsRegex:
@Test
public void getFuncsRegex() throws Exception {
  String dbname = "default";
  int now = (int) (System.currentTimeMillis() / 1000);
  String[] funcNames = new String[3];
  for (int i = 0; i < funcNames.length; i++) {
    funcNames[i] = "func" + i;
    store.createFunction(new Function(funcNames[i], dbname, "o.a.h.h.myfunc", "me",
        PrincipalType.USER, now, FunctionType.JAVA,
        Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere"))));
  }
  // Alternation: expect func1 and func2 but not func0.
  List<String> funcs = store.getFunctions(dbname, "func1|func2");
  Assert.assertEquals(2, funcs.size());
  String[] namesFromStore = funcs.toArray(new String[2]);
  Arrays.sort(namesFromStore);
  Assert.assertArrayEquals(Arrays.copyOfRange(funcNames, 1, 3), namesFromStore);
  // Wildcard: expect all three functions.
  funcs = store.getFunctions(dbname, "func*");
  Assert.assertEquals(3, funcs.size());
  namesFromStore = funcs.toArray(new String[3]);
  Arrays.sort(namesFromStore);
  Assert.assertArrayEquals(funcNames, namesFromStore);
  // Unknown database: expect an empty result rather than an exception.
  funcs = store.getFunctions("nosuchdb", "func*");
  Assert.assertEquals(0, funcs.size());
}
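A related single-name lookup, sketched on the assumption that the store under test exposes RawStore's getFunction(dbName, funcName):

// Hedged: fetch one function directly by name rather than by pattern.
Function func = store.getFunction("default", "func0");
Assert.assertEquals("o.a.h.h.myfunc", func.getClassName());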