
Example 21 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

From the class TestHBaseImport, method setupObjectStore:

private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames, String[] tokenIds, String[] tokens, String[] masterKeys, int now, boolean putConstraintsOnTables) throws MetaException, InvalidObjectException, NoSuchObjectException {
    if (roles != null) {
        for (int i = 0; i < roles.length; i++) {
            rdbms.addRole(roles[i], "me");
        }
    }
    for (int i = 0; i < dbNames.length; i++) {
        rdbms.createDatabase(new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
        List<FieldSchema> cols = new ArrayList<>();
        cols.add(new FieldSchema("col1", "int", "nocomment"));
        SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
        StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
        rdbms.createTable(new Table(tableNames[0], dbNames[i], "me", now, now, 0, sd, null, emptyParameters, null, null, null));
        if (putConstraintsOnTables) {
            rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i], tableNames[0], "col1", 0, dbNames[i] + "_" + pkNames[0], true, false, true)));
        }
        List<FieldSchema> partCols = new ArrayList<>();
        partCols.add(new FieldSchema("region", "string", ""));
        rdbms.createTable(new Table(tableNames[1], dbNames[i], "me", now, now, 0, sd, partCols, emptyParameters, null, null, null));
        if (putConstraintsOnTables) {
            rdbms.addPrimaryKeys(Arrays.asList(new SQLPrimaryKey(dbNames[i], tableNames[1], "col1", 0, dbNames[i] + "_" + pkNames[1], true, false, true)));
            rdbms.addForeignKeys(Collections.singletonList(new SQLForeignKey(dbNames[i], tableNames[0], "col1", dbNames[i], tableNames[1], "col1", 0, 1, 2, dbNames[i] + "_" + fkNames[1], dbNames[i] + "_" + pkNames[0], true, false, true)));
        }
        for (int j = 0; j < partVals.length; j++) {
            StorageDescriptor psd = new StorageDescriptor(sd);
            psd.setLocation("file:/tmp/region=" + partVals[j]);
            Partition part = new Partition(Arrays.asList(partVals[j]), dbNames[i], tableNames[1], now, now, psd, emptyParameters);
            rdbms.addPartition(part);
        }
        for (String funcName : funcNames) {
            LOG.debug("Creating new function " + dbNames[i] + "." + funcName);
            // Divide before casting so the epoch-seconds value fits in an int (casting the raw millis first would overflow).
            rdbms.createFunction(new Function(funcName, dbNames[i], "classname", "ownername", PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "uri"))));
        }
        for (String indexName : indexNames) {
            LOG.debug("Creating new index " + dbNames[i] + "." + tableNames[0] + "." + indexName);
            String indexTableName = tableNames[0] + "__" + indexName + "__";
            rdbms.createTable(new Table(indexTableName, dbNames[i], "me", now, now, 0, sd, partCols, emptyParameters, null, null, null));
            rdbms.addIndex(new Index(indexName, null, dbNames[i], tableNames[0], now, now, indexTableName, sd, emptyParameters, false));
        }
    }
    if (tokenIds != null) {
        for (int i = 0; i < tokenIds.length; i++) {
            rdbms.addToken(tokenIds[i], tokens[i]);
        }
    }
    if (masterKeys != null) {
        for (int i = 0; i < masterKeys.length; i++) {
            masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
        }
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Index(org.apache.hadoop.hive.metastore.api.Index) Function(org.apache.hadoop.hive.metastore.api.Function) Database(org.apache.hadoop.hive.metastore.api.Database)
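
For orientation, the ResourceUri and Function construction used throughout this helper can be exercised on its own. Below is a minimal sketch using the same metastore API constructors shown above; the class name, owner, UDF class, and JAR location are illustrative placeholders:

import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;

public class ResourceUriSketch {
    /** Builds a Function carrying a single JAR ResourceUri, as setupObjectStore does. */
    public static Function buildFunction(String dbName, String funcName) {
        // A ResourceUri pairs a resource type (JAR, FILE, or ARCHIVE) with its location.
        ResourceUri jar = new ResourceUri(ResourceType.JAR, "file:/tmp/myudf.jar");
        int now = (int) (System.currentTimeMillis() / 1000);
        return new Function(funcName, dbName, "com.example.MyUdf", "owner",
                PrincipalType.USER, now, FunctionType.JAVA, Arrays.asList(jar));
    }
}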

Example 22 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

From the class TestHBaseStoreIntegration, method getFuncsRegex:

@Test
public void getFuncsRegex() throws Exception {
    String dbname = "default";
    int now = (int) (System.currentTimeMillis() / 1000);
    String[] funcNames = new String[3];
    for (int i = 0; i < funcNames.length; i++) {
        funcNames[i] = "func" + i;
        store.createFunction(new Function(funcNames[i], dbname, "o.a.h.h.myfunc", "me", PrincipalType.USER, now, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere"))));
    }
    List<String> funcs = store.getFunctions(dbname, "func1|func2");
    Assert.assertEquals(2, funcs.size());
    String[] namesFromStore = funcs.toArray(new String[2]);
    Arrays.sort(namesFromStore);
    Assert.assertArrayEquals(Arrays.copyOfRange(funcNames, 1, 3), namesFromStore);
    funcs = store.getFunctions(dbname, "func*");
    Assert.assertEquals(3, funcs.size());
    namesFromStore = funcs.toArray(new String[3]);
    Arrays.sort(namesFromStore);
    Assert.assertArrayEquals(funcNames, namesFromStore);
    funcs = store.getFunctions("nosuchdb", "func*");
    Assert.assertEquals(0, funcs.size());
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) Test(org.junit.Test)
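
The pattern filtering itself happens inside the store. As a rough standalone illustration of the same match-then-sort verification, here is a sketch using plain java.util.regex against an in-memory list (the func0..func2 names mirror the test; no metastore is involved):

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class FuncFilterSketch {
    /** Mimics the test's "func1|func2" lookup against an in-memory list of names. */
    public static List<String> filter(List<String> names, String regex) {
        Pattern p = Pattern.compile(regex);
        return names.stream()
                .filter(n -> p.matcher(n).matches())
                .sorted()
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> all = Arrays.asList("func0", "func1", "func2");
        // Matches func1 and func2, analogous to the test's first assertion.
        System.out.println(filter(all, "func1|func2"));
    }
}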

Example 23 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

From the class TestHBaseStore, method dropFunction:

@Test
public void dropFunction() throws Exception {
    String funcName = "delfunc";
    int now = (int) (System.currentTimeMillis() / 1000);
    Function func = new Function(funcName, DB, "o.a.h.h.myfunc", "me", PrincipalType.USER, now, FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "file:/tmp/somewhere")));
    store.createFunction(func);
    Function f = store.getFunction(DB, funcName);
    Assert.assertNotNull(f);
    store.dropFunction(DB, funcName);
    // thrown.expect(NoSuchObjectException.class); getFunction returns null for a missing function rather than throwing
    Assert.assertNull(store.getFunction(DB, funcName));
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) Test(org.junit.Test)
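
The commented-out thrown.expect line points at an alternative assertion style. If a store implementation threw NoSuchObjectException for a missing function instead of returning null, a JUnit 4 test could declare the expectation with the ExpectedException rule. A minimal self-contained sketch follows; the direct throw is a stand-in for the (hypothetical) store call:

import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class DropFunctionExpectSketch {
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void getAfterDropThrows() throws Exception {
        thrown.expect(NoSuchObjectException.class);
        // Stand-in for store.getFunction(DB, funcName) on a store that throws when absent.
        throw new NoSuchObjectException("delfunc does not exist");
    }
}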

Example 24 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

From the class FunctionSemanticAnalyzer, method addEntities:

/**
 * Add write entities to the semantic analyzer to restrict function creation to privileged users.
 */
private void addEntities(String functionName, String className, boolean isTemporaryFunction, List<ResourceUri> resources) throws SemanticException {
    // If the function is being added under a database 'namespace', then add an entity representing
    // the database (only applicable to permanent/metastore functions).
    // We also add a second entity representing the function name.
    // The authorization api implementation can decide which entities it wants to use to
    // authorize the create/drop function call.
    // Add the relevant database 'namespace' as a WriteEntity
    Database database = null;
    // it matters only for permanent functions
    if (!isTemporaryFunction) {
        try {
            String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
            String dbName = qualifiedNameParts[0];
            functionName = qualifiedNameParts[1];
            database = getDatabase(dbName);
        } catch (HiveException e) {
            LOG.error("Failed to get database ", e);
            throw new SemanticException(e);
        }
    }
    if (database != null) {
        outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
    }
    // Add the function name as a WriteEntity
    outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
    if (resources != null) {
        for (ResourceUri resource : resources) {
            String uriPath = resource.getUri();
            outputs.add(toWriteEntity(uriPath));
        }
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Database(org.apache.hadoop.hive.metastore.api.Database) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
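
The database/function split above is done by FunctionUtils.getQualifiedFunctionNameParts. Below is a plain re-implementation for illustration only (not the Hive utility itself), showing the qualified-name convention the analyzer relies on:

public class QualifiedNameSketch {
    /** Splits "db.fn" into {db, fn}; an unqualified name falls back to a default database. */
    public static String[] splitQualifiedName(String functionName, String defaultDb) {
        int dot = functionName.indexOf('.');
        if (dot < 0) {
            return new String[] { defaultDb, functionName };
        }
        return new String[] { functionName.substring(0, dot), functionName.substring(dot + 1) };
    }

    public static void main(String[] args) {
        String[] parts = splitQualifiedName("mydb.myfunc", "default");
        System.out.println(parts[0] + " / " + parts[1]); // prints: mydb / myfunc
    }
}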

Example 25 with ResourceUri

Use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

From the class FunctionLocalizer, method localizeFunctionResources:

private void localizeFunctionResources(String fqfn, List<ResourceUri> resources, String className, FnResources result, boolean doRefreshClassloader) throws URISyntaxException, IOException {
    // We will download into fn-scoped subdirectories to avoid name collisions (we assume there
    // are no collisions within the same fn). That doesn't mean we download for every fn.
    if (LOG.isInfoEnabled()) {
        LOG.info("Localizing " + resources.size() + " resources for " + fqfn);
    }
    for (ResourceUri resource : resources) {
        URI srcUri = ResourceDownloader.createURI(resource.getUri());
        ResourceType rt = FunctionUtils.getResourceType(resource.getResourceType());
        localizeOneResource(fqfn, srcUri, rt, result);
    }
    recentlyLocalizedClasses.add(className);
    if (doRefreshClassloader) {
        refreshClassloader();
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) ResourceType(org.apache.hadoop.hive.ql.session.SessionState.ResourceType) URI(java.net.URI)
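
Each metastore ResourceUri is turned into a java.net.URI plus a session-level resource type before download. Here is a dependency-light sketch of the same iteration, with ResourceDownloader.createURI replaced by direct URI construction (the file paths are illustrative):

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;

public class LocalizeSketch {
    public static void main(String[] args) throws URISyntaxException {
        List<ResourceUri> resources = Arrays.asList(
                new ResourceUri(ResourceType.JAR, "file:/tmp/udf.jar"),
                new ResourceUri(ResourceType.FILE, "file:/tmp/lookup.txt"));
        for (ResourceUri resource : resources) {
            // The real code also maps the metastore ResourceType to
            // SessionState.ResourceType via FunctionUtils.getResourceType.
            URI srcUri = new URI(resource.getUri());
            System.out.println(resource.getResourceType() + " -> " + srcUri);
        }
    }
}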

Aggregations

ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri): 34 uses
Function (org.apache.hadoop.hive.metastore.api.Function): 22 uses
Test (org.junit.Test): 13 uses
ArrayList (java.util.ArrayList): 6 uses
Database (org.apache.hadoop.hive.metastore.api.Database): 5 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 5 uses
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 5 uses
FunctionBuilder (org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder): 5 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 5 uses
IOException (java.io.IOException): 4 uses
URI (java.net.URI): 4 uses
HashSet (java.util.HashSet): 4 uses
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 4 uses
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 3 uses
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 3 uses
Hive (org.apache.hadoop.hive.ql.metadata.Hive): 3 uses
SQLException (java.sql.SQLException): 2 uses
HashMap (java.util.HashMap): 2 uses
Path (org.apache.hadoop.fs.Path): 2 uses
ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException): 2 uses