
Example 51 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.

The class FunctionSerializer, method writeTo.

@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException {
    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
    List<ResourceUri> resourceUris = new ArrayList<>();
    // For HDFS resources, record a fully qualified URI with its checksum so the replication load side can validate or recover the file.
    for (ResourceUri uri : function.getResourceUris()) {
        Path inputPath = new Path(uri.getUri());
        if ("hdfs".equals(inputPath.toUri().getScheme())) {
            FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
            Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
            String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
            String newFileUri = ReplChangeManager.encodeFileUri(qualifiedUri.toString(), checkSum);
            resourceUris.add(new ResourceUri(uri.getResourceType(), newFileUri));
        } else {
            resourceUris.add(uri);
        }
    }
    Function copyObj = new Function(this.function);
    if (!resourceUris.isEmpty()) {
        assert resourceUris.size() == this.function.getResourceUris().size();
        copyObj.setResourceUris(resourceUris);
    }
    try {
        // This is required; otherwise the correct work object won't be created during repl load.
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.REPL_SCOPE.toString(), "all");
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.CURR_STATE_ID.toString(), additionalPropertiesProvider.getCurrentReplicationState());
        writer.jsonGenerator.writeStringField(FIELD_NAME, serializer.toString(copyObj, UTF_8));
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TException(org.apache.thrift.TException) TSerializer(org.apache.thrift.TSerializer) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) Function(org.apache.hadoop.hive.metastore.api.Function) TJSONProtocol(org.apache.thrift.protocol.TJSONProtocol) FileSystem(org.apache.hadoop.fs.FileSystem) ArrayList(java.util.ArrayList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
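For reference, the Thrift JSON round trip that writeTo builds on can be exercised on its own. The sketch below is a minimal, self-contained example, not the actual repl dump format: the database, owner, and JAR names are made up, and it skips the ReplChangeManager URI rewriting and the surrounding JsonWriter fields. It serializes a Function with TSerializer and TJSONProtocol and reads it back with TDeserializer, which is the same mechanism the dump and load paths use for the serialized function payload.

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TJSONProtocol;

public class FunctionJsonRoundTrip {
    public static void main(String[] args) throws TException {
        // Hypothetical function metadata; all names and the JAR location are illustrative only.
        Function fn = new Function();
        fn.setDbName("example_db");
        fn.setFunctionName("example_upper");
        fn.setClassName("org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper");
        fn.setOwnerName("example_owner");
        fn.setOwnerType(PrincipalType.USER);
        fn.setCreateTime((int) (System.currentTimeMillis() / 1000));
        fn.setFunctionType(FunctionType.JAVA);
        fn.setResourceUris(Collections.singletonList(
                new ResourceUri(ResourceType.JAR, "hdfs://namenode:8020/udfs/example.jar")));

        // Serialize to a JSON string, as FunctionSerializer.writeTo does for the dump payload.
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        String json = serializer.toString(fn);

        // Deserialize into an empty Function, as the load side would.
        Function copy = new Function();
        new TDeserializer(new TJSONProtocol.Factory())
                .deserialize(copy, json.getBytes(StandardCharsets.UTF_8));

        System.out.println(copy.getDbName() + "." + copy.getFunctionName());
    }
}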

Example 52 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.

The class TestHiveMetaStore, method testSimpleFunction.

@Test
public void testSimpleFunction() throws Exception {
    String dbName = "test_db";
    String funcName = "test_func";
    String className = "org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper";
    String owner = "test_owner";
    final int N_FUNCTIONS = 5;
    PrincipalType ownerType = PrincipalType.USER;
    int createTime = (int) (System.currentTimeMillis() / 1000);
    FunctionType funcType = FunctionType.JAVA;
    try {
        cleanUp(dbName, null, null);
        for (Function f : client.getAllFunctions().getFunctions()) {
            client.dropFunction(f.getDbName(), f.getFunctionName());
        }
        createDb(dbName);
        for (int i = 0; i < N_FUNCTIONS; i++) {
            createFunction(dbName, funcName + "_" + i, className, owner, ownerType, createTime, funcType, null);
        }
        // Try the different getters
        // getFunction()
        Function func = client.getFunction(dbName, funcName + "_0");
        assertEquals("function db name", dbName, func.getDbName());
        assertEquals("function name", funcName + "_0", func.getFunctionName());
        assertEquals("function class name", className, func.getClassName());
        assertEquals("function owner name", owner, func.getOwnerName());
        assertEquals("function owner type", PrincipalType.USER, func.getOwnerType());
        assertEquals("function type", funcType, func.getFunctionType());
        List<ResourceUri> resources = func.getResourceUris();
        assertTrue("function resources", resources == null || resources.size() == 0);
        boolean gotException = false;
        try {
            func = client.getFunction(dbName, "nonexistent_func");
        } catch (NoSuchObjectException e) {
            // expected failure
            gotException = true;
        }
        assertTrue(gotException);
        // getAllFunctions()
        GetAllFunctionsResponse response = client.getAllFunctions();
        List<Function> allFunctions = response.getFunctions();
        assertEquals(N_FUNCTIONS, allFunctions.size());
        assertEquals(funcName + "_3", allFunctions.get(3).getFunctionName());
        // getFunctions()
        List<String> funcs = client.getFunctions(dbName, "*_func_*");
        assertEquals(N_FUNCTIONS, funcs.size());
        assertEquals(funcName + "_0", funcs.get(0));
        funcs = client.getFunctions(dbName, "nonexistent_func");
        assertEquals(0, funcs.size());
        // dropFunction()
        for (int i = 0; i < N_FUNCTIONS; i++) {
            client.dropFunction(dbName, funcName + "_" + i);
        }
        // Confirm that the functions are now gone
        funcs = client.getFunctions(dbName, funcName);
        assertEquals(0, funcs.size());
        response = client.getAllFunctions();
        allFunctions = response.getFunctions();
        assertEquals(0, allFunctions.size());
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testConcurrentMetastores() failed.");
        throw e;
    } finally {
        silentDropDatabase(dbName);
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Function(org.apache.hadoop.hive.metastore.api.Function) GetAllFunctionsResponse(org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) Test(org.junit.Test)
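The test relies on a createFunction helper defined elsewhere in TestHiveMetaStore and not shown on this page. A plausible minimal equivalent is sketched below, assuming the client is an IMetaStoreClient; it fills the Thrift Function struct through its setters and registers it with createFunction. The real helper may differ in details.

import java.util.List;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;

public class FunctionTestHelper {

    // Sketch of a createFunction-style helper: populate the Thrift Function struct
    // and register it through the metastore client. Not the exact helper from TestHiveMetaStore.
    public static void createFunction(IMetaStoreClient client, String dbName, String funcName,
            String className, String ownerName, PrincipalType ownerType, int createTime,
            FunctionType functionType, List<ResourceUri> resources) throws Exception {
        Function func = new Function();
        func.setFunctionName(funcName);
        func.setDbName(dbName);
        func.setClassName(className);
        func.setOwnerName(ownerName);
        func.setOwnerType(ownerType);
        func.setCreateTime(createTime);
        func.setFunctionType(functionType);
        // The test above passes null for resources, which simply leaves the field unset.
        func.setResourceUris(resources);
        client.createFunction(func);
    }
}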

Example 53 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.

The class TestFunctions, method testGetFunctionNoSuchFunctionInThisDatabase.

@Test(expected = NoSuchObjectException.class)
public void testGetFunctionNoSuchFunctionInThisDatabase() throws Exception {
    // Choosing the 2nd function, since the 1st one is duplicated in the dummy database
    Function function = testFunctions[1];
    client.getFunction(OTHER_DATABASE, function.getFunctionName());
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 54 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.

The class TestFunctions, method testDropFunctionNoSuchDatabase.

@Test(expected = NoSuchObjectException.class)
public void testDropFunctionNoSuchDatabase() throws Exception {
    // Choosing the 2nd function, since the 1st one is duplicated in the dummy database
    Function function = testFunctions[1];
    client.dropFunction("no_such_database", function.getFunctionName());
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 55 with Function

Use of org.apache.hadoop.hive.metastore.api.Function in project hive by apache.

The class TestFunctions, method testGetFunctionNoSuchFunction.

@Test(expected = NoSuchObjectException.class)
public void testGetFunctionNoSuchFunction() throws Exception {
    // Choosing the 2nd function, since the 1st one is duplicated in the dummy database
    Function function = testFunctions[1];
    client.getFunction(function.getDbName(), "no_such_function");
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
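Examples 53 through 55 all follow the same negative-path pattern: call the metastore client with a database or function name that does not exist and let JUnit's expected attribute assert the NoSuchObjectException. For comparison, a minimal variant of the same check written with an explicit try/catch is sketched below; the client initialization and the "some_db"/"no_such_function" names are assumptions, not the TestFunctions fixture.

import static org.junit.Assert.fail;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.junit.Test;

public class FunctionNegativePathSketch {

    // Assumed to be initialized in a @Before method; shown here only so the sketch is self-contained.
    private IMetaStoreClient client;

    @Test
    public void testGetFunctionNoSuchFunctionExplicit() throws Exception {
        try {
            // Illustrative names; any database/function pair known to be absent works.
            client.getFunction("some_db", "no_such_function");
            fail("Expected NoSuchObjectException for a missing function");
        } catch (NoSuchObjectException expected) {
            // Expected: the metastore reports a missing function with NoSuchObjectException.
        }
    }
}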

Aggregations

Function (org.apache.hadoop.hive.metastore.api.Function): 69
Test (org.junit.Test): 47
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 38
ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri): 17
TTransportException (org.apache.thrift.transport.TTransportException): 11
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 9
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 8
ArrayList (java.util.ArrayList): 7
IOException (java.io.IOException): 6
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 6
FunctionBuilder (org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder): 6
Database (org.apache.hadoop.hive.metastore.api.Database): 4
NotificationEvent (org.apache.hadoop.hive.metastore.api.NotificationEvent): 4
TApplicationException (org.apache.thrift.TApplicationException): 4
TException (org.apache.thrift.TException): 4
HashSet (java.util.HashSet): 3
Path (org.apache.hadoop.fs.Path): 3
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 3
Partition (org.apache.hadoop.hive.metastore.api.Partition): 3
SQLException (java.sql.SQLException): 2