Search in sources:

Example 1 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.

The class CreateDataConnectorAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    boolean ifNotExists = false;
    String comment = null;
    String url = null;
    String type = null;
    Map<String, String> props = null;
    String connectorName = unescapeIdentifier(root.getChild(0).getText());
    for (int i = 1; i < root.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) root.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATACONNECTORCOMMENT:
                comment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveParser.TOK_DATACONNECTORPROPERTIES:
                props = getProps((ASTNode) childNode.getChild(0));
                break;
            case HiveParser.TOK_DATACONNECTORURL:
                url = unescapeSQLString(childNode.getChild(0).getText());
                // outputs.add(toWriteEntity(url));
                break;
            case HiveParser.TOK_DATACONNECTORTYPE:
                type = unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE CONNECTOR statement");
        }
    }
    DataConnector connector = new DataConnector(connectorName, type, url);
    if (comment != null) {
        connector.setDescription(comment);
    }
    if (props != null) {
        connector.setParameters(props);
    }
    CreateDataConnectorDesc desc = new CreateDataConnectorDesc(connectorName, type, url, ifNotExists, comment, props);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
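
For context, a statement of roughly the following shape is what reaches this analyzer. This is an illustrative sketch, assuming the Hive 4 CREATE CONNECTOR grammar; the clause-to-token mapping is inferred from the switch above, and the DCPROPERTIES keys are placeholders.

// Illustrative DDL only; clause names assume the Hive 4 CREATE CONNECTOR grammar.
String createConnectorDdl =
    "CREATE CONNECTOR IF NOT EXISTS mysql_conn "           // TOK_IFNOTEXISTS
    + "TYPE 'mysql' "                                      // TOK_DATACONNECTORTYPE
    + "URL 'jdbc:mysql://nightly1.apache.org:3306/hive1' " // TOK_DATACONNECTORURL
    + "COMMENT 'test connector' "                          // TOK_DATACONNECTORCOMMENT
    + "WITH DCPROPERTIES ("                                // TOK_DATACONNECTORPROPERTIES
    + "'hive.sql.dbcp.username'='hive',"
    + "'hive.sql.dbcp.password'='hive')";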

Example 2 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.

The class AbstractAlterDataConnectorAnalyzer, method addAlterDataConnectorDesc:

protected void addAlterDataConnectorDesc(AbstractAlterDataConnectorDesc alterDesc) throws SemanticException {
    DataConnector connector = getDataConnector(alterDesc.getConnectorName());
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
Also used: DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
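
A concrete analyzer drives this helper by building its specific AbstractAlterDataConnectorDesc. The sketch below is hypothetical: the SetUrl desc type, its constructor, and the AST child layout are illustrative assumptions; only addAlterDataConnectorDesc comes from the example above.

// Hypothetical subclass sketch; desc type, constructor, and AST layout are assumptions.
public class AlterDataConnectorSetUrlAnalyzerSketch extends AbstractAlterDataConnectorAnalyzer {
    public AlterDataConnectorSetUrlAnalyzerSketch(QueryState queryState) throws SemanticException {
        super(queryState); // assumption: the usual BaseSemanticAnalyzer constructor
    }

    @Override
    public void analyzeInternal(ASTNode root) throws SemanticException {
        String connectorName = unescapeIdentifier(root.getChild(0).getText());
        String newUrl = unescapeSQLString(root.getChild(1).getText());
        // Reuses the shared helper to register the WriteEntity and queue the DDL task.
        addAlterDataConnectorDesc(new AlterDataConnectorSetUrlDesc(connectorName, newUrl));
    }
}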

Example 3 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.

The class DropDataConnectorAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String connectorName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
    DataConnector connector = getDataConnector(connectorName, !ifExists);
    if (connector == null) {
        return;
    }
    inputs.add(new ReadEntity(connector));
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DropDataConnectorDesc desc = new DropDataConnectorDesc(connectorName, ifExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)
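
The ifExists flag mirrors the two DDL forms. Illustrative statements only, assuming the Hive 4 DROP CONNECTOR grammar:

// Illustrative DDL only.
String dropIfExists = "DROP CONNECTOR IF EXISTS mysql_conn"; // getDataConnector(name, false): a missing connector returns null and the analyzer exits quietly
String dropStrict = "DROP CONNECTOR mysql_conn";             // getDataConnector(name, true): a missing connector raises an error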

Example 4 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.

The class TestHiveMetaStore, method testDataConnector:

@Test
public void testDataConnector() throws Throwable {
    final String connector_name1 = "test_connector1";
    final String connector_name2 = "test_connector2";
    final String mysql_type = "mysql";
    final String mysql_url = "jdbc:mysql://nightly1.apache.org:3306/hive1";
    final String postgres_type = "postgres";
    final String postgres_url = "jdbc:postgresql://localhost:5432";
    try {
        DataConnector connector = new DataConnector(connector_name1, mysql_type, mysql_url);
        Map<String, String> params = new HashMap<>();
        params.put(AbstractJDBCConnectorProvider.JDBC_USERNAME, "hive");
        params.put(AbstractJDBCConnectorProvider.JDBC_PASSWORD, "hive");
        connector.setParameters(params);
        client.createDataConnector(connector);
        DataConnector dConn = client.getDataConnector(connector_name1);
        assertNotNull(dConn);
        assertEquals("name of returned data connector is different from that of inserted connector", connector_name1, dConn.getName());
        assertEquals("type of data connector returned is different from the type inserted", mysql_type, dConn.getType());
        assertEquals("url of the data connector returned is different from the url inserted", mysql_url, dConn.getUrl());
        // assertEquals(SecurityUtils.getUser(), dConn.getOwnerName());
        assertEquals(PrincipalType.USER, dConn.getOwnerType());
        assertNotEquals("Size of data connector parameters not as expected", 0, dConn.getParametersSize());
        try {
            client.createDataConnector(connector);
            fail("Creating duplicate connector should fail");
        } catch (Exception e) {
            // expected: duplicate connector names are rejected
        }
        connector = new DataConnector(connector_name2, postgres_type, postgres_url);
        params = new HashMap<>();
        params.put(AbstractJDBCConnectorProvider.JDBC_USERNAME, "hive");
        params.put(AbstractJDBCConnectorProvider.JDBC_PASSWORD, "hive");
        connector.setParameters(params);
        client.createDataConnector(connector);
        dConn = client.getDataConnector(connector_name2);
        assertEquals("name of returned data connector is different from that of inserted connector", connector_name2, dConn.getName());
        assertEquals("type of data connector returned is different from the type inserted", postgres_type, dConn.getType());
        assertEquals("url of the data connector returned is different from the url inserted", postgres_url, dConn.getUrl());
        List<String> connectors = client.getAllDataConnectorNames();
        assertEquals("Number of dataconnectors returned is not as expected", 2, connectors.size());
        DataConnector connector1 = new DataConnector(connector);
        connector1.setUrl(mysql_url);
        client.alterDataConnector(connector.getName(), connector1);
        dConn = client.getDataConnector(connector.getName());
        assertEquals("url of the data connector returned is different from the url inserted", mysql_url, dConn.getUrl());
        // alter data connector parameters
        params.put(AbstractJDBCConnectorProvider.JDBC_NUM_PARTITIONS, "5");
        connector1.setParameters(params);
        client.alterDataConnector(connector.getName(), connector1);
        dConn = client.getDataConnector(connector.getName());
        assertEquals("Size of data connector parameters not as expected", 3, dConn.getParametersSize());
        // alter data connector parameters
        connector1.setOwnerName("hiveadmin");
        connector1.setOwnerType(PrincipalType.ROLE);
        client.alterDataConnector(connector.getName(), connector1);
        dConn = client.getDataConnector(connector.getName());
        assertEquals("Data connector owner name not as expected", "hiveadmin", dConn.getOwnerName());
        assertEquals("Data connector owner type not as expected", PrincipalType.ROLE, dConn.getOwnerType());
        client.dropDataConnector(connector_name1, false, false);
        connectors = client.getAllDataConnectorNames();
        assertEquals("Number of dataconnectors returned is not as expected", 1, connectors.size());
        client.dropDataConnector(connector_name2, false, false);
        connectors = client.getAllDataConnectorNames();
        assertEquals("Number of dataconnectors returned is not as expected", 0, connectors.size());
    } catch (Throwable e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("testDataConnector() failed.");
        throw e;
    }
}
Also used: HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException), ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException), SQLException (java.sql.SQLException), UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException), TException (org.apache.thrift.TException), IOException (java.io.IOException), InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException), NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException), Test (org.junit.Test)
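
Condensed to its essentials, the metastore client round trip exercised by this test looks like the sketch below. Only calls that appear in the test are used; the client construction and the meaning of the two boolean flags to dropDataConnector are assumptions.

// Minimal sketch; assumes a HiveMetaStoreClient built over a configured metastore conf.
IMetaStoreClient client = new HiveMetaStoreClient(conf);
DataConnector conn = new DataConnector("pg_conn", "postgres", "jdbc:postgresql://localhost:5432");
client.createDataConnector(conn);                                 // duplicate names are rejected
DataConnector fetched = client.getDataConnector("pg_conn");       // round-trips name, type, url, owner
List<String> names = client.getAllDataConnectorNames();           // lists every connector
client.alterDataConnector("pg_conn", new DataConnector(fetched)); // replace with an edited copy
client.dropDataConnector("pg_conn", false, false);                // flags assumed: ifNotExists, checkReferences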

Example 5 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.

The class CreateDatabaseAnalyzer, method analyzeInternal:

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifNotExists = false;
    String comment = null;
    String locationUri = null;
    String managedLocationUri = null;
    String type = DatabaseType.NATIVE.name();
    String connectorName = null;
    Map<String, String> props = null;
    for (int i = 1; i < root.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) root.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATABASECOMMENT:
                comment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveParser.TOK_DATABASEPROPERTIES:
                props = getProps((ASTNode) childNode.getChild(0));
                break;
            case HiveParser.TOK_DATABASELOCATION:
                locationUri = unescapeSQLString(childNode.getChild(0).getText());
                outputs.add(toWriteEntity(locationUri));
                break;
            case HiveParser.TOK_DATABASE_MANAGEDLOCATION:
                managedLocationUri = unescapeSQLString(childNode.getChild(0).getText());
                outputs.add(toWriteEntity(managedLocationUri));
                break;
            case HiveParser.TOK_DATACONNECTOR:
                type = DatabaseType.REMOTE.name();
                connectorName = childNode.getChild(0).getText();
                DataConnector connector = getDataConnector(connectorName, true);
                if (connector == null) {
                    throw new SemanticException("Cannot retrieve connector with name: " + connectorName);
                }
                inputs.add(new ReadEntity(connector));
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    CreateDatabaseDesc desc = null;
    Database database = new Database(databaseName, comment, locationUri, props);
    if (type.equalsIgnoreCase(DatabaseType.NATIVE.name())) {
        desc = new CreateDatabaseDesc(databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
        database.setType(DatabaseType.NATIVE);
        // database = new Database(databaseName, comment, locationUri, props);
        if (managedLocationUri != null) {
            database.setManagedLocationUri(managedLocationUri);
        }
    } else {
        String remoteDbName = databaseName;
        // TODO finalize the property name
        if (props != null && props.get("connector.remoteDbName") != null) {
            remoteDbName = props.get("connector.remoteDbName");
        }
        desc = new CreateDatabaseDesc(databaseName, comment, locationUri, null, ifNotExists, props, type, connectorName, remoteDbName);
        database.setConnector_name(connectorName);
        database.setType(DatabaseType.REMOTE);
        database.setRemote_dbname(remoteDbName);
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used: ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity), DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork), ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode), Database (org.apache.hadoop.hive.metastore.api.Database), DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector), WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
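
The REMOTE branch above corresponds to DDL of roughly the following shape. Illustrative only, assuming the Hive 4 CREATE REMOTE DATABASE grammar; the connector.remoteDbName property key is taken directly from the code (its name is still marked TODO there).

// Illustrative DDL only; the USING clause is assumed to produce TOK_DATACONNECTOR.
String createRemoteDb =
    "CREATE REMOTE DATABASE fed_db "
    + "USING mysql_conn "
    + "WITH DBPROPERTIES ('connector.remoteDbName'='hive1')";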

Aggregations

DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector): 12 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 4 usages
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 4 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 4 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 3 usages
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 3 usages
HashMap (java.util.HashMap): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 2 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 2 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 2 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2 usages
Test (org.junit.Test): 2 usages
DataOutputStream (java.io.DataOutputStream): 1 usage
IOException (java.io.IOException): 1 usage
URI (java.net.URI): 1 usage
URISyntaxException (java.net.URISyntaxException): 1 usage
SQLException (java.sql.SQLException): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 1 usage