Example 6 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache, from the execute method of AbstractAlterDataConnectorOperation:

@Override
public int execute() throws HiveException {
    String dcName = desc.getConnectorName();
    DataConnector connector = context.getDb().getDataConnector(dcName);
    if (connector == null) {
        throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, dcName);
    }
    Map<String, String> params = connector.getParameters(); // fetched but unused in this base implementation
    // Copy the values from the ALTER descriptor onto the connector object.
    doAlteration(connector);
    // Persist the modified connector through the HMS metadata operation.
    context.getDb().alterDataConnector(connector.getName(), connector);
    return 0;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector)
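
For context, the base class delegates the actual field changes to doAlteration, which each ALTER CONNECTOR variant overrides. The class below is a hypothetical sketch, not one of the actual Hive subclasses: a property-setting variant could merge the new key/value pairs into the connector's parameter map like this.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.DataConnector;

// Hypothetical sketch (not verbatim Hive code): a doAlteration override for
// ALTER CONNECTOR ... SET DCPROPERTIES that merges the new entries into the
// connector's existing parameter map.
public class SetPropertiesAlterationSketch {

    private final Map<String, String> newProperties; // parsed from the ALTER statement

    public SetPropertiesAlterationSketch(Map<String, String> newProperties) {
        this.newProperties = newProperties;
    }

    protected void doAlteration(DataConnector connector) {
        Map<String, String> params = connector.getParameters();
        if (params == null) {
            params = new HashMap<>();
        }
        params.putAll(newProperties); // existing keys are overwritten, new keys added
        connector.setParameters(params);
    }
}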

Example 7 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache, from the execute method of CreateDataConnectorOperation:

@Override
public int execute() throws HiveException {
    try {
        URI connectorUri = new URI(desc.getURL());
        if (!connectorUri.isAbsolute() || StringUtils.isBlank(connectorUri.getScheme())) {
            // TODO make a new error message for URL
            throw new HiveException(ErrorMsg.INVALID_PATH, desc.getURL());
        }
        DataConnector connector = new DataConnector(desc.getName(), desc.getType(), desc.getURL());
        if (desc.getComment() != null) {
            connector.setDescription(desc.getComment());
        }
        connector.setOwnerName(SessionState.getUserFromAuthenticator());
        connector.setOwnerType(PrincipalType.USER);
        if (desc.getConnectorProperties() != null) {
            connector.setParameters(desc.getConnectorProperties());
        }
        try {
            context.getDb().createDataConnector(connector, desc.getIfNotExists());
        } catch (AlreadyExistsException ex) {
            // it would be better if AlreadyExistsException had an errorCode field....
            throw new HiveException(ex, ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, desc.getName());
        }
        return 0;
    } catch (URISyntaxException e) {
        throw new HiveException(e);
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) URISyntaxException(java.net.URISyntaxException) URI(java.net.URI) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector)
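
The only validation here is that the connector URL parses as an absolute URI with a scheme; comment and properties are optional. Below is a minimal standalone version of that check, with illustrative names and plain String.isBlank (Java 11+) standing in for Commons Lang's StringUtils.isBlank.

import java.net.URI;
import java.net.URISyntaxException;

// Standalone sketch of the URL check above: a connector URL must parse as an
// absolute URI with a non-blank scheme, e.g. "jdbc:mysql://localhost:3306".
public class ConnectorUrlCheckSketch {

    static boolean isValidConnectorUrl(String url) {
        try {
            URI uri = new URI(url);
            return uri.isAbsolute() && uri.getScheme() != null && !uri.getScheme().isBlank();
        } catch (URISyntaxException e) {
            return false; // unparseable URLs are rejected as well
        }
    }

    public static void main(String[] args) {
        System.out.println(isValidConnectorUrl("jdbc:mysql://localhost:3306")); // true
        System.out.println(isValidConnectorUrl("not a uri"));                   // false
    }
}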

Example 8 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache, from the execute method of DescDataConnectorOperation:

@Override
public int execute() throws HiveException {
    try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
        DataConnector connector = context.getDb().getDataConnector(desc.getConnectorName());
        if (connector == null) {
            throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, desc.getConnectorName());
        }
        SortedMap<String, String> params = null;
        if (desc.isExtended()) {
            params = new TreeMap<>(connector.getParameters());
        }
        DescDataConnectorFormatter formatter = DescDataConnectorFormatter.getFormatter(context.getConf());
        formatter.showDataConnectorDescription(outStream, connector.getName(), connector.getType(), connector.getUrl(), connector.getOwnerName(), connector.getOwnerType(), connector.getDescription(), params);
    } catch (Exception e) {
        throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
    }
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) DataOutputStream(java.io.DataOutputStream) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector)
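
One small design choice worth noting: the parameters are copied into a TreeMap only when extended output is requested, so DESCRIBE CONNECTOR EXTENDED prints them in sorted key order rather than whatever order the metastore returns. A tiny self-contained demonstration (the property names are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

// Copying an unordered map into a TreeMap yields stable, key-sorted output.
public class SortedParamsDemo {

    public static void main(String[] args) {
        Map<String, String> params = new LinkedHashMap<>();
        params.put("url.suffix", "useSSL=false");
        params.put("dbcp.maxActive", "8");
        SortedMap<String, String> sorted = new TreeMap<>(params);
        sorted.forEach((k, v) -> System.out.println(k + "=" + v)); // dbcp.maxActive prints first
    }
}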

Example 9 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache, from the addHivePrivObject method of CommandAuthorizerV2:

private static void addHivePrivObject(Entity privObject, Map<String, List<String>> tableName2Cols, List<HivePrivilegeObject> hivePrivObjs, HiveOperationType hiveOpType) throws HiveException {
    HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
    HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
    HivePrivilegeObject hivePrivObject = null;
    switch(privObject.getType()) {
        case DATABASE:
            Database database = privObject.getDatabase();
            hivePrivObject = new HivePrivilegeObject(privObjType, database.getName(), null, null, null, actionType, null, null, database.getOwnerName(), database.getOwnerType());
            break;
        case TABLE:
            Table table = privObject.getTable();
            List<String> columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(table.getDbName(), table.getTableName()));
            hivePrivObject = new HivePrivilegeObject(privObjType, table.getDbName(), table.getTableName(), null, columns, actionType, null, null, table.getOwner(), table.getOwnerType());
            if (table.getStorageHandler() != null) {
                // TODO: add hive privilege object for storage based handlers for create and alter table commands.
                if (hiveOpType == HiveOperationType.CREATETABLE || hiveOpType == HiveOperationType.ALTERTABLE_PROPERTIES || hiveOpType == HiveOperationType.CREATETABLE_AS_SELECT) {
                    try {
                        String storageUri = table.getStorageHandler().getURIForAuth(table.getTTable()).toString();
                        hivePrivObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI, null, storageUri, null, null, actionType, null, table.getStorageHandler().getClass().getName(), table.getOwner(), table.getOwnerType()));
                    } catch (Exception ex) {
                        LOG.error("Exception occurred while getting the URI from storage handler: " + ex.getMessage(), ex);
                        throw new HiveException("Exception occurred while getting the URI from storage handler: " + ex.getMessage());
                    }
                }
            }
            break;
        case DFS_DIR:
        case LOCAL_DIR:
            hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getD().toString(), null, null, actionType, null, null, null, null);
            break;
        case FUNCTION:
            String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
            hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(), null, null, actionType, null, privObject.getClassName(), null, null);
            break;
        case DUMMYPARTITION:
        case PARTITION:
            // TODO: not currently handled
            return;
        case SERVICE_NAME:
            hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getServiceName(), null, null, actionType, null, null, null, null);
            break;
        case DATACONNECTOR:
            DataConnector connector = privObject.getDataConnector();
            hivePrivObject = new HivePrivilegeObject(privObjType, null, connector.getName(), null, null, actionType, null, null, connector.getOwnerName(), connector.getOwnerType());
            break;
        default:
            throw new AssertionError("Unexpected object type");
    }
    hivePrivObjs.add(hivePrivObject);
}
Also used : Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HivePrivObjectActionType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType) Database(org.apache.hadoop.hive.metastore.api.Database) HivePrivilegeObjectType(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector)
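
The method is one large dispatch: every entity touched by a command becomes a HivePrivilegeObject carrying the identity fields the authorizer needs, and the DATACONNECTOR branch passes the connector's name and owner just as the DATABASE branch does for databases. The sketch below compresses that pattern using simplified stand-in types, not the Hive classes themselves.

import java.util.ArrayList;
import java.util.List;

// Simplified stand-ins for Entity and HivePrivilegeObject, to show the shape
// of the dispatch: each entity kind maps to a privilege object with the
// fields authorization needs (name and owner).
public class PrivObjectMappingSketch {

    enum EntityType { DATABASE, DATACONNECTOR, PARTITION }

    static final class PrivObject {
        final EntityType type;
        final String name;
        final String owner;

        PrivObject(EntityType type, String name, String owner) {
            this.type = type;
            this.name = name;
            this.owner = owner;
        }

        @Override
        public String toString() {
            return type + ":" + name + " (owner=" + owner + ")";
        }
    }

    static void addPrivObject(EntityType type, String name, String owner, List<PrivObject> out) {
        switch (type) {
            case PARTITION:
                // mirrors the original TODO: partitions are not authorized individually
                return;
            case DATABASE:
            case DATACONNECTOR:
                out.add(new PrivObject(type, name, owner));
                break;
            default:
                throw new AssertionError("Unexpected object type");
        }
    }

    public static void main(String[] args) {
        List<PrivObject> objs = new ArrayList<>();
        addPrivObject(EntityType.DATACONNECTOR, "mysql_local", "hive", objs);
        System.out.println(objs);
    }
}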

Example 10 with DataConnector

Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache, from the getDataConnectorProvider method of DataConnectorProviderFactory:

public static synchronized IDataConnectorProvider getDataConnectorProvider(Database db) throws MetaException {
    IDataConnectorProvider provider = null;
    DataConnector connector = null;
    if (db.getType() == DatabaseType.NATIVE) {
        throw new MetaException("Database " + db.getName() + " is of type NATIVE, no connector available");
    }
    String scopedDb = (db.getRemote_dbname() != null) ? db.getRemote_dbname() : db.getName();
    if (cache.containsKey(db.getConnector_name().toLowerCase())) {
        provider = cache.get(db.getConnector_name().toLowerCase());
        if (provider != null) {
            provider.setScope(scopedDb);
        }
        return provider;
    }
    try {
        connector = hmsHandler.get_dataconnector_core(db.getConnector_name());
    } catch (NoSuchObjectException notexists) {
        throw new MetaException("Data connector " + db.getConnector_name() + " associated with database " + db.getName() + " does not exist");
    }
    String type = connector.getType();
    switch(type) {
        case DERBY_TYPE:
        case MSSQL_TYPE:
        case MYSQL_TYPE:
        case ORACLE_TYPE:
        case POSTGRES_TYPE:
            try {
                provider = JDBCConnectorProviderFactory.get(scopedDb, connector);
            } catch (Exception e) {
                throw new MetaException("Could not instantiate a provider for database " + db.getName());
            }
            break;
        default:
            throw new MetaException("Data connector of type " + connector.getType() + " not implemented yet");
    }
    cache.put(connector.getName().toLowerCase(), provider);
    return provider;
}
Also used : IDataConnectorProvider(org.apache.hadoop.hive.metastore.dataconnector.IDataConnectorProvider) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
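
Two details do the heavy lifting: providers are cached one per connector, keyed by the lower-cased connector name (connector names are treated case-insensitively), and the cached instance is merely re-scoped to the requested remote database on each lookup. Below is a compact sketch of that pattern with stand-in types; the real factory serializes access with a synchronized method, while this version leans on ConcurrentHashMap for the same per-key safety.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// One provider per connector, re-scoped per lookup. Provider is a stand-in
// for IDataConnectorProvider; the factory function stands in for the
// JDBCConnectorProviderFactory dispatch on connector type.
public class ProviderCacheSketch {

    interface Provider {
        void setScope(String remoteDbName);
    }

    private final Map<String, Provider> cache = new ConcurrentHashMap<>();

    Provider get(String connectorName, String scopedDb, Function<String, Provider> factory) {
        Provider provider = cache.computeIfAbsent(connectorName.toLowerCase(), factory);
        provider.setScope(scopedDb); // shared instance, scope set per call
        return provider;
    }
}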

Aggregations

DataConnector (org.apache.hadoop.hive.metastore.api.DataConnector): 12 usages
DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 4 usages
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 4 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 4 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 3 usages
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 3 usages
HashMap (java.util.HashMap): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 2 usages
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 2 usages
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 2 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 2 usages
Test (org.junit.Test): 2 usages
DataOutputStream (java.io.DataOutputStream): 1 usage
IOException (java.io.IOException): 1 usage
URI (java.net.URI): 1 usage
URISyntaxException (java.net.URISyntaxException): 1 usage
SQLException (java.sql.SQLException): 1 usage
Path (org.apache.hadoop.fs.Path): 1 usage
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 1 usage