Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.
The class AbstractAlterDataConnectorOperation, method execute.
/**
 * Applies an ALTER DATACONNECTOR change: loads the connector from the metastore,
 * lets the concrete subclass mutate it via {@code doAlteration}, then persists it.
 *
 * @return 0 on success
 * @throws HiveException if no connector with the given name exists
 */
@Override
public int execute() throws HiveException {
  String dcName = desc.getConnectorName();
  DataConnector connector = context.getDb().getDataConnector(dcName);
  if (connector == null) {
    throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, dcName);
  }
  // Set the values from the alter descriptor onto the connector object.
  // (The previously fetched parameters map was unused and has been removed.)
  doAlteration(connector);
  // This is the HMS metadata operation that persists the modified object.
  context.getDb().alterDataConnector(connector.getName(), connector);
  return 0;
}
Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.
The class CreateDataConnectorOperation, method execute.
/**
 * Creates a new data connector in the metastore from the CREATE DATACONNECTOR descriptor.
 * The connector URL must be an absolute URI with a non-blank scheme.
 *
 * @return 0 on success
 * @throws HiveException if the URL is invalid/malformed, or the connector already exists
 *         (unless IF NOT EXISTS was specified — then the metastore call is a no-op)
 */
@Override
public int execute() throws HiveException {
  try {
    URI connectorUri = new URI(desc.getURL());
    if (!connectorUri.isAbsolute() || StringUtils.isBlank(connectorUri.getScheme())) {
      // TODO make a new error message for URL
      throw new HiveException(ErrorMsg.INVALID_PATH, desc.getURL());
    }
    DataConnector connector = new DataConnector(desc.getName(), desc.getType(), desc.getURL());
    if (desc.getComment() != null) {
      connector.setDescription(desc.getComment());
    }
    // Owner is the user resolved by the current session's authenticator.
    connector.setOwnerName(SessionState.getUserFromAuthenticator());
    connector.setOwnerType(PrincipalType.USER);
    if (desc.getConnectorProperties() != null) {
      connector.setParameters(desc.getConnectorProperties());
    }
    try {
      context.getDb().createDataConnector(connector, desc.getIfNotExists());
    } catch (AlreadyExistsException ex) {
      // it would be better if AlreadyExistsException had an errorCode field....
      throw new HiveException(ex, ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, desc.getName());
    }
    return 0;
  } catch (URISyntaxException e) {
    throw new HiveException(e);
  }
}
Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.
The class DescDataConnectorOperation, method execute.
/**
 * Writes a DESCRIBE DATACONNECTOR description to the result file. When EXTENDED is
 * requested, the connector parameters are included, sorted by key.
 *
 * @return 0 on success
 * @throws HiveException if the connector does not exist, or on any I/O/formatting failure
 */
@Override
public int execute() throws HiveException {
  try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
    DataConnector connector = context.getDb().getDataConnector(desc.getConnectorName());
    if (connector == null) {
      throw new HiveException(ErrorMsg.DATACONNECTOR_NOT_EXISTS, desc.getConnectorName());
    }
    SortedMap<String, String> params = null;
    if (desc.isExtended()) {
      // TreeMap gives a deterministic, key-sorted rendering of the parameters.
      params = new TreeMap<>(connector.getParameters());
    }
    DescDataConnectorFormatter formatter = DescDataConnectorFormatter.getFormatter(context.getConf());
    formatter.showDataConnectorDescription(outStream, connector.getName(), connector.getType(), connector.getUrl(), connector.getOwnerName(), connector.getOwnerType(), connector.getDescription(), params);
  } catch (HiveException e) {
    // Rethrow as-is so specific errors (e.g. DATACONNECTOR_NOT_EXISTS) are not
    // masked as GENERIC_ERROR by the broad catch below.
    throw e;
  } catch (Exception e) {
    throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
  }
  return 0;
}
Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.
The class CommandAuthorizerV2, method addHivePrivObject.
/**
 * Converts a query {@code Entity} into a {@code HivePrivilegeObject} and appends it to
 * {@code hivePrivObjs}, dispatching on the entity type. Partition entities are skipped
 * (not currently handled). For tables backed by a storage handler, an additional
 * STORAGEHANDLER_URI privilege object may be appended for create/alter-table operations.
 *
 * @param privObject the read/write entity produced by semantic analysis
 * @param tableName2Cols map from complete table name to accessed columns; may be null
 * @param hivePrivObjs output list the resulting privilege object(s) are appended to
 * @param hiveOpType the operation being authorized (affects storage-handler URI handling)
 * @throws HiveException if the storage handler URI cannot be obtained
 */
private static void addHivePrivObject(Entity privObject, Map<String, List<String>> tableName2Cols, List<HivePrivilegeObject> hivePrivObjs, HiveOperationType hiveOpType) throws HiveException {
HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
HivePrivilegeObject hivePrivObject = null;
switch(privObject.getType()) {
case DATABASE:
Database database = privObject.getDatabase();
// Database-level object: carries the db name plus its owner for ownership checks.
hivePrivObject = new HivePrivilegeObject(privObjType, database.getName(), null, null, null, actionType, null, null, database.getOwnerName(), database.getOwnerType());
break;
case TABLE:
Table table = privObject.getTable();
// Columns are looked up by the table's complete name ("db.table"); null when no column map.
List<String> columns = tableName2Cols == null ? null : tableName2Cols.get(Table.getCompleteName(table.getDbName(), table.getTableName()));
hivePrivObject = new HivePrivilegeObject(privObjType, table.getDbName(), table.getTableName(), null, columns, actionType, null, null, table.getOwner(), table.getOwnerType());
if (table.getStorageHandler() != null) {
// TODO: add hive privilege object for storage based handlers for create and alter table commands.
if (hiveOpType == HiveOperationType.CREATETABLE || hiveOpType == HiveOperationType.ALTERTABLE_PROPERTIES || hiveOpType == HiveOperationType.CREATETABLE_AS_SELECT) {
try {
// The storage handler exposes the URI that must be authorized for these DDL ops.
String storageUri = table.getStorageHandler().getURIForAuth(table.getTTable()).toString();
hivePrivObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI, null, storageUri, null, null, actionType, null, table.getStorageHandler().getClass().getName(), table.getOwner(), table.getOwnerType()));
} catch (Exception ex) {
LOG.error("Exception occurred while getting the URI from storage handler: " + ex.getMessage(), ex);
throw new HiveException("Exception occurred while getting the URI from storage handler: " + ex.getMessage());
}
}
}
break;
case DFS_DIR:
case LOCAL_DIR:
// Directory entities carry only the path (as the "object name" slot).
hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getD().toString(), null, null, actionType, null, null, null, null);
break;
case FUNCTION:
// Function may be database-qualified; class name is passed for UDF class checks.
String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(), null, null, actionType, null, privObject.getClassName(), null, null);
break;
case DUMMYPARTITION:
case PARTITION:
// TODO: not currently handled
return;
case SERVICE_NAME:
hivePrivObject = new HivePrivilegeObject(privObjType, null, privObject.getServiceName(), null, null, actionType, null, null, null, null);
break;
case DATACONNECTOR:
// Data connector object: name plus owner, analogous to the DATABASE case.
DataConnector connector = privObject.getDataConnector();
hivePrivObject = new HivePrivilegeObject(privObjType, null, connector.getName(), null, null, actionType, null, null, connector.getOwnerName(), connector.getOwnerType());
break;
default:
throw new AssertionError("Unexpected object type");
}
hivePrivObjs.add(hivePrivObject);
}
Use of org.apache.hadoop.hive.metastore.api.DataConnector in project hive by apache.
The class DataConnectorProviderFactory, method getDataConnectorProvider.
/**
 * Returns (and caches) an {@code IDataConnectorProvider} for the given remote database.
 * Providers are cached by lower-cased connector name; a cache hit is re-scoped to the
 * current remote database name before being returned.
 *
 * @param db a remote (non-NATIVE) database that references a data connector
 * @return the provider for the database's connector
 * @throws MetaException if the database is NATIVE, the connector does not exist, the
 *         provider cannot be instantiated, or the connector type is unsupported
 */
public static synchronized IDataConnectorProvider getDataConnectorProvider(Database db) throws MetaException {
  IDataConnectorProvider provider = null;
  DataConnector connector = null;
  // Native databases are served by the metastore itself and have no connector.
  if (db.getType() == DatabaseType.NATIVE) {
    throw new MetaException("Database " + db.getName() + " is of type NATIVE, no connector available");
  }
  // Scope the provider to the remote-side database name when one is configured.
  String scopedDb = (db.getRemote_dbname() != null) ? db.getRemote_dbname() : db.getName();
  if (cache.containsKey(db.getConnector_name().toLowerCase())) {
    provider = cache.get(db.getConnector_name().toLowerCase());
    if (provider != null) {
      provider.setScope(scopedDb);
    }
    return provider;
  }
  try {
    connector = hmsHandler.get_dataconnector_core(db.getConnector_name());
  } catch (NoSuchObjectException notexists) {
    throw new MetaException("Data connector " + db.getConnector_name() + " associated with database " + db.getName() + " does not exist");
  }
  String type = connector.getType();
  switch(type) {
    case DERBY_TYPE:
    case MSSQL_TYPE:
    case MYSQL_TYPE:
    case ORACLE_TYPE:
    case POSTGRES_TYPE:
      try {
        provider = JDBCConnectorProviderFactory.get(scopedDb, connector);
      } catch (Exception e) {
        // MetaException is thrift-generated and has no cause constructor, so carry the
        // underlying failure's message instead of silently discarding it.
        throw new MetaException("Could not instantiate a provider for database " + db.getName() + ": " + e.getMessage());
      }
      break;
    default:
      throw new MetaException("Data connector of type " + connector.getType() + " not implemented yet");
  }
  cache.put(connector.getName().toLowerCase(), provider);
  return provider;
}
Aggregations