Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
The class HBaseStore, method getUserPrivilegeSet.
@Override
public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, List<String> groupNames) throws InvalidObjectException, MetaException {
  boolean commit = false;
  openTransaction();
  try {
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    // Only global privileges are consulted here; if none are recorded at all, return null.
    PrincipalPrivilegeSet global = getHBase().getGlobalPrivs();
    if (global == null) {
      return null;
    }
    List<PrivilegeGrantInfo> pgi;
    // Privileges granted directly to this user.
    if (global.getUserPrivileges() != null) {
      pgi = global.getUserPrivileges().get(userName);
      if (pgi != null) {
        pps.putToUserPrivileges(userName, pgi);
      }
    }
    // Privileges granted to any role the user belongs to.
    if (global.getRolePrivileges() != null) {
      List<String> roles = getHBase().getUserRoles(userName);
      if (roles != null) {
        for (String role : roles) {
          pgi = global.getRolePrivileges().get(role);
          if (pgi != null) {
            pps.putToRolePrivileges(role, pgi);
          }
        }
      }
    }
    commit = true;
    return pps;
  } catch (IOException e) {
    LOG.error("Unable to get global privileges for user", e);
    throw new MetaException("Unable to get global privileges for user, " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
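A minimal call-site sketch for the method above. The variable store and the helper name printUserPrivs are illustrative, not part of the Hive source; store is assumed to be an already-initialized RawStore implementation such as HBaseStore. It shows how the returned set separates direct user grants from role-derived grants.

import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class UserPrivsExample {
  // Prints the global privileges a user holds, both directly and via roles.
  static void printUserPrivs(RawStore store, String user) throws Exception {
    // groupNames is ignored by the HBaseStore implementation above, so null is fine here.
    PrincipalPrivilegeSet pps = store.getUserPrivilegeSet(user, null);
    if (pps == null) {
      return; // no global privileges recorded at all
    }
    Map<String, List<PrivilegeGrantInfo>> userPrivs = pps.getUserPrivileges();
    if (userPrivs != null && userPrivs.get(user) != null) {
      for (PrivilegeGrantInfo pgi : userPrivs.get(user)) {
        System.out.println(user + " holds directly: " + pgi.getPrivilege());
      }
    }
    Map<String, List<PrivilegeGrantInfo>> rolePrivs = pps.getRolePrivileges();
    if (rolePrivs != null) {
      for (Map.Entry<String, List<PrivilegeGrantInfo>> e : rolePrivs.entrySet()) {
        for (PrivilegeGrantInfo pgi : e.getValue()) {
          System.out.println(user + " holds via role " + e.getKey() + ": " + pgi.getPrivilege());
        }
      }
    }
  }
}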
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
The class HBaseStore, method getTablePrivilegeSet.
@Override
public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, String tableName, String userName, List<String> groupNames) throws InvalidObjectException, MetaException {
  boolean commit = false;
  openTransaction();
  try {
    PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet();
    // Assumes the table exists; a null return from getTable() would NPE below.
    Table table = getHBase().getTable(dbName, tableName);
    List<PrivilegeGrantInfo> pgi;
    if (table.getPrivileges() != null) {
      // Privileges granted directly to this user on the table.
      if (table.getPrivileges().getUserPrivileges() != null) {
        pgi = table.getPrivileges().getUserPrivileges().get(userName);
        if (pgi != null) {
          pps.putToUserPrivileges(userName, pgi);
        }
      }
      // Privileges granted on the table to any role the user belongs to.
      if (table.getPrivileges().getRolePrivileges() != null) {
        List<String> roles = getHBase().getUserRoles(userName);
        if (roles != null) {
          for (String role : roles) {
            pgi = table.getPrivileges().getRolePrivileges().get(role);
            if (pgi != null) {
              pps.putToRolePrivileges(role, pgi);
            }
          }
        }
      }
    }
    commit = true;
    return pps;
  } catch (IOException e) {
    LOG.error("Unable to get table privileges for user", e);
    throw new MetaException("Unable to get table privileges for user, " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
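A hedged sketch of checking one specific grant through the method above. The names hasDirectTableGrant and store are illustrative; only direct user grants are inspected here, and role grants would be checked the same way through getRolePrivileges().

import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

public class TablePrivCheck {
  // Returns true if 'user' was directly granted 'privilege' (e.g. "SELECT") on db.table.
  // 'store' is assumed to be an initialized RawStore such as HBaseStore.
  static boolean hasDirectTableGrant(RawStore store, String db, String table, String user, String privilege) throws Exception {
    PrincipalPrivilegeSet pps = store.getTablePrivilegeSet(db, table, user, null);
    if (pps == null || pps.getUserPrivileges() == null) {
      return false;
    }
    List<PrivilegeGrantInfo> grants = pps.getUserPrivileges().get(user);
    if (grants == null) {
      return false;
    }
    for (PrivilegeGrantInfo pgi : grants) {
      if (privilege.equalsIgnoreCase(pgi.getPrivilege())) {
        return true;
      }
    }
    return false;
  }
}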
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
The class HBaseStore, method listPrincipalDBGrants.
@Override
public List<HiveObjectPrivilege> listPrincipalDBGrants(String principalName, PrincipalType principalType, String dbName) {
  List<PrivilegeGrantInfo> grants;
  List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
  boolean commit = false;
  openTransaction();
  try {
    Database db = getHBase().getDb(dbName);
    if (db == null) {
      return privileges;
    }
    PrincipalPrivilegeSet pps = db.getPrivileges();
    if (pps == null) {
      return privileges;
    }
    // Pick the map that matches the principal type; only users and roles carry DB grants.
    Map<String, List<PrivilegeGrantInfo>> map;
    switch (principalType) {
      case USER:
        map = pps.getUserPrivileges();
        break;
      case ROLE:
        map = pps.getRolePrivileges();
        break;
      default:
        throw new RuntimeException("Unknown or unsupported principal type " + principalType.toString());
    }
    if (map == null) {
      return privileges;
    }
    grants = map.get(principalName);
    if (grants == null || grants.size() == 0) {
      return privileges;
    }
    for (PrivilegeGrantInfo pgi : grants) {
      privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.DATABASE, dbName, null, null, null), principalName, principalType, pgi));
    }
    commit = true;
    return privileges;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    commitOrRoleBack(commit);
  }
}
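A short usage sketch for the method above; the names store and printRoleDbGrants are illustrative. Each returned HiveObjectPrivilege bundles the object reference, the principal, and the grant details.

import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

public class DbGrantsExample {
  // Prints each privilege the given role holds on one database.
  // 'store' is assumed to be an initialized RawStore such as HBaseStore.
  static void printRoleDbGrants(RawStore store, String role, String dbName) {
    List<HiveObjectPrivilege> grants = store.listPrincipalDBGrants(role, PrincipalType.ROLE, dbName);
    for (HiveObjectPrivilege priv : grants) {
      System.out.println(priv.getPrincipalName() + " -> " + priv.getGrantInfo().getPrivilege() + " on " + priv.getHiveObject().getDbName());
    }
  }
}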
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
The class HBaseStore, method listPrincipalDBGrantsAll.
@Override
public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(String principalName, PrincipalType principalType) {
  List<HiveObjectPrivilege> privileges = new ArrayList<HiveObjectPrivilege>();
  boolean commit = false;
  openTransaction();
  try {
    List<Database> dbs = getHBase().scanDatabases(null);
    for (Database db : dbs) {
      List<PrivilegeGrantInfo> grants;
      PrincipalPrivilegeSet pps = db.getPrivileges();
      if (pps == null) {
        continue;
      }
      Map<String, List<PrivilegeGrantInfo>> map;
      switch (principalType) {
        case USER:
          map = pps.getUserPrivileges();
          break;
        case ROLE:
          map = pps.getRolePrivileges();
          break;
        default:
          throw new RuntimeException("Unknown or unsupported principal type " + principalType.toString());
      }
      if (map == null) {
        continue;
      }
      grants = map.get(principalName);
      if (grants == null || grants.size() == 0) {
        continue;
      }
      for (PrivilegeGrantInfo pgi : grants) {
        privileges.add(new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.DATABASE, db.getName(), null, null, null), principalName, principalType, pgi));
      }
    }
    commit = true;
    return privileges;
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    commitOrRoleBack(commit);
  }
}
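This variant differs from listPrincipalDBGrants above only in scope: it scans every database in the store and collects the principal's grants from each, rather than reading a single named database. A one-line hypothetical call site, again assuming an initialized RawStore named store:

List<HiveObjectPrivilege> all = store.listPrincipalDBGrantsAll("etl_role", PrincipalType.ROLE);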
Use of org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet in project hive by apache.
The class Hive, method createIndex.
/**
 * @param tableName
 *          base table name
 * @param indexName
 *          index name
 * @param indexHandlerClass
 *          index handler class
 * @param indexedCols
 *          columns to index
 * @param indexTblName
 *          index table's name
 * @param deferredRebuild
 *          whether to defer building the index table's data (must be true; specified via WITH DEFERRED REBUILD)
 * @param inputFormat
 *          input format
 * @param outputFormat
 *          output format
 * @param serde
 *          serde class
 * @param storageHandler
 *          index table's storage handler
 * @param location
 *          location
 * @param idxProps
 *          index properties
 * @param tblProps
 *          index table properties
 * @param serdeProps
 *          serde properties
 * @param collItemDelim
 *          collection item delimiter
 * @param fieldDelim
 *          field delimiter
 * @param fieldEscape
 *          field escape character
 * @param lineDelim
 *          line delimiter
 * @param mapKeyDelim
 *          map key delimiter
 * @param indexComment
 *          index comment
 * @throws HiveException
 */
public void createIndex(String tableName, String indexName, String indexHandlerClass, List<String> indexedCols, String indexTblName, boolean deferredRebuild, String inputFormat, String outputFormat, String serde, String storageHandler, String location, Map<String, String> idxProps, Map<String, String> tblProps, Map<String, String> serdeProps, String collItemDelim, String fieldDelim, String fieldEscape, String lineDelim, String mapKeyDelim, String indexComment) throws HiveException {
  try {
    // The index table must live in the same database as the base table.
    String tdname = Utilities.getDatabaseName(tableName);
    String idname = Utilities.getDatabaseName(indexTblName);
    if (!idname.equals(tdname)) {
      throw new HiveException("Index on different database (" + idname + ") from base table (" + tdname + ") is not supported.");
    }
    Index old_index = null;
    try {
      old_index = getIndex(tableName, indexName);
    } catch (Exception e) {
      // Ignored: a missing index is the expected case here.
    }
    if (old_index != null) {
      throw new HiveException("Index " + indexName + " already exists on table " + tableName);
    }
    // Indexes are not supported on views or temporary tables.
    org.apache.hadoop.hive.metastore.api.Table baseTbl = getTable(tableName).getTTable();
    if (TableType.VIRTUAL_VIEW.toString().equals(baseTbl.getTableType())) {
      throw new HiveException("tableName=" + tableName + " is a VIRTUAL VIEW. Index on VIRTUAL VIEW is not supported.");
    }
    if (baseTbl.isTemporary()) {
      throw new HiveException("tableName=" + tableName + " is a TEMPORARY TABLE. Index on TEMPORARY TABLE is not supported.");
    }
    org.apache.hadoop.hive.metastore.api.Table temp = null;
    try {
      temp = getTable(indexTblName).getTTable();
    } catch (Exception e) {
      // Ignored: a missing table is the expected case here.
    }
    if (temp != null) {
      throw new HiveException("Table name " + indexTblName + " already exists. Choose another name.");
    }
    // Serde for the index table: explicit serde, the storage handler's serde,
    // or LazySimpleSerDe as the default.
    SerDeInfo serdeInfo = new SerDeInfo();
    serdeInfo.setName(indexTblName);
    if (serde != null) {
      serdeInfo.setSerializationLib(serde);
    } else {
      if (storageHandler == null) {
        serdeInfo.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        HiveStorageHandler sh = HiveUtils.getStorageHandler(getConf(), storageHandler);
        String serDeClassName = sh.getSerDeClass().getName();
        serdeInfo.setSerializationLib(serDeClassName);
      }
    }
    serdeInfo.setParameters(new HashMap<String, String>());
    if (fieldDelim != null) {
      serdeInfo.getParameters().put(FIELD_DELIM, fieldDelim);
      serdeInfo.getParameters().put(SERIALIZATION_FORMAT, fieldDelim);
    }
    if (fieldEscape != null) {
      serdeInfo.getParameters().put(ESCAPE_CHAR, fieldEscape);
    }
    if (collItemDelim != null) {
      serdeInfo.getParameters().put(COLLECTION_DELIM, collItemDelim);
    }
    if (mapKeyDelim != null) {
      serdeInfo.getParameters().put(MAPKEY_DELIM, mapKeyDelim);
    }
    if (lineDelim != null) {
      serdeInfo.getParameters().put(LINE_DELIM, lineDelim);
    }
    if (serdeProps != null) {
      Iterator<Entry<String, String>> iter = serdeProps.entrySet().iterator();
      while (iter.hasNext()) {
        Entry<String, String> m = iter.next();
        serdeInfo.getParameters().put(m.getKey(), m.getValue());
      }
    }
    // Collect the indexed columns from the base table; each becomes a sort column
    // of the index table.
    List<FieldSchema> indexTblCols = new ArrayList<FieldSchema>();
    List<Order> sortCols = new ArrayList<Order>();
    int k = 0;
    Table metaBaseTbl = new Table(baseTbl);
    // Even though we are storing these in metastore, get regular columns. Indexes on lengthy
    // types from e.g. Avro schema will just fail to create the index table (by design).
    List<FieldSchema> cols = metaBaseTbl.getCols();
    for (int i = 0; i < cols.size(); i++) {
      FieldSchema col = cols.get(i);
      if (indexedCols.contains(col.getName())) {
        indexTblCols.add(col);
        sortCols.add(new Order(col.getName(), 1));
        k++;
      }
    }
    if (k != indexedCols.size()) {
      throw new RuntimeException("Check the index columns, they should appear in the table being indexed.");
    }
    int time = (int) (System.currentTimeMillis() / 1000);
    org.apache.hadoop.hive.metastore.api.Table tt = null;
    HiveIndexHandler indexHandler = HiveUtils.getIndexHandler(this.getConf(), indexHandlerClass);
    String itname = Utilities.getTableName(indexTblName);
    if (indexHandler.usesIndexTable()) {
      tt = new org.apache.hadoop.hive.ql.metadata.Table(idname, itname).getTTable();
      List<FieldSchema> partKeys = baseTbl.getPartitionKeys();
      tt.setPartitionKeys(partKeys);
      tt.setTableType(TableType.INDEX_TABLE.toString());
      if (tblProps != null) {
        for (Entry<String, String> prop : tblProps.entrySet()) {
          tt.putToParameters(prop.getKey(), prop.getValue());
        }
      }
      // Apply the session's automatic CREATE TABLE grants to the new index table.
      SessionState ss = SessionState.get();
      CreateTableAutomaticGrant grants;
      if (ss != null && ((grants = ss.getCreateTableGrants()) != null)) {
        PrincipalPrivilegeSet principalPrivs = new PrincipalPrivilegeSet();
        principalPrivs.setUserPrivileges(grants.getUserGrants());
        principalPrivs.setGroupPrivileges(grants.getGroupGrants());
        principalPrivs.setRolePrivileges(grants.getRoleGrants());
        tt.setPrivileges(principalPrivs);
      }
    }
    if (!deferredRebuild) {
      throw new RuntimeException("Please specify deferred rebuild using \" WITH DEFERRED REBUILD \".");
    }
    StorageDescriptor indexSd = new StorageDescriptor(indexTblCols, location, inputFormat, outputFormat, false, /* compressed - not used */
        -1, /* numBuckets - default is -1 when the table has no buckets */
        serdeInfo, null, /* bucketCols */
        sortCols, null /* parameters */);
    String ttname = Utilities.getTableName(tableName);
    Index indexDesc = new Index(indexName, indexHandlerClass, tdname, ttname, time, time, itname, indexSd, new HashMap<String, String>(), deferredRebuild);
    if (indexComment != null) {
      indexDesc.getParameters().put("comment", indexComment);
    }
    if (idxProps != null) {
      indexDesc.getParameters().putAll(idxProps);
    }
    indexHandler.analyzeIndexDefinition(baseTbl, indexDesc, tt);
    this.getMSC().createIndex(indexDesc, tt);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
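A hedged call-site sketch for createIndex, roughly what CREATE INDEX ... AS 'COMPACT' WITH DEFERRED REBUILD drives. The table, index, and database names are illustrative, and the nulls are assumed to fall back to the defaults handled in the method body above (LazySimpleSerDe, no explicit location, no custom delimiters).

import java.util.Arrays;
import java.util.HashMap;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class CreateIndexExample {
  // Creates a compact index on default.sales(customer_id) with a deferred rebuild.
  // 'db' is assumed to be an initialized Hive instance; all names are illustrative.
  static void createSalesIndex(Hive db) throws HiveException {
    db.createIndex(
        "default.sales",                                                // base table
        "sales_idx",                                                    // index name
        "org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler",  // index handler class
        Arrays.asList("customer_id"),                                   // indexed columns
        "default.default__sales_sales_idx__",                           // index table name (same database as base table)
        true,                                                           // deferredRebuild; false would throw, per the check above
        null, null,                                                     // inputFormat, outputFormat
        null,                                                           // serde (defaults to LazySimpleSerDe)
        null,                                                           // storageHandler
        null,                                                           // location
        new HashMap<String, String>(),                                  // idxProps
        new HashMap<String, String>(),                                  // tblProps
        new HashMap<String, String>(),                                  // serdeProps
        null, null, null, null, null,                                   // collItemDelim, fieldDelim, fieldEscape, lineDelim, mapKeyDelim
        null);                                                          // indexComment
  }
}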