Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache: the class ObjectStore, method dropPartitionCommon.
/**
* Drop an MPartition and cascade deletes (e.g., delete partition privilege grants,
* drop the storage descriptor cleanly, etc.)
* @param part - the MPartition to drop
* @return whether the transaction committed successfully
* @throws InvalidInputException
* @throws InvalidObjectException
* @throws MetaException
* @throws NoSuchObjectException
*/
private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectException,
    MetaException, InvalidObjectException, InvalidInputException {
  boolean success = false;
  try {
    openTransaction();
    if (part != null) {
      List<MFieldSchema> schemas = part.getTable().getPartitionKeys();
      List<String> colNames = new ArrayList<String>();
      for (MFieldSchema col : schemas) {
        colNames.add(col.getName());
      }
      String partName = FileUtils.makePartName(colNames, part.getValues());
      List<MPartitionPrivilege> partGrants = listPartitionGrants(
          part.getTable().getDatabase().getName(),
          part.getTable().getTableName(),
          Lists.newArrayList(partName));
      if (partGrants != null && partGrants.size() > 0) {
        pm.deletePersistentAll(partGrants);
      }
      List<MPartitionColumnPrivilege> partColumnGrants = listPartitionAllColumnGrants(
          part.getTable().getDatabase().getName(),
          part.getTable().getTableName(),
          Lists.newArrayList(partName));
      if (partColumnGrants != null && partColumnGrants.size() > 0) {
        pm.deletePersistentAll(partColumnGrants);
      }
      String dbName = part.getTable().getDatabase().getName();
      String tableName = part.getTable().getTableName();
      // delete partition-level column stats if they exist
      try {
        deletePartitionColumnStatistics(dbName, tableName, partName, part.getValues(), null);
      } catch (NoSuchObjectException e) {
        LOG.info("No column statistics records found to delete");
      }
      preDropStorageDescriptor(part.getSd());
      pm.deletePersistent(part);
    }
    success = commitTransaction();
  } finally {
    if (!success) {
      rollbackTransaction();
    }
  }
  return success;
}
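The grant cleanup above keys off the canonical partition name that FileUtils.makePartName builds from the MFieldSchema key names and the partition values. A minimal standalone sketch of that format, assuming the org.apache.hadoop.hive.common.FileUtils that ObjectStore uses (the class name PartNameDemo is ours, not Hive's):

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.common.FileUtils;

public class PartNameDemo {
  public static void main(String[] args) {
    List<String> colNames = Arrays.asList("ds", "hr");       // partition key names
    List<String> values = Arrays.asList("2017-01-01", "12"); // partition values
    // Prints "ds=2017-01-01/hr=12"; special characters would be escaped.
    System.out.println(FileUtils.makePartName(colNames, values));
  }
}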
Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache: the class ObjectStore, method getType.
private Type getType(MType mtype) {
  List<FieldSchema> fields = new ArrayList<FieldSchema>();
  if (mtype.getFields() != null) {
    for (MFieldSchema field : mtype.getFields()) {
      fields.add(new FieldSchema(field.getName(), field.getType(), field.getComment()));
    }
  }
  Type ret = new Type();
  ret.setName(mtype.getName());
  ret.setType1(mtype.getType1());
  ret.setType2(mtype.getType2());
  ret.setFields(fields);
  return ret;
}
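getType is one instance of the model-to-Thrift conversion idiom that recurs throughout ObjectStore: copy each JDO-backed MFieldSchema field-by-field into the Thrift FieldSchema. A self-contained sketch of just that idiom (the class and helper names are ours; ObjectStore's private convertToFieldSchemas plays this role internally):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.model.MFieldSchema;

public final class FieldSchemaConversion {
  // Copies each JDO-backed column into its Thrift counterpart; a null
  // input yields an empty list, matching getType's behavior.
  public static List<FieldSchema> toFieldSchemas(List<MFieldSchema> mcols) {
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    if (mcols != null) {
      for (MFieldSchema m : mcols) {
        cols.add(new FieldSchema(m.getName(), m.getType(), m.getComment()));
      }
    }
    return cols;
  }
}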
Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache: the class ObjectStore, method addForeignKeys.
private void addForeignKeys(List<SQLForeignKey> fks, boolean retrieveCD)
    throws InvalidObjectException, MetaException {
  List<MConstraint> mpkfks = new ArrayList<MConstraint>();
  String currentConstraintName = null;
  for (int i = 0; i < fks.size(); i++) {
    AttachedMTableInfo nParentTable =
        getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name(), retrieveCD);
    MTable parentTable = nParentTable.mtbl;
    if (parentTable == null) {
      throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name());
    }
    AttachedMTableInfo nChildTable =
        getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(), retrieveCD);
    MTable childTable = nChildTable.mtbl;
    if (childTable == null) {
      throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name());
    }
    MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
    List<MFieldSchema> parentCols = parentCD == null ? null : parentCD.getCols();
    int parentIntegerIndex =
        getColumnIndexFromTableColumns(parentCols, fks.get(i).getPkcolumn_name());
    if (parentIntegerIndex == -1) {
      throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name());
    }
    MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
    List<MFieldSchema> childCols = childCD.getCols();
    int childIntegerIndex =
        getColumnIndexFromTableColumns(childCols, fks.get(i).getFkcolumn_name());
    if (childIntegerIndex == -1) {
      throw new InvalidObjectException("Child column not found: " + fks.get(i).getFkcolumn_name());
    }
    if (fks.get(i).getFk_name() == null) {
      // Generate a name only for the first column of a composite key (key_seq == 1)
      // and reuse it for the remaining columns, preserving the uniqueness of the
      // generated constraint name.
      if (fks.get(i).getKey_seq() == 1) {
        currentConstraintName = generateConstraintName(
            fks.get(i).getFktable_db(), fks.get(i).getFktable_name(),
            fks.get(i).getPktable_db(), fks.get(i).getPktable_name(),
            fks.get(i).getPkcolumn_name(), fks.get(i).getFkcolumn_name(), "fk");
      }
    } else {
      currentConstraintName = fks.get(i).getFk_name();
    }
    Integer updateRule = fks.get(i).getUpdate_rule();
    Integer deleteRule = fks.get(i).getDelete_rule();
    int enableValidateRely = (fks.get(i).isEnable_cstr() ? 4 : 0)
        + (fks.get(i).isValidate_cstr() ? 2 : 0) + (fks.get(i).isRely_cstr() ? 1 : 0);
    MConstraint mpkfk = new MConstraint(currentConstraintName,
        MConstraint.FOREIGN_KEY_CONSTRAINT, fks.get(i).getKey_seq(), deleteRule, updateRule,
        enableValidateRely, parentTable, childTable, parentCD, childCD,
        childIntegerIndex, parentIntegerIndex);
    mpkfks.add(mpkfk);
  }
  pm.makePersistentAll(mpkfks);
}
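The enableValidateRely value packs the three constraint characteristics into one int: bit 2 (value 4) for ENABLE, bit 1 (value 2) for VALIDATE, bit 0 (value 1) for RELY. A standalone sketch of that packing (class and method names are ours, not Hive's):

public final class ConstraintFlags {
  // Same arithmetic as addForeignKeys: each flag contributes its bit value.
  public static int pack(boolean enable, boolean validate, boolean rely) {
    return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
  }

  public static void main(String[] args) {
    // ENABLE + RELY without VALIDATE packs to 4 + 0 + 1 = 5.
    System.out.println(pack(true, false, true));
  }
}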
Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache: the class ObjectStore, method convertToStorageDescriptor.
// MSD and SD should be the same objects; not sure how to make them the same right now.
// MSerDeInfo and SerDeInfo should be the same as well.
private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd, boolean noFS)
    throws MetaException {
  if (msd == null) {
    return null;
  }
  List<MFieldSchema> mFieldSchemas = msd.getCD() == null ? null : msd.getCD().getCols();
  StorageDescriptor sd = new StorageDescriptor(
      noFS ? null : convertToFieldSchemas(mFieldSchemas),
      msd.getLocation(), msd.getInputFormat(), msd.getOutputFormat(), msd.isCompressed(),
      msd.getNumBuckets(), convertToSerDeInfo(msd.getSerDeInfo()),
      convertList(msd.getBucketCols()), convertToOrders(msd.getSortCols()),
      convertMap(msd.getParameters()));
  SkewedInfo skewedInfo = new SkewedInfo(convertList(msd.getSkewedColNames()),
      convertToSkewedValues(msd.getSkewedColValues()),
      covertToSkewedMap(msd.getSkewedColValueLocationMaps()));
  sd.setSkewedInfo(skewedInfo);
  sd.setStoredAsSubDirectories(msd.isStoredAsSubDirectories());
  return sd;
}
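The SkewedInfo built above carries three parallel structures: skewed column names, skewed value tuples, and a map from value tuple to location. A minimal sketch that populates the Thrift SkewedInfo through its generated setters, with made-up values (the location path is illustrative):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.api.SkewedInfo;

public class SkewedInfoDemo {
  public static void main(String[] args) {
    SkewedInfo si = new SkewedInfo();
    // One skewed column with one skewed value tuple, mapped to its directory.
    si.setSkewedColNames(Arrays.asList("country"));
    si.setSkewedColValues(Arrays.asList(Arrays.asList("US")));
    Map<List<String>, String> locations = new HashMap<List<String>, String>();
    locations.put(Arrays.asList("US"), "/warehouse/t/country=US");
    si.setSkewedColValueLocationMaps(locations);
    System.out.println(si);
  }
}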