
Example 1 with MFieldSchema

Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache.

From the class ObjectStore, method dropPartitionCommon.

/**
   * Drop an MPartition and cascade deletes (e.g., delete partition privilege grants,
   *   drop the storage descriptor cleanly, etc.)
   * @param part - the MPartition to drop
   * @return whether the transaction committed successfully
   * @throws InvalidInputException
   * @throws InvalidObjectException
   * @throws MetaException
   * @throws NoSuchObjectException
   */
private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
    boolean success = false;
    try {
        openTransaction();
        if (part != null) {
            List<MFieldSchema> schemas = part.getTable().getPartitionKeys();
            List<String> colNames = new ArrayList<String>();
            for (MFieldSchema col : schemas) {
                colNames.add(col.getName());
            }
            String partName = FileUtils.makePartName(colNames, part.getValues());
            List<MPartitionPrivilege> partGrants = listPartitionGrants(part.getTable().getDatabase().getName(), part.getTable().getTableName(), Lists.newArrayList(partName));
            if (partGrants != null && partGrants.size() > 0) {
                pm.deletePersistentAll(partGrants);
            }
            List<MPartitionColumnPrivilege> partColumnGrants = listPartitionAllColumnGrants(part.getTable().getDatabase().getName(), part.getTable().getTableName(), Lists.newArrayList(partName));
            if (partColumnGrants != null && partColumnGrants.size() > 0) {
                pm.deletePersistentAll(partColumnGrants);
            }
            String dbName = part.getTable().getDatabase().getName();
            String tableName = part.getTable().getTableName();
            // delete partition level column stats if it exists
            try {
                deletePartitionColumnStatistics(dbName, tableName, partName, part.getValues(), null);
            } catch (NoSuchObjectException e) {
                LOG.info("No column statistics records found to delete");
            }
            preDropStorageDescriptor(part.getSd());
            pm.deletePersistent(part);
        }
        success = commitTransaction();
    } finally {
        if (!success) {
            rollbackTransaction();
        }
    }
    return success;
}
Also used : MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) ArrayList(java.util.ArrayList) MPartitionPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MPartitionColumnPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege)
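
The partition name handed to the privilege and statistics lookups above is derived from the table's partition keys (the MFieldSchema names) and the partition's values. Below is a minimal sketch of that derivation under the usual key=value path convention; the method name makePartNameSketch is ours for illustration, and the real FileUtils.makePartName additionally escapes characters that are not legal in a path, which this sketch omits.

// Illustration only (not FileUtils.makePartName): joins key=value pairs with '/', e.g.
// makePartNameSketch(Arrays.asList("ds", "hr"), Arrays.asList("2024-01-01", "00"))
// returns "ds=2024-01-01/hr=00". The real helper also escapes illegal path characters.
private static String makePartNameSketch(List<String> keyNames, List<String> values) {
    StringBuilder name = new StringBuilder();
    for (int i = 0; i < keyNames.size(); i++) {
        if (i > 0) {
            name.append('/');
        }
        name.append(keyNames.get(i)).append('=').append(values.get(i));
    }
    return name.toString();
}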

Example 2 with MFieldSchema

Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache.

From the class ObjectStore, method getType.

private Type getType(MType mtype) {
    List<FieldSchema> fields = new ArrayList<FieldSchema>();
    if (mtype.getFields() != null) {
        for (MFieldSchema field : mtype.getFields()) {
            fields.add(new FieldSchema(field.getName(), field.getType(), field.getComment()));
        }
    }
    Type ret = new Type();
    ret.setName(mtype.getName());
    ret.setType1(mtype.getType1());
    ret.setType2(mtype.getType2());
    ret.setFields(fields);
    return ret;
}
Also used : FileMetadataExprType(org.apache.hadoop.hive.metastore.api.FileMetadataExprType) ResourceType(org.apache.hadoop.hive.metastore.api.ResourceType) PrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType) Type(org.apache.hadoop.hive.metastore.api.Type) HiveObjectType(org.apache.hadoop.hive.metastore.api.HiveObjectType) FunctionType(org.apache.hadoop.hive.metastore.api.FunctionType) MType(org.apache.hadoop.hive.metastore.model.MType) PartitionEventType(org.apache.hadoop.hive.metastore.api.PartitionEventType) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList)
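
getType converts the JDO model objects (MType, MFieldSchema) back into Thrift API objects (Type, FieldSchema). The ObjectStore needs the opposite mapping whenever a Thrift object is persisted. A hedged sketch of that direction follows, assuming MFieldSchema exposes a (name, type, comment) constructor that mirrors the getters used above; the helper name is ours, not necessarily the one used in ObjectStore.

// Sketch of the FieldSchema -> MFieldSchema direction (API object to JDO model).
// Null-safe, like getType above: returns null when no fields are given.
private List<MFieldSchema> convertToMFieldSchemasSketch(List<FieldSchema> fields) {
    List<MFieldSchema> mFields = null;
    if (fields != null) {
        mFields = new ArrayList<MFieldSchema>(fields.size());
        for (FieldSchema field : fields) {
            mFields.add(new MFieldSchema(field.getName(), field.getType(), field.getComment()));
        }
    }
    return mFields;
}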

Example 3 with MFieldSchema

Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache.

From the class ObjectStore, method addForeignKeys.

private void addForeignKeys(List<SQLForeignKey> fks, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> mpkfks = new ArrayList<MConstraint>();
    String currentConstraintName = null;
    for (int i = 0; i < fks.size(); i++) {
        AttachedMTableInfo nParentTable = getMTable(fks.get(i).getPktable_db(), fks.get(i).getPktable_name(), retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + fks.get(i).getPktable_name());
        }
        AttachedMTableInfo nChildTable = getMTable(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(), retrieveCD);
        MTable childTable = nChildTable.mtbl;
        if (childTable == null) {
            throw new InvalidObjectException("Child table not found: " + fks.get(i).getFktable_name());
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        List<MFieldSchema> parentCols = parentCD == null ? null : parentCD.getCols();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCols, fks.get(i).getPkcolumn_name());
        if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + fks.get(i).getPkcolumn_name());
        }
        MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
        List<MFieldSchema> childCols = childCD.getCols();
        int childIntegerIndex = getColumnIndexFromTableColumns(childCols, fks.get(i).getFkcolumn_name());
        if (childIntegerIndex == -1) {
            throw new InvalidObjectException("Child column not found: " + fks.get(i).getFkcolumn_name());
        }
        if (fks.get(i).getFk_name() == null) {
            // When no constraint name is supplied, generate one only for the first
            // key in the sequence (key_seq == 1); the remaining keys of the same
            // foreign key reuse it, which keeps the generated constraint name unique.
            if (fks.get(i).getKey_seq() == 1) {
                currentConstraintName = generateConstraintName(fks.get(i).getFktable_db(), fks.get(i).getFktable_name(), fks.get(i).getPktable_db(), fks.get(i).getPktable_name(), fks.get(i).getPkcolumn_name(), fks.get(i).getFkcolumn_name(), "fk");
            }
        } else {
            currentConstraintName = fks.get(i).getFk_name();
        }
        Integer updateRule = fks.get(i).getUpdate_rule();
        Integer deleteRule = fks.get(i).getDelete_rule();
        int enableValidateRely = (fks.get(i).isEnable_cstr() ? 4 : 0) + (fks.get(i).isValidate_cstr() ? 2 : 0) + (fks.get(i).isRely_cstr() ? 1 : 0);
        MConstraint mpkfk = new MConstraint(currentConstraintName, MConstraint.FOREIGN_KEY_CONSTRAINT, fks.get(i).getKey_seq(), deleteRule, updateRule, enableValidateRely, parentTable, childTable, parentCD, childCD, childIntegerIndex, parentIntegerIndex);
        mpkfks.add(mpkfk);
    }
    pm.makePersistentAll(mpkfks);
}
Also used : MTable(org.apache.hadoop.hive.metastore.model.MTable) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) ArrayList(java.util.ArrayList) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor)
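
Both column lookups in addForeignKeys go through getColumnIndexFromTableColumns, which returns -1 when the named column is not present in the column descriptor's MFieldSchema list. Its body is not shown in this example; the sketch below illustrates what such a lookup plausibly does (a null-safe, case-insensitive name match) and is not the actual implementation.

// Illustration only: position of a column (matched by name, case-insensitively)
// within a column descriptor's field list, or -1 if the list is null or the
// column is absent.
private int getColumnIndexFromTableColumnsSketch(List<MFieldSchema> cols, String colName) {
    if (cols == null) {
        return -1;
    }
    for (int i = 0; i < cols.size(); i++) {
        if (cols.get(i).getName().equalsIgnoreCase(colName)) {
            return i;
        }
    }
    return -1;
}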

Example 4 with MFieldSchema

Use of org.apache.hadoop.hive.metastore.model.MFieldSchema in project hive by apache.

From the class ObjectStore, method convertToStorageDescriptor.

// MSD and SD should be the same objects; not sure how to make them the same right now.
// MSerDeInfo and SerDeInfo should be the same as well.
private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd, boolean noFS) throws MetaException {
    if (msd == null) {
        return null;
    }
    List<MFieldSchema> mFieldSchemas = msd.getCD() == null ? null : msd.getCD().getCols();
    StorageDescriptor sd = new StorageDescriptor(noFS ? null : convertToFieldSchemas(mFieldSchemas), msd.getLocation(), msd.getInputFormat(), msd.getOutputFormat(), msd.isCompressed(), msd.getNumBuckets(), convertToSerDeInfo(msd.getSerDeInfo()), convertList(msd.getBucketCols()), convertToOrders(msd.getSortCols()), convertMap(msd.getParameters()));
    SkewedInfo skewedInfo = new SkewedInfo(convertList(msd.getSkewedColNames()), convertToSkewedValues(msd.getSkewedColValues()), covertToSkewedMap(msd.getSkewedColValueLocationMaps()));
    sd.setSkewedInfo(skewedInfo);
    sd.setStoredAsSubDirectories(msd.isStoredAsSubDirectories());
    return sd;
}
Also used : SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) MStorageDescriptor(org.apache.hadoop.hive.metastore.model.MStorageDescriptor)
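
convertToStorageDescriptor relies on convertToFieldSchemas to turn the column descriptor's MFieldSchema list into API FieldSchema objects (when noFS is true, null is passed instead and the columns are skipped). That helper's body is not shown here; a hedged sketch, assuming it mirrors the per-field mapping in Example 2 and is null-safe because msd.getCD() can be null:

// Sketch of the MFieldSchema -> FieldSchema mapping used by convertToStorageDescriptor;
// details may differ from the real helper, but the per-field mapping matches Example 2.
private List<FieldSchema> convertToFieldSchemasSketch(List<MFieldSchema> mkeys) {
    List<FieldSchema> keys = null;
    if (mkeys != null) {
        keys = new ArrayList<FieldSchema>(mkeys.size());
        for (MFieldSchema mkey : mkeys) {
            keys.add(new FieldSchema(mkey.getName(), mkey.getType(), mkey.getComment()));
        }
    }
    return keys;
}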

Aggregations

MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema) 4
ArrayList (java.util.ArrayList) 3
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 1
FileMetadataExprType (org.apache.hadoop.hive.metastore.api.FileMetadataExprType) 1
FunctionType (org.apache.hadoop.hive.metastore.api.FunctionType) 1
HiveObjectType (org.apache.hadoop.hive.metastore.api.HiveObjectType) 1
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 1
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 1
PartitionEventType (org.apache.hadoop.hive.metastore.api.PartitionEventType) 1
PrincipalType (org.apache.hadoop.hive.metastore.api.PrincipalType) 1
ResourceType (org.apache.hadoop.hive.metastore.api.ResourceType) 1
SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo) 1
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor) 1
Type (org.apache.hadoop.hive.metastore.api.Type) 1
MColumnDescriptor (org.apache.hadoop.hive.metastore.model.MColumnDescriptor) 1
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint) 1
MPartitionColumnPrivilege (org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege) 1
MPartitionPrivilege (org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) 1
MStorageDescriptor (org.apache.hadoop.hive.metastore.model.MStorageDescriptor) 1
MTable (org.apache.hadoop.hive.metastore.model.MTable) 1