Example 26 with InvalidObjectException

Use of org.apache.hadoop.hive.metastore.api.InvalidObjectException in project hive by apache.

The class ObjectStore, method addPrimaryKeys.

private void addPrimaryKeys(List<SQLPrimaryKey> pks, boolean retrieveCD) throws InvalidObjectException, MetaException {
    List<MConstraint> mpks = new ArrayList<MConstraint>();
    // Declared outside the loop so all columns of a composite key share one name.
    String constraintName = null;
    for (SQLPrimaryKey pk : pks) {
        AttachedMTableInfo nParentTable = getMTable(pk.getTable_db(), pk.getTable_name(), retrieveCD);
        MTable parentTable = nParentTable.mtbl;
        if (parentTable == null) {
            throw new InvalidObjectException("Parent table not found: " + pk.getTable_name());
        }
        MColumnDescriptor parentCD = retrieveCD ? nParentTable.mcd : parentTable.getSd().getCD();
        int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD == null ? null : parentCD.getCols(), pk.getColumn_name());
        if (parentIntegerIndex == -1) {
            throw new InvalidObjectException("Parent column not found: " + pk.getColumn_name());
        }
        if (getPrimaryKeyConstraintName(parentTable.getDatabase().getName(), parentTable.getTableName()) != null) {
            throw new MetaException("Primary key already exists for: " + parentTable.getDatabase().getName() + "." + pk.getTable_name());
        }
        if (pk.getPk_name() == null) {
            // Generate a name only for the first key column; later columns reuse it.
            if (pk.getKey_seq() == 1) {
                constraintName = generateConstraintName(pk.getTable_db(), pk.getTable_name(), pk.getColumn_name(), "pk");
            }
        } else {
            constraintName = pk.getPk_name();
        }
        // Pack the constraint traits into a bitmask: ENABLE=4, VALIDATE=2, RELY=1.
        int enableValidateRely = (pk.isEnable_cstr() ? 4 : 0) + (pk.isValidate_cstr() ? 2 : 0) + (pk.isRely_cstr() ? 1 : 0);
        MConstraint mpk = new MConstraint(constraintName, MConstraint.PRIMARY_KEY_CONSTRAINT, pk.getKey_seq(), null, null, enableValidateRely, parentTable, null, parentCD, null, null, parentIntegerIndex);
        mpks.add(mpk);
    }
    pm.makePersistentAll(mpks);
}
Also used : MTable(org.apache.hadoop.hive.metastore.model.MTable) ArrayList(java.util.ArrayList) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
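
For context, a hedged sketch of how a caller might build the SQLPrimaryKey list this method consumes. The database, table, and column names are made up, and the all-args Thrift constructor is assumed to mirror the getters used above (table_db, table_name, column_name, key_seq, pk_name, enable_cstr, validate_cstr, rely_cstr).

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

// Hypothetical composite key on (id, region) for default.orders.
List<SQLPrimaryKey> pks = Arrays.asList(
    new SQLPrimaryKey("default", "orders", "id",     1, null, false, false, true),
    new SQLPrimaryKey("default", "orders", "region", 2, null, false, false, true));
// With pk_name left null, addPrimaryKeys generates one constraint name at
// key_seq == 1 and reuses it for the second column; RELY alone encodes to
// enableValidateRely == 1.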

Example 27 with InvalidObjectException

Use of org.apache.hadoop.hive.metastore.api.InvalidObjectException in project hive by apache.

The class HCatClientHMSImpl, method addPartition.

@Override
public void addPartition(HCatAddPartitionDesc partInfo) throws HCatException {
    Table tbl = null;
    try {
        tbl = hmsClient.getTable(partInfo.getDatabaseName(), partInfo.getTableName());
        // TODO: Should be moved out.
        if (tbl.getPartitionKeysSize() == 0) {
            throw new HCatException("The table " + partInfo.getTableName() + " is not partitioned.");
        }
        HCatTable hcatTable = new HCatTable(tbl);
        HCatPartition hcatPartition = partInfo.getHCatPartition();
        // This is only required to support the deprecated methods in HCatAddPartitionDesc.Builder.
        if (hcatPartition == null) {
            hcatPartition = partInfo.getHCatPartition(hcatTable);
        }
        hmsClient.add_partition(hcatPartition.toHivePartition());
    } catch (InvalidObjectException e) {
        throw new HCatException("InvalidObjectException while adding partition.", e);
    } catch (AlreadyExistsException e) {
        throw new HCatException("AlreadyExistsException while adding partition.", e);
    } catch (MetaException e) {
        throw new HCatException("MetaException while adding partition.", e);
    } catch (NoSuchObjectException e) {
        throw new ObjectNotFoundException("The table " + partInfo.getTableName() + " could not be found.", e);
    } catch (TException e) {
        throw new ConnectionFailureException("TException while adding partition.", e);
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) HCatException(org.apache.hive.hcatalog.common.HCatException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
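
A hedged caller-side sketch of the path this method supports, using the deprecated HCatAddPartitionDesc.Builder that the null-check above accommodates; the configuration, partition spec, and names are illustrative only.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatAddPartitionDesc;
import org.apache.hive.hcatalog.api.HCatClient;

HCatClient client = HCatClient.create(new Configuration());
Map<String, String> partitionSpec = new HashMap<String, String>();
partitionSpec.put("dt", "2017-01-01");
// A null location lets the metastore choose the table-default partition path.
HCatAddPartitionDesc desc =
    HCatAddPartitionDesc.create("default", "clicks", null, partitionSpec).build();
client.addPartition(desc);
client.close();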

Example 28 with InvalidObjectException

Use of org.apache.hadoop.hive.metastore.api.InvalidObjectException in project hive by apache.

The class HCatClientHMSImpl, method addPartitions.

/**
 * @param partInfoList the partitions to add; all must belong to the same table
 * @return the number of partitions added
 * @throws HCatException if the partitions cannot be added
 * @throws ConnectionFailureException if the metastore cannot be reached
 * @see org.apache.hive.hcatalog.api.HCatClient#addPartitions(java.util.List)
 */
@Override
public int addPartitions(List<HCatAddPartitionDesc> partInfoList) throws HCatException {
    int numPartitions = -1;
    if ((partInfoList == null) || (partInfoList.isEmpty())) {
        throw new HCatException("The partition list is null or empty.");
    }
    Table tbl = null;
    try {
        // Every partition in the batch is resolved against the first descriptor's table.
        tbl = hmsClient.getTable(partInfoList.get(0).getDatabaseName(), partInfoList.get(0).getTableName());
        HCatTable hcatTable = new HCatTable(tbl);
        ArrayList<Partition> ptnList = new ArrayList<Partition>();
        for (HCatAddPartitionDesc desc : partInfoList) {
            HCatPartition hCatPartition = desc.getHCatPartition();
            // This is required only to support the deprecated HCatAddPartitionDesc.Builder interfaces.
            if (hCatPartition == null) {
                hCatPartition = desc.getHCatPartition(hcatTable);
            }
            ptnList.add(hCatPartition.toHivePartition());
        }
        numPartitions = hmsClient.add_partitions(ptnList);
    } catch (InvalidObjectException e) {
        throw new HCatException("InvalidObjectException while adding partition.", e);
    } catch (AlreadyExistsException e) {
        throw new HCatException("AlreadyExistsException while adding partition.", e);
    } catch (MetaException e) {
        throw new HCatException("MetaException while adding partition.", e);
    } catch (NoSuchObjectException e) {
        throw new ObjectNotFoundException("The table " + partInfoList.get(0).getTableName() + " could not be found.", e);
    } catch (TException e) {
        throw new ConnectionFailureException("TException while adding partition.", e);
    }
    return numPartitions;
}
Also used : TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) HCatException(org.apache.hive.hcatalog.common.HCatException) ArrayList(java.util.ArrayList) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
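
Since the whole batch is resolved against the first descriptor's table, a caller groups partitions per table, as in this hedged sketch (table and partition values hypothetical):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.hcatalog.api.HCatAddPartitionDesc;
import org.apache.hive.hcatalog.api.HCatClient;

HCatClient client = HCatClient.create(new Configuration());
List<HCatAddPartitionDesc> batch = new ArrayList<HCatAddPartitionDesc>();
for (String dt : new String[] { "2017-01-01", "2017-01-02" }) {
    Map<String, String> spec = new HashMap<String, String>();
    spec.put("dt", dt);
    batch.add(HCatAddPartitionDesc.create("default", "clicks", null, spec).build());
}
int added = client.addPartitions(batch);  // one round trip for both partitions
client.close();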

Example 29 with InvalidObjectException

Use of org.apache.hadoop.hive.metastore.api.InvalidObjectException in project hive by apache.

The class StatObjectConverter, method convertToMTableColumnStatistics.

// JDO: convert a Thrift-level ColumnStatisticsObj into the MTableColumnStatistics model object.
public static MTableColumnStatistics convertToMTableColumnStatistics(MTable table, ColumnStatisticsDesc statsDesc, ColumnStatisticsObj statsObj) throws NoSuchObjectException, MetaException, InvalidObjectException {
    if (statsObj == null || statsDesc == null) {
        throw new InvalidObjectException("Invalid column stats object");
    }
    MTableColumnStatistics mColStats = new MTableColumnStatistics();
    mColStats.setTable(table);
    mColStats.setDbName(statsDesc.getDbName());
    mColStats.setTableName(statsDesc.getTableName());
    mColStats.setLastAnalyzed(statsDesc.getLastAnalyzed());
    mColStats.setColName(statsObj.getColName());
    mColStats.setColType(statsObj.getColType());
    if (statsObj.getStatsData().isSetBooleanStats()) {
        BooleanColumnStatsData boolStats = statsObj.getStatsData().getBooleanStats();
        mColStats.setBooleanStats(boolStats.isSetNumTrues() ? boolStats.getNumTrues() : null, boolStats.isSetNumFalses() ? boolStats.getNumFalses() : null, boolStats.isSetNumNulls() ? boolStats.getNumNulls() : null);
    } else if (statsObj.getStatsData().isSetLongStats()) {
        LongColumnStatsData longStats = statsObj.getStatsData().getLongStats();
        mColStats.setLongStats(longStats.isSetNumNulls() ? longStats.getNumNulls() : null, longStats.isSetNumDVs() ? longStats.getNumDVs() : null, longStats.isSetLowValue() ? longStats.getLowValue() : null, longStats.isSetHighValue() ? longStats.getHighValue() : null);
    } else if (statsObj.getStatsData().isSetDoubleStats()) {
        DoubleColumnStatsData doubleStats = statsObj.getStatsData().getDoubleStats();
        mColStats.setDoubleStats(doubleStats.isSetNumNulls() ? doubleStats.getNumNulls() : null, doubleStats.isSetNumDVs() ? doubleStats.getNumDVs() : null, doubleStats.isSetLowValue() ? doubleStats.getLowValue() : null, doubleStats.isSetHighValue() ? doubleStats.getHighValue() : null);
    } else if (statsObj.getStatsData().isSetDecimalStats()) {
        DecimalColumnStatsData decimalStats = statsObj.getStatsData().getDecimalStats();
        String low = decimalStats.isSetLowValue() ? createJdoDecimalString(decimalStats.getLowValue()) : null;
        String high = decimalStats.isSetHighValue() ? createJdoDecimalString(decimalStats.getHighValue()) : null;
        mColStats.setDecimalStats(decimalStats.isSetNumNulls() ? decimalStats.getNumNulls() : null, decimalStats.isSetNumDVs() ? decimalStats.getNumDVs() : null, low, high);
    } else if (statsObj.getStatsData().isSetStringStats()) {
        StringColumnStatsData stringStats = statsObj.getStatsData().getStringStats();
        mColStats.setStringStats(stringStats.isSetNumNulls() ? stringStats.getNumNulls() : null, stringStats.isSetNumDVs() ? stringStats.getNumDVs() : null, stringStats.isSetMaxColLen() ? stringStats.getMaxColLen() : null, stringStats.isSetAvgColLen() ? stringStats.getAvgColLen() : null);
    } else if (statsObj.getStatsData().isSetBinaryStats()) {
        BinaryColumnStatsData binaryStats = statsObj.getStatsData().getBinaryStats();
        mColStats.setBinaryStats(binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null, binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null, binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
    } else if (statsObj.getStatsData().isSetDateStats()) {
        DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
        mColStats.setDateStats(dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null, dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null, dateStats.isSetLowValue() ? dateStats.getLowValue().getDaysSinceEpoch() : null, dateStats.isSetHighValue() ? dateStats.getHighValue().getDaysSinceEpoch() : null);
    }
    return mColStats;
}
Also used : BooleanColumnStatsData(org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData) DoubleColumnStatsData(org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData) DecimalColumnStatsData(org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData) DateColumnStatsData(org.apache.hadoop.hive.metastore.api.DateColumnStatsData) MTableColumnStatistics(org.apache.hadoop.hive.metastore.model.MTableColumnStatistics) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) StringColumnStatsData(org.apache.hadoop.hive.metastore.api.StringColumnStatsData) LongColumnStatsData(org.apache.hadoop.hive.metastore.api.LongColumnStatsData) BinaryColumnStatsData(org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData)
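
A hedged sketch of the Thrift-side input this converter expects, here for a bigint column; the required-args constructors are assumed from the generated Thrift API, and the table, column names, and MTable placeholder are illustrative.

import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.model.MTable;
import org.apache.hadoop.hive.metastore.model.MTableColumnStatistics;

// numNulls and numDVs are required fields; low/high values are optional.
LongColumnStatsData longStats = new LongColumnStatsData(10L, 45L);
longStats.setLowValue(1L);
longStats.setHighValue(1000L);

ColumnStatisticsObj statsObj = new ColumnStatisticsObj(
    "user_id", "bigint", ColumnStatisticsData.longStats(longStats));
ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, "default", "orders");
statsDesc.setLastAnalyzed(System.currentTimeMillis() / 1000);

// Inside the metastore the MTable comes from ObjectStore.getMTable; a placeholder here.
MTable mTable = new MTable();
MTableColumnStatistics mStats =
    StatObjectConverter.convertToMTableColumnStatistics(mTable, statsDesc, statsObj);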

Example 30 with InvalidObjectException

Use of org.apache.hadoop.hive.metastore.api.InvalidObjectException in project hive by apache.

The class ObjectStore, method alterTable.

@Override
public void alterTable(String dbname, String name, Table newTable) throws InvalidObjectException, MetaException {
    boolean success = false;
    try {
        openTransaction();
        name = HiveStringUtils.normalizeIdentifier(name);
        dbname = HiveStringUtils.normalizeIdentifier(dbname);
        MTable newt = convertToMTable(newTable);
        if (newt == null) {
            throw new InvalidObjectException("new table is invalid");
        }
        MTable oldt = getMTable(dbname, name);
        if (oldt == null) {
            throw new MetaException("table " + dbname + "." + name + " doesn't exist");
        }
        // Copy the alterable fields from the new table onto the old one: name, owner,
        // parameters, storage descriptor contents, retention, partition keys, table type,
        // access time, view text, and the rewrite flag.
        oldt.setDatabase(newt.getDatabase());
        oldt.setTableName(HiveStringUtils.normalizeIdentifier(newt.getTableName()));
        oldt.setParameters(newt.getParameters());
        oldt.setOwner(newt.getOwner());
        // Fully copy over the contents of the new SD into the old SD,
        // so we don't create an extra SD in the metastore db that has no references.
        copyMSD(newt.getSd(), oldt.getSd());
        oldt.setRetention(newt.getRetention());
        oldt.setPartitionKeys(newt.getPartitionKeys());
        oldt.setTableType(newt.getTableType());
        oldt.setLastAccessTime(newt.getLastAccessTime());
        oldt.setViewOriginalText(newt.getViewOriginalText());
        oldt.setViewExpandedText(newt.getViewExpandedText());
        oldt.setRewriteEnabled(newt.isRewriteEnabled());
        // commit the changes
        success = commitTransaction();
    } finally {
        if (!success) {
            // An exception above skipped the commit; undo any partial changes.
            rollbackTransaction();
        }
    }
}
Also used : MTable(org.apache.hadoop.hive.metastore.model.MTable) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
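
For reference, a hedged client-side sketch of an alteration that ends up in this method: renaming a table through HiveMetaStoreClient, whose alter_table call the server routes to ObjectStore.alterTable. Connection details and names are illustrative.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;

HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
Table t = client.getTable("default", "orders");
t.setTableName("orders_v2");  // one of the fields copied over above
client.alter_table("default", "orders", t);
client.close();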

Aggregations

InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 36 uses
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 21 uses
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 21 uses
Table (org.apache.hadoop.hive.metastore.api.Table): 14 uses
TException (org.apache.thrift.TException): 14 uses
ArrayList (java.util.ArrayList): 13 uses
Partition (org.apache.hadoop.hive.metastore.api.Partition): 11 uses
IOException (java.io.IOException): 8 uses
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 8 uses
InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException): 8 uses
MTable (org.apache.hadoop.hive.metastore.model.MTable): 8 uses
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 7 uses
InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException): 6 uses
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException): 5 uses
List (java.util.List): 5 uses
TableNotFoundException (com.netflix.metacat.common.server.connectors.exception.TableNotFoundException): 4 uses
UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException): 4 uses
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 4 uses
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 3 uses
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo): 3 uses