
Example 81 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.

From the class TestExchangePartitions, the method testExchangePartitionsOneFail:

@Test
public void testExchangePartitionsOneFail() throws Exception {
    // Pre-create a partition in the destination table whose values match one of the source partitions.
    Partition partition = buildPartition(destTable, Lists.newArrayList("2017", "march", "22"), null);
    client.add_partition(partition);
    // Partition spec that selects every partition of year 2017 for the exchange.
    Map<String, String> partitionSpecs = getPartitionSpec(Lists.newArrayList("2017", "", ""));
    try {
        client.exchange_partitions(partitionSpecs, DB_NAME, sourceTable.getTableName(), DB_NAME, destTable.getTableName());
        Assert.fail("Exception should have been thrown as one of the partitions already exists in the dest table.");
    } catch (MetaException e) {
        // Expected exception
    }
    checkRemainingPartitions(sourceTable, destTable, Lists.newArrayList(partitions[0], partitions[2], partitions[3], partitions[4]));
    List<Partition> partitionsInDestTable = client.listPartitions(destTable.getDbName(), destTable.getTableName(), MAX);
    Assert.assertEquals(1, partitionsInDestTable.size());
    Assert.assertEquals(partitions[1].getValues(), partitionsInDestTable.get(0).getValues());
    Assert.assertTrue(metaStore.isPathExists(new Path(partitionsInDestTable.get(0).getSd().getLocation())));
    Partition resultPart = client.getPartition(sourceTable.getDbName(), sourceTable.getTableName(), partitions[1].getValues());
    Assert.assertNotNull(resultPart);
    Assert.assertTrue(metaStore.isPathExists(new Path(partitions[1].getSd().getLocation())));
}
Also used : Path(org.apache.hadoop.fs.Path) Partition(org.apache.hadoop.hive.metastore.api.Partition) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
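
For reference, a caller outside the test harness would typically guard exchange_partitions the same way the test does. Below is a minimal sketch, assuming an already-connected IMetaStoreClient; the helper name exchangeQuietly is made up for illustration.

import java.util.Map;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.thrift.TException;

public class ExchangePartitionsSketch {
    // Hypothetical helper: returns true if the exchange went through,
    // false if the metastore rejected it with MetaException
    // (for example because a partition already exists in the destination table).
    static boolean exchangeQuietly(IMetaStoreClient client, Map<String, String> partitionSpecs,
            String sourceDb, String sourceTable, String destDb, String destTable) throws TException {
        try {
            client.exchange_partitions(partitionSpecs, sourceDb, sourceTable, destDb, destTable);
            return true;
        } catch (MetaException e) {
            return false;
        }
    }
}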

Example 82 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.

From the class FileOutputFormatContainer, the method checkOutputSpecs:

@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
    IMetaStoreClient client = null;
    try {
        HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
        client = HCatUtil.getHiveMetastoreClient(hiveConf);
        handleDuplicatePublish(context, jobInfo, client, new Table(jobInfo.getTableInfo().getTable()));
    } catch (MetaException e) {
        throw new IOException(e);
    } catch (TException e) {
        throw new IOException(e);
    } finally {
        HCatUtil.closeHiveClientQuietly(client);
    }
    if (!jobInfo.isDynamicPartitioningUsed()) {
        JobConf jobConf = new JobConf(context.getConfiguration());
        getBaseOutputFormat().checkOutputSpecs(null, jobConf);
        // checkOutputSpecs might have set properties that the context needs to reflect
        HCatUtil.copyConf(jobConf, context.getConfiguration());
    }
}
Also used : TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveConf(org.apache.hadoop.hive.conf.HiveConf) IOException(java.io.IOException) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) JobConf(org.apache.hadoop.mapred.JobConf) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
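
The surrounding pattern recurs throughout HCatalog: obtain a HiveConf, open a metastore client through HCatUtil, translate Thrift-level failures into IOException, and close the client quietly in a finally block. A minimal sketch of that pattern follows, assuming the org.apache.hive.hcatalog.common package for HCatUtil and using a simple tableExists lookup as the illustrative call.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.thrift.TException;

public class MetastoreClientSketch {
    static boolean tableExists(Configuration conf, String dbName, String tableName) throws IOException {
        IMetaStoreClient client = null;
        try {
            HiveConf hiveConf = HCatUtil.getHiveConf(conf);
            client = HCatUtil.getHiveMetastoreClient(hiveConf);
            return client.tableExists(dbName, tableName);
        } catch (TException e) {
            // MetaException extends TException, so metastore-level failures end up here too.
            throw new IOException(e);
        } finally {
            // Never let a client leak, even when the lookup fails.
            HCatUtil.closeHiveClientQuietly(client);
        }
    }
}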

Example 83 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.

From the class ObjectStore, the method grantRole:

@Override
public boolean grantRole(Role role, String userName, PrincipalType principalType, String grantor, PrincipalType grantorType, boolean grantOption) throws MetaException, NoSuchObjectException, InvalidObjectException {
    boolean success = false;
    boolean commited = false;
    try {
        openTransaction();
        MRoleMap roleMap = null;
        try {
            roleMap = this.getMSecurityUserRoleMap(userName, principalType, role.getRoleName());
        } catch (Exception e) {
            // Ignore lookup failures: no existing mapping simply means the role has not been granted to this principal yet.
        }
        if (roleMap != null) {
            throw new InvalidObjectException("Principal " + userName + " already has the role " + role.getRoleName());
        }
        if (principalType == PrincipalType.ROLE) {
            validateRole(userName);
        }
        MRole mRole = getMRole(role.getRoleName());
        long now = System.currentTimeMillis() / 1000;
        MRoleMap roleMember = new MRoleMap(userName, principalType.toString(), mRole, (int) now, grantor, grantorType.toString(), grantOption);
        pm.makePersistent(roleMember);
        commited = commitTransaction();
        success = true;
    } finally {
        if (!commited) {
            rollbackTransaction();
        }
    }
    return success;
}
Also used : MRole(org.apache.hadoop.hive.metastore.model.MRole) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MRoleMap(org.apache.hadoop.hive.metastore.model.MRoleMap) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) JDOCanRetryException(javax.jdo.JDOCanRetryException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) JDOException(javax.jdo.JDOException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException)
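
Callers of grantRole usually distinguish the "already granted" case, which surfaces as InvalidObjectException, from genuine metastore failures. A hypothetical caller sketch follows, assuming a RawStore handle (ObjectStore is one implementation); the grantor values are illustrative placeholders.

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.Role;

public class GrantRoleSketch {
    static boolean grantIfMissing(RawStore store, Role role, String userName)
            throws MetaException, NoSuchObjectException {
        try {
            // The grantor, grantor type and grant option below are illustrative placeholders.
            return store.grantRole(role, userName, PrincipalType.USER, "admin", PrincipalType.ROLE, false);
        } catch (InvalidObjectException e) {
            // The principal already holds the role; treat the grant as a no-op.
            return false;
        }
    }
}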

Example 84 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.

From the class ObjectStore, the method generateConstraintName:

private String generateConstraintName(String... parameters) throws MetaException {
    int hashcode = ArrayUtils.toString(parameters).hashCode() & 0xfffffff;
    int counter = 0;
    final int MAX_RETRIES = 10;
    while (counter < MAX_RETRIES) {
        String currName = (parameters.length == 0 ? "constraint_" : parameters[parameters.length - 1]) + "_" + hashcode + "_" + System.currentTimeMillis() + "_" + (counter++);
        if (!constraintNameAlreadyExists(currName)) {
            return currName;
        }
    }
    throw new MetaException("Error while trying to generate the constraint name for " + ArrayUtils.toString(parameters));
}
Also used : MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
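
The method tries up to ten candidate names, each combining the last parameter (or a "constraint_" prefix), a masked hash of all parameters, the current timestamp, and a counter, and gives up with MetaException once every candidate collides. A self-contained sketch of the same retry-on-collision idea follows, with the metastore lookup replaced by an in-memory Set standing in for constraintNameAlreadyExists; the commons-lang3 ArrayUtils import is an assumption.

import java.util.Set;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hive.metastore.api.MetaException;

public class ConstraintNameSketch {
    // `existingNames` stands in for constraintNameAlreadyExists(); everything else mirrors the pattern above.
    static String generateConstraintName(Set<String> existingNames, String... parameters) throws MetaException {
        int hashcode = ArrayUtils.toString(parameters).hashCode() & 0xfffffff;
        final int maxRetries = 10;
        for (int counter = 0; counter < maxRetries; counter++) {
            String prefix = parameters.length == 0 ? "constraint_" : parameters[parameters.length - 1];
            String candidate = prefix + "_" + hashcode + "_" + System.currentTimeMillis() + "_" + counter;
            if (!existingNames.contains(candidate)) {
                return candidate;
            }
        }
        throw new MetaException("Error while trying to generate the constraint name for "
                + ArrayUtils.toString(parameters));
    }
}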

Example 85 with MetaException

Use of org.apache.hadoop.hive.metastore.api.MetaException in project hive by apache.

From the class ObjectStore, the method addPartitions:

@Override
public boolean addPartitions(String dbName, String tblName, List<Partition> parts) throws InvalidObjectException, MetaException {
    boolean success = false;
    openTransaction();
    try {
        List<MTablePrivilege> tabGrants = null;
        List<MTableColumnPrivilege> tabColumnGrants = null;
        MTable table = this.getMTable(dbName, tblName);
        if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
            tabGrants = this.listAllTableGrants(dbName, tblName);
            tabColumnGrants = this.listTableAllColumnGrants(dbName, tblName);
        }
        List<Object> toPersist = new ArrayList<>();
        for (Partition part : parts) {
            if (!part.getTableName().equals(tblName) || !part.getDbName().equals(dbName)) {
                throw new MetaException("Partition does not belong to target table " + dbName + "." + tblName + ": " + part);
            }
            MPartition mpart = convertToMPart(part, true);
            toPersist.add(mpart);
            int now = (int) (System.currentTimeMillis() / 1000);
            if (tabGrants != null) {
                for (MTablePrivilege tab : tabGrants) {
                    toPersist.add(new MPartitionPrivilege(tab.getPrincipalName(), tab.getPrincipalType(), mpart, tab.getPrivilege(), now, tab.getGrantor(), tab.getGrantorType(), tab.getGrantOption()));
                }
            }
            if (tabColumnGrants != null) {
                for (MTableColumnPrivilege col : tabColumnGrants) {
                    toPersist.add(new MPartitionColumnPrivilege(col.getPrincipalName(), col.getPrincipalType(), mpart, col.getColumnName(), col.getPrivilege(), now, col.getGrantor(), col.getGrantorType(), col.getGrantOption()));
                }
            }
        }
        if (CollectionUtils.isNotEmpty(toPersist)) {
            pm.makePersistentAll(toPersist);
            pm.flush();
        }
        success = commitTransaction();
    } finally {
        if (!success) {
            rollbackTransaction();
        }
    }
    return success;
}
Also used : MPartition(org.apache.hadoop.hive.metastore.model.MPartition) Partition(org.apache.hadoop.hive.metastore.api.Partition) ArrayList(java.util.ArrayList) MPartitionColumnPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MTable(org.apache.hadoop.hive.metastore.model.MTable) MPartitionPrivilege(org.apache.hadoop.hive.metastore.model.MPartitionPrivilege) MTablePrivilege(org.apache.hadoop.hive.metastore.model.MTablePrivilege) MTableColumnPrivilege(org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) MPartition(org.apache.hadoop.hive.metastore.model.MPartition)
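
On the client side, batch partition creation of this kind is typically driven through IMetaStoreClient.add_partitions. A minimal hedged sketch of such a caller follows, assuming the Partition objects have already been built with dbName and tableName fields that match the target table.

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.thrift.TException;

public class AddPartitionsSketch {
    static int addAll(IMetaStoreClient client, List<Partition> parts) throws TException {
        try {
            // Returns the number of partitions the metastore actually added.
            return client.add_partitions(parts);
        } catch (MetaException e) {
            // Raised, for example, when a partition's dbName/tableName does not match the target table.
            throw new TException("add_partitions was rejected by the metastore", e);
        }
    }
}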

Aggregations

MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 318
IOException (java.io.IOException) 123
ArrayList (java.util.ArrayList) 95
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 74
TException (org.apache.thrift.TException) 67
Table (org.apache.hadoop.hive.metastore.api.Table) 59
Partition (org.apache.hadoop.hive.metastore.api.Partition) 57
SQLException (java.sql.SQLException) 55
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException) 53
Path (org.apache.hadoop.fs.Path) 45
Connection (java.sql.Connection) 36
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException) 34
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException) 32
Statement (java.sql.Statement) 31
Test (org.junit.Test) 30
List (java.util.List) 25
Database (org.apache.hadoop.hive.metastore.api.Database) 25
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 25
ResultSet (java.sql.ResultSet) 22
UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException) 22