Search in sources:

Example 41 with InvalidOperationException

use of org.apache.hadoop.hive.metastore.api.InvalidOperationException in project hive by apache.

the class ObjectStore method handleSimpleAlter.

/**
 * Applies a "simple" alteration to a WM resource plan: namespace validation, rename,
 * query parallelism, default pool path, and/or a status switch. The
 * {@code WMNullableResourcePlan} change-set uses paired flags ({@code isSetX} plus
 * {@code isSetIsSetX}) so a caller can distinguish "set X to null" from "leave X untouched".
 *
 * @param name plan name to alter; {@code null} selects the currently active plan in the namespace
 * @param ns namespace the plan belongs to
 * @param changes requested changes; fields whose flags are unset are left as-is
 * @param canActivateDisabled passed through to {@code switchStatus} when a status change is requested
 * @param canDeactivate passed through to {@code switchStatus} when a status change is requested
 * @return the result of {@code switchStatus} when a status change was requested, otherwise {@code null}
 * @throws InvalidOperationException if the change-set conflicts with the plan's current state
 * @throws NoSuchObjectException if the plan (or the referenced default pool) cannot be found
 * @throws MetaException on metastore-level failures
 */
private WMFullResourcePlan handleSimpleAlter(String name, String ns, WMNullableResourcePlan changes, boolean canActivateDisabled, boolean canDeactivate) throws InvalidOperationException, NoSuchObjectException, MetaException {
    // NOTE(review): the third argument presumably toggles an "editable" check when no status
    // switch is requested — confirm against getMWMResourcePlan.
    MWMResourcePlan plan = name == null ? getActiveMWMResourcePlan(ns) : getMWMResourcePlan(name, ns, !changes.isSetStatus());
    // A plan can never be moved to a different namespace.
    boolean hasNsChange = changes.isSetNs() && !changes.getNs().equals(getNsOrDefault(plan.getNs()));
    if (hasNsChange) {
        throw new InvalidOperationException("Cannot change ns; from " + getNsOrDefault(plan.getNs()) + " to " + changes.getNs());
    }
    boolean hasNameChange = changes.isSetName() && !changes.getName().equals(name);
    // Verify that field changes are consistent with what Hive does. Note: we could handle this.
    // Renames and field edits are only allowed on a DISABLED plan, and never in the same
    // request as a status switch.
    if (changes.isSetIsSetQueryParallelism() || changes.isSetIsSetDefaultPoolPath() || hasNameChange) {
        if (changes.isSetStatus()) {
            throw new InvalidOperationException("Cannot change values during status switch.");
        } else if (plan.getStatus() != MWMResourcePlan.Status.DISABLED) {
            throw new InvalidOperationException("Resource plan must be disabled to edit it.");
        }
    }
    // Handle rename and other changes.
    if (changes.isSetName()) {
        String newName = normalizeIdentifier(changes.getName());
        if (newName.isEmpty()) {
            throw new InvalidOperationException("Cannot rename to empty value.");
        }
        // Renaming to the current name is a no-op.
        if (!newName.equals(plan.getName())) {
            plan.setName(newName);
        }
    }
    // The outer isSetIsSetQueryParallelism flag means "this field was touched"; the inner
    // isSetQueryParallelism distinguishes "set a value" from "clear the field".
    if (changes.isSetIsSetQueryParallelism() && changes.isIsSetQueryParallelism()) {
        if (changes.isSetQueryParallelism()) {
            if (changes.getQueryParallelism() <= 0) {
                throw new InvalidOperationException("queryParallelism should be positive.");
            }
            plan.setQueryParallelism(changes.getQueryParallelism());
        } else {
            plan.setQueryParallelism(null);
        }
    }
    // Same touched/value-vs-clear convention as queryParallelism above.
    if (changes.isSetIsSetDefaultPoolPath() && changes.isIsSetDefaultPoolPath()) {
        if (changes.isSetDefaultPoolPath()) {
            MWMPool pool = getPool(plan, changes.getDefaultPoolPath());
            plan.setDefaultPool(pool);
        } else {
            plan.setDefaultPool(null);
        }
    }
    // Handle the status change. Done last so the earlier edits are validated first;
    // switchStatus is the only path that yields a non-null result.
    if (changes.isSetStatus()) {
        return switchStatus(name, plan, changes.getStatus().name(), canActivateDisabled, canDeactivate);
    }
    return null;
}
Also used : MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan)

Example 42 with InvalidOperationException

use of org.apache.hadoop.hive.metastore.api.InvalidOperationException in project hive by apache.

the class TestAlterPartitions method testRenamePartitionNullOldPartList.

@Test
public void testRenamePartitionNullOldPartList() throws Exception {
    // Renaming a partition without supplying the old partition-value list must be rejected.
    createTable4PartColsParts(client);
    List<Partition> existing = client.listPartitions(DB_NAME, TABLE_NAME, (short) -1);
    Partition renamed = existing.get(3);
    renamed.setValues(Lists.newArrayList("2018", "01", "16"));
    try {
        client.renamePartition(DB_NAME, TABLE_NAME, null, renamed);
        Assert.fail("should throw");
    } catch (InvalidOperationException | TProtocolException expected) {
        // Expected: a null old-partition value list is an invalid rename request.
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) TProtocolException(org.apache.thrift.protocol.TProtocolException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)

Example 43 with InvalidOperationException

use of org.apache.hadoop.hive.metastore.api.InvalidOperationException in project hive by apache.

the class TestHiveMetaStore method testAlterTable.

/**
 * Exercises alter_table end to end: rejects invalid table names and column types,
 * rejects partition-key renames, allows partition-key comment changes, verifies a
 * valid rename moves the table data, and rejects altering to an invalid column type.
 */
@Test
public void testAlterTable() throws Exception {
    String dbName = "alterdb";
    String invTblName = "alterĀ§tbl";
    String tblName = "altertbl";
    try {
        client.dropTable(dbName, tblName);
        silentDropDatabase(dbName);
        String dbLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE_EXTERNAL) + "/_testDB_table_create_";
        String mgdLocation = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/_testDB_table_create_";
        new DatabaseBuilder().setName(dbName).setLocation(dbLocation).setManagedLocation(mgdLocation).create(client, conf);
        // Creating a table whose name contains an invalid character must fail.
        ArrayList<FieldSchema> invCols = new ArrayList<>(2);
        invCols.add(new FieldSchema("n-ame", ColumnType.STRING_TYPE_NAME, ""));
        invCols.add(new FieldSchema("in.come", ColumnType.INT_TYPE_NAME, ""));
        Table tbl = new TableBuilder().setDbName(dbName).setTableName(invTblName).setCols(invCols).build(conf);
        boolean failed = false;
        try {
            client.createTable(tbl);
        } catch (InvalidObjectException ex) {
            failed = true;
        }
        if (!failed) {
            assertTrue("Able to create table with invalid name: " + invTblName, false);
        }
        // create an invalid table which has wrong column type
        ArrayList<FieldSchema> invColsInvType = new ArrayList<>(2);
        invColsInvType.add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
        invColsInvType.add(new FieldSchema("income", "xyz", ""));
        tbl.setTableName(tblName);
        tbl.getSd().setCols(invColsInvType);
        boolean failChecker = false;
        try {
            client.createTable(tbl);
        } catch (InvalidObjectException ex) {
            failChecker = true;
        }
        if (!failChecker) {
            assertTrue("Able to create table with invalid column type: " + invTblName, false);
        }
        ArrayList<FieldSchema> cols = new ArrayList<>(2);
        cols.add(new FieldSchema("name", ColumnType.STRING_TYPE_NAME, ""));
        cols.add(new FieldSchema("income", ColumnType.INT_TYPE_NAME, ""));
        // create a valid table
        tbl.setTableName(tblName);
        tbl.getSd().setCols(cols);
        client.createTable(tbl);
        if (isThriftClient) {
            // The thrift path normalizes the table; re-read to get the server-side view.
            tbl = client.getTable(tbl.getDbName(), tbl.getTableName());
        }
        // now try to invalid alter table
        Table tbl2 = client.getTable(dbName, tblName);
        failed = false;
        try {
            tbl2.setTableName(invTblName);
            tbl2.getSd().setCols(invCols);
            client.alter_table(dbName, tblName, tbl2);
        } catch (InvalidOperationException ex) {
            failed = true;
        }
        if (!failed) {
            assertTrue("Able to rename table with invalid name: " + invTblName, false);
        }
        // try an invalid alter table with partition key name
        // BUG FIX: reset the flag here. It was left 'true' by the previous check, so a
        // missing exception from this alter_table call would previously go undetected.
        failed = false;
        Table tbl_pk = client.getTable(tbl.getDbName(), tbl.getTableName());
        List<FieldSchema> partitionKeys = tbl_pk.getPartitionKeys();
        for (FieldSchema fs : partitionKeys) {
            fs.setName("invalid_to_change_name");
            fs.setComment("can_change_comment");
        }
        tbl_pk.setPartitionKeys(partitionKeys);
        try {
            client.alter_table(dbName, tblName, tbl_pk);
        } catch (InvalidOperationException ex) {
            failed = true;
        }
        assertTrue("Should not have succeeded in altering partition key name", failed);
        // try a valid alter table partition key comment
        failed = false;
        tbl_pk = client.getTable(tbl.getDbName(), tbl.getTableName());
        partitionKeys = tbl_pk.getPartitionKeys();
        for (FieldSchema fs : partitionKeys) {
            fs.setComment("can_change_comment");
        }
        tbl_pk.setPartitionKeys(partitionKeys);
        try {
            client.alter_table(dbName, tblName, tbl_pk);
        } catch (InvalidOperationException ex) {
            failed = true;
        }
        assertFalse("Should not have failed alter table partition comment", failed);
        Table newT = client.getTable(tbl.getDbName(), tbl.getTableName());
        assertEquals(partitionKeys, newT.getPartitionKeys());
        // try a valid alter table
        tbl2.setTableName(tblName + "_renamed");
        tbl2.getSd().setCols(cols);
        tbl2.getSd().setNumBuckets(32);
        client.alter_table(dbName, tblName, tbl2);
        Table tbl3 = client.getTable(dbName, tbl2.getTableName());
        assertEquals("Alter table didn't succeed. Num buckets is different ", tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
        // check that data has moved
        FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), conf);
        assertFalse("old table location still exists", fs.exists(new Path(tbl.getSd().getLocation())));
        assertTrue("data did not move to new location", fs.exists(new Path(tbl3.getSd().getLocation())));
        if (!isThriftClient) {
            assertEquals("alter table didn't move data correct location", tbl3.getSd().getLocation(), tbl2.getSd().getLocation());
        }
        // alter table with invalid column type
        tbl_pk.getSd().setCols(invColsInvType);
        failed = false;
        try {
            client.alter_table(dbName, tbl2.getTableName(), tbl_pk);
        } catch (InvalidOperationException ex) {
            failed = true;
        }
        assertTrue("Should not have succeeded in altering column", failed);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        // BUG FIX: previous message wrongly referenced testSimpleTable().
        System.err.println("testAlterTable() failed.");
        throw e;
    } finally {
        silentDropDatabase(dbName);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.metastore.api.Table) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ArrayList(java.util.ArrayList) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) ConfigValSecurityException(org.apache.hadoop.hive.metastore.api.ConfigValSecurityException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) IOException(java.io.IOException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) DatabaseBuilder(org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder) FileSystem(org.apache.hadoop.fs.FileSystem) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) Test(org.junit.Test)

Example 44 with InvalidOperationException

use of org.apache.hadoop.hive.metastore.api.InvalidOperationException in project hive by apache.

the class ObjectStore method dropWMPool.

/**
 * Drops a single WM pool from a resource plan inside a metastore transaction.
 * The plan's default pool and any pool that still has child pools cannot be dropped;
 * a pool still referenced by mappings surfaces as a constraint violation.
 *
 * @param resourcePlanName name of the owning resource plan
 * @param poolPath path of the pool to drop (normalized before use)
 * @param ns namespace of the resource plan
 * @throws NoSuchObjectException if the delete did not remove exactly one pool
 * @throws InvalidOperationException if the pool is the default pool, has children,
 *         or is still referenced by mappings
 * @throws MetaException on metastore-level failures
 */
@Override
public void dropWMPool(String resourcePlanName, String poolPath, String ns) throws NoSuchObjectException, InvalidOperationException, MetaException {
    poolPath = normalizeIdentifier(poolPath);
    boolean commited = false;
    Query query = null;
    try {
        openTransaction();
        // NOTE(review): third arg 'true' presumably requires the plan to be editable — confirm.
        MWMResourcePlan resourcePlan = getMWMResourcePlan(resourcePlanName, ns, true);
        if (resourcePlan.getDefaultPool() != null && resourcePlan.getDefaultPool().getPath().equals(poolPath)) {
            throw new InvalidOperationException("Cannot drop default pool of a resource plan");
        }
        if (poolHasChildren(resourcePlan, poolPath)) {
            throw new InvalidOperationException("Cannot drop a pool that has child pools");
        }
        // NOTE(review): startsWith matches any pool whose path shares this prefix, not just
        // exact children; the != 1 count check below rejects multi-matches (and "not found")
        // by throwing, at which point the transaction is rolled back in the finally block.
        query = pm.newQuery(MWMPool.class, "resourcePlan == rp && path.startsWith(poolPath)");
        query.declareParameters("MWMResourcePlan rp, java.lang.String poolPath");
        if (query.deletePersistentAll(resourcePlan, poolPath) != 1) {
            throw new NoSuchObjectException("Cannot delete pool: " + poolPath);
        }
        commited = commitTransaction();
    } catch (Exception e) {
        // A foreign-key violation means mappings still reference this pool.
        if (getConstraintException(e) != null) {
            throw new InvalidOperationException("Please remove all mappings for this pool.");
        }
        throw e;
    } finally {
        // Rolls back if commit never happened and closes the query resources.
        rollbackAndCleanup(commited, query);
    }
}
Also used : ScheduledQuery(org.apache.hadoop.hive.metastore.api.ScheduledQuery) Query(javax.jdo.Query) MScheduledQuery(org.apache.hadoop.hive.metastore.model.MScheduledQuery) MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) JDOException(javax.jdo.JDOException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException)

Example 45 with InvalidOperationException

use of org.apache.hadoop.hive.metastore.api.InvalidOperationException in project hive by apache.

the class ObjectStore method createPool.

/**
 * Persists a new WM pool under its resource plan inside a metastore transaction.
 * Validates that the parent pool path already exists and that the scheduling
 * policy is acceptable before persisting.
 */
@Override
public void createPool(WMPool pool) throws AlreadyExistsException, NoSuchObjectException, InvalidOperationException, MetaException {
    boolean committed = false;
    try {
        openTransaction();
        MWMResourcePlan plan = getMWMResourcePlan(pool.getResourcePlanName(), pool.getNs(), true);
        // A nested pool path requires its parent pool to already exist.
        if (!poolParentExists(plan, pool.getPoolPath())) {
            throw new NoSuchObjectException("Pool path is invalid, the parent does not exist");
        }
        String schedulingPolicy = pool.getSchedulingPolicy();
        if (!MetaStoreUtils.isValidSchedulingPolicy(schedulingPolicy)) {
            throw new InvalidOperationException("Invalid scheduling policy " + schedulingPolicy);
        }
        pm.makePersistent(new MWMPool(plan, pool.getPoolPath(), pool.getAllocFraction(), pool.getQueryParallelism(), schedulingPolicy));
        committed = commitTransaction();
    } catch (Exception e) {
        // A duplicate pool path surfaces as a constraint violation; translate it.
        checkForConstraintException(e, "Pool already exists: ");
        throw e;
    } finally {
        // Rolls back if the commit never happened; no query resources to close here.
        rollbackAndCleanup(committed, (Query) null);
    }
}
Also used : MWMPool(org.apache.hadoop.hive.metastore.model.MWMPool) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MWMResourcePlan(org.apache.hadoop.hive.metastore.model.MWMResourcePlan) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) InvalidInputException(org.apache.hadoop.hive.metastore.api.InvalidInputException) InvalidOperationException(org.apache.hadoop.hive.metastore.api.InvalidOperationException) SQLIntegrityConstraintViolationException(java.sql.SQLIntegrityConstraintViolationException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) InvalidPartitionException(org.apache.hadoop.hive.metastore.api.InvalidPartitionException) UnknownPartitionException(org.apache.hadoop.hive.metastore.api.UnknownPartitionException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) JDOException(javax.jdo.JDOException) MissingTableException(org.datanucleus.store.rdbms.exceptions.MissingTableException) SQLException(java.sql.SQLException) UnknownDBException(org.apache.hadoop.hive.metastore.api.UnknownDBException) TException(org.apache.thrift.TException) JDODataStoreException(javax.jdo.JDODataStoreException) JDOObjectNotFoundException(javax.jdo.JDOObjectNotFoundException) UnknownTableException(org.apache.hadoop.hive.metastore.api.UnknownTableException)

Aggregations

InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException)51 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)26 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)23 IOException (java.io.IOException)19 ArrayList (java.util.ArrayList)18 Table (org.apache.hadoop.hive.metastore.api.Table)17 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)16 TException (org.apache.thrift.TException)15 Partition (org.apache.hadoop.hive.metastore.api.Partition)14 FileSystem (org.apache.hadoop.fs.FileSystem)12 Path (org.apache.hadoop.fs.Path)12 List (java.util.List)10 AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException)10 InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException)10 MWMResourcePlan (org.apache.hadoop.hive.metastore.model.MWMResourcePlan)9 SQLException (java.sql.SQLException)8 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)8 Test (org.junit.Test)8 LinkedList (java.util.LinkedList)7 Database (org.apache.hadoop.hive.metastore.api.Database)7