Example 11 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestCommands, the method testDropDatabaseCommand:

@Test
public void testDropDatabaseCommand() throws HCatException, CommandProcessorException {
    String dbName = "cmd_testdb";
    int evid = 999;
    Command testCmd = new DropDatabaseCommand(dbName, evid);
    assertEquals(evid, testCmd.getEventId());
    assertEquals(1, testCmd.get().size());
    assertTrue(testCmd.isRetriable());
    assertFalse(testCmd.isUndoable());
    CommandTestUtils.testCommandSerialization(testCmd);
    client.dropDatabase(dbName, true, HCatClient.DropDBMode.CASCADE);
    client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
    HCatDatabase db = client.getDatabase(dbName);
    assertNotNull(db);
    LOG.info("About to run :" + testCmd.get().get(0));
    driver.run(testCmd.get().get(0));
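    // The drop command has just run; the database lookup below must fail.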
    Exception onfe = null;
    try {
        HCatDatabase db_del = client.getDatabase(dbName);
    } catch (Exception e) {
        onfe = e;
    }
    assertNotNull(onfe);
    assertTrue(onfe instanceof ObjectNotFoundException);
}
Also used: HCatDatabase (org.apache.hive.hcatalog.api.HCatDatabase), Command (org.apache.hive.hcatalog.api.repl.Command), ObjectNotFoundException (org.apache.hive.hcatalog.api.ObjectNotFoundException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), HCatException (org.apache.hive.hcatalog.common.HCatException), IOException (java.io.IOException), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), Test (org.junit.Test)
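On JUnit 4.13 and later, the manual try/catch/assertNotNull bookkeeping at the end of the test can be collapsed into a single assertion. A minimal sketch, assuming the same client fixture and a static import of org.junit.Assert.assertThrows:

// After the drop command has run, getDatabase must fail; assertThrows both
// executes the lambda and verifies the exception type in one step.
driver.run(testCmd.get().get(0));
assertThrows(ObjectNotFoundException.class, () -> client.getDatabase(dbName));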

Example 12 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestMetastoreAuthorizationProvider, the method testSimplePrivileges:

@Test
public void testSimplePrivileges() throws Exception {
    if (!isTestEnabled()) {
        System.out.println("Skipping test " + this.getClass().getName());
        return;
    }
    String dbName = getTestDbName();
    String tblName = getTestTableName();
    String userName = setupUser();
    String loc = clientHiveConf.get(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname) + "/" + dbName;
    String mLoc = clientHiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname) + "/" + dbName;
    allowCreateDatabase(userName);
    driver.run("create database " + dbName + " location '" + loc + "' managedlocation '" + mLoc + "'");
    Database db = msc.getDatabase(dbName);
    String dbLocn = db.getManagedLocationUri();
    validateCreateDb(db, dbName);
    allowCreateInDb(dbName, userName, dbLocn);
    disallowCreateInDb(dbName, userName, dbLocn);
    disallowCreateDatabase(userName);
    driver.run("use " + dbName);
    try {
        driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
        assert false;
    } catch (CommandProcessorException e) {
        assertEquals(40000, e.getResponseCode());
    }
    // Even if table location is specified table creation should fail
    String tblNameLoc = tblName + "_loc";
    String tblLocation = new Path(dbLocn).getParent().toUri() + "/" + tblNameLoc;
    if (mayTestLocation()) {
        driver.run("use " + dbName);
        try {
            driver.run(String.format("create table %s (a string) partitioned by (b string) location '" + tblLocation + "'", tblNameLoc));
        } catch (CommandProcessorException e) {
            assertEquals(40000, e.getResponseCode());
        }
    }
    // failure from not having permissions to create table
    ArrayList<FieldSchema> fields = new ArrayList<FieldSchema>(2);
    fields.add(new FieldSchema("a", serdeConstants.STRING_TYPE_NAME, ""));
    Table ttbl = new Table();
    ttbl.setDbName(dbName);
    ttbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    ttbl.setSd(sd);
    sd.setCols(fields);
    sd.setParameters(new HashMap<String, String>());
    sd.getParameters().put("test_param_1", "Use this for comments etc");
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(ttbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    sd.setInputFormat(HiveInputFormat.class.getName());
    sd.setOutputFormat(HiveOutputFormat.class.getName());
    ttbl.setPartitionKeys(new ArrayList<FieldSchema>());
    MetaException me = null;
    try {
        msc.createTable(ttbl);
    } catch (MetaException e) {
        me = e;
    }
    assertNoPrivileges(me);
    allowCreateInDb(dbName, userName, dbLocn);
    driver.run("use " + dbName);
    driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
    Table tbl = msc.getTable(dbName, tblName);
    Assert.assertTrue(tbl.isSetId());
    tbl.unsetId();
    validateCreateTable(tbl, tblName, dbName);
    // Table creation should succeed even if location is specified
    if (mayTestLocation()) {
        driver.run("use " + dbName);
        driver.run(String.format("create table %s (a string) partitioned by (b string) location '" + tblLocation + "'", tblNameLoc));
        Table tblLoc = msc.getTable(dbName, tblNameLoc);
        validateCreateTable(tblLoc, tblNameLoc, dbName);
    }
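    // Impersonate an unprivileged fake user; the creates below should be rejected.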
    String fakeUser = "mal";
    List<String> fakeGroupNames = new ArrayList<String>();
    fakeGroupNames.add("groupygroup");
    InjectableDummyAuthenticator.injectUserName(fakeUser);
    InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames);
    InjectableDummyAuthenticator.injectMode(true);
    try {
        driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName + "mal"));
    } catch (CommandProcessorException e) {
        assertEquals(40000, e.getResponseCode());
    }
    ttbl.setTableName(tblName + "mal");
    me = null;
    try {
        msc.createTable(ttbl);
    } catch (MetaException e) {
        me = e;
    }
    assertNoPrivileges(me);
    allowCreateInTbl(tbl.getTableName(), userName, tbl.getSd().getLocation());
    disallowCreateInTbl(tbl.getTableName(), userName, tbl.getSd().getLocation());
    try {
        driver.run("alter table " + tblName + " add partition (b='2011')");
    } catch (CommandProcessorException e) {
        assertEquals(40000, e.getResponseCode());
    }
    List<String> ptnVals = new ArrayList<String>();
    ptnVals.add("b=2011");
    Partition tpart = new Partition();
    tpart.setDbName(dbName);
    tpart.setTableName(tblName);
    tpart.setValues(ptnVals);
    tpart.setParameters(new HashMap<String, String>());
    tpart.setSd(tbl.getSd().deepCopy());
    tpart.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy());
    tpart.getSd().setLocation(tbl.getSd().getLocation() + "/tpart");
    me = null;
    try {
        msc.add_partition(tpart);
    } catch (MetaException e) {
        me = e;
    }
    assertNoPrivileges(me);
    InjectableDummyAuthenticator.injectMode(false);
    allowCreateInTbl(tbl.getTableName(), userName, tbl.getSd().getLocation());
    driver.run("alter table " + tblName + " add partition (b='2011')");
    String proxyUserName = getProxyUserName();
    if (proxyUserName != null) {
        // for storage based authorization, user having proxy privilege should be allowed to do operation
        // even if the file permission is not there.
        InjectableDummyAuthenticator.injectUserName(proxyUserName);
        InjectableDummyAuthenticator.injectGroupNames(Collections.singletonList(proxyUserName));
        InjectableDummyAuthenticator.injectMode(true);
        disallowCreateInTbl(tbl.getTableName(), proxyUserName, tbl.getSd().getLocation());
        driver.run("alter table " + tblName + " add partition (b='2012')");
        InjectableDummyAuthenticator.injectMode(false);
    }
    allowDropOnTable(tblName, userName, tbl.getSd().getLocation());
    allowDropOnDb(dbName, userName, db.getLocationUri());
    driver.run("drop database if exists " + getTestDbName() + " cascade");
    InjectableDummyAuthenticator.injectUserName(userName);
    InjectableDummyAuthenticator.injectGroupNames(Arrays.asList(ugi.getGroupNames()));
    InjectableDummyAuthenticator.injectMode(true);
    allowCreateDatabase(userName);
    driver.run("create database " + dbName);
    db = msc.getDatabase(dbName);
    dbLocn = db.getLocationUri();
    allowCreateInDb(dbName, userName, dbLocn);
    dbLocn = db.getManagedLocationUri();
    if (dbLocn != null) {
        allowCreateInDb(dbName, userName, dbLocn);
    }
    tbl.setTableType("EXTERNAL_TABLE");
    msc.createTable(tbl);
    allowDropOnTable(tblName, userName, tbl.getSd().getLocation());
    disallowDropOnTable(tblName, userName, tbl.getSd().getLocation());
    try {
        driver.run("drop table " + tbl.getTableName());
    } catch (CommandProcessorException e) {
        assertEquals(40000, e.getResponseCode());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Partition (org.apache.hadoop.hive.metastore.api.Partition), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), Table (org.apache.hadoop.hive.metastore.api.Table), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), ArrayList (java.util.ArrayList), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), HiveOutputFormat (org.apache.hadoop.hive.ql.io.HiveOutputFormat), HiveInputFormat (org.apache.hadoop.hive.ql.io.HiveInputFormat), Database (org.apache.hadoop.hive.metastore.api.Database), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), Test (org.junit.Test)
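The same expect-failure pattern recurs five times in this test, and apart from the first block (whose assert false only fires when JVM assertions are enabled) none of the catch blocks fail the test when no exception is thrown at all. A hedged helper sketch, not part of the Hive source, that factors the pattern out and fails loudly:

// Hypothetical helper: runs a statement that is expected to be rejected and
// asserts the authorization failure code used throughout this test.
private void expectAuthFailure(String sql) {
    try {
        driver.run(sql);
        Assert.fail("expected CommandProcessorException for: " + sql);
    } catch (CommandProcessorException e) {
        // 40000 is the response code asserted in every catch block above.
        Assert.assertEquals(40000, e.getResponseCode());
    }
}

With it, each block shrinks to a single call, e.g. expectAuthFailure(String.format("create table %s (a string) partitioned by (b string)", tblName)).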

Example 13 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class Compiler, the method compile:

/**
 * @param deferClose indicates whether the close/destroy should be deferred when the process has been
 *             interrupted. Set it to true when compile is called from within another method, such as
 *             runInternal, which performs the deferred close itself.
 */
public QueryPlan compile(String rawCommand, boolean deferClose) throws CommandProcessorException {
    initialize(rawCommand);
    Throwable compileException = null;
    boolean parsed = false;
    QueryPlan plan = null;
    try {
        DriverUtils.checkInterrupted(driverState, driverContext, "before parsing and analysing the query", null, null);
        parse();
        parsed = true;
        BaseSemanticAnalyzer sem = analyze();
        DriverUtils.checkInterrupted(driverState, driverContext, "after analyzing query.", null, null);
        plan = createPlan(sem);
        initializeFetchTask(plan);
        authorize(sem);
        explainOutput(sem, plan);
    } catch (CommandProcessorException cpe) {
        compileException = cpe.getCause();
        throw cpe;
    } catch (Exception e) {
        compileException = e;
        DriverUtils.checkInterrupted(driverState, driverContext, "during query compilation: " + e.getMessage(), null, null);
        handleException(e);
    } finally {
        cleanUp(compileException, parsed, deferClose);
    }
    return plan;
}
Also used: BaseSemanticAnalyzer (org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), AuthorizationException (org.apache.hadoop.hive.ql.metadata.AuthorizationException), ParseException (org.apache.hadoop.hive.ql.parse.ParseException), ReCompileException (org.apache.hadoop.hive.ql.reexec.ReCompileException), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), TException (org.apache.thrift.TException), IOException (java.io.IOException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
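The contract of compile is that cleanUp always runs in the finally block, so by the time a CommandProcessorException reaches the caller there is nothing left to release. A hedged caller sketch (compiler, rawCommand, and the logger are illustrative names, not the actual runInternal code):

QueryPlan plan;
try {
    // deferClose=true because this hypothetical caller owns the close/destroy,
    // as described in the javadoc above.
    plan = compiler.compile(rawCommand, true);
} catch (CommandProcessorException cpe) {
    // cleanUp(...) has already run inside compile(); just surface the error.
    LOG.error("compilation failed, response code {}", cpe.getResponseCode(), cpe);
    throw cpe;
}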

Example 14 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class Driver, the method compileInternal:

private void compileInternal(String command, boolean deferClose) throws CommandProcessorException {
    Metrics metrics = MetricsFactory.getInstance();
    if (metrics != null) {
        metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
    }
    PerfLogger perfLogger = SessionState.getPerfLogger(true);
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.WAIT_COMPILE);
    try (CompileLock compileLock = CompileLockFactory.newInstance(driverContext.getConf(), command)) {
        boolean success = compileLock.tryAcquire();
        perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.WAIT_COMPILE);
        if (metrics != null) {
            metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
        }
        if (!success) {
            String errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
            throw DriverUtils.createProcessorException(driverContext, ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(), errorMessage, null, null);
        }
        try {
            compile(command, true, deferClose);
        } catch (CommandProcessorException cpe) {
            try {
                driverTxnHandler.endTransactionAndCleanup(false);
            } catch (LockException e) {
                LOG.warn("Exception in releasing locks", e);
            }
            throw cpe;
        }
    }
    // Save compile-time PerfLogging for WebUI.
    // Execution-time Perf logs are done by either another thread's PerfLogger or a reset PerfLogger.
    driverContext.getQueryDisplay().setPerfLogStarts(QueryDisplay.Phase.COMPILATION, perfLogger.getStartTimes());
    driverContext.getQueryDisplay().setPerfLogEnds(QueryDisplay.Phase.COMPILATION, perfLogger.getEndTimes());
}
Also used: Metrics (org.apache.hadoop.hive.common.metrics.common.Metrics), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger), CompileLock (org.apache.hadoop.hive.ql.lock.CompileLock)
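Because the lock timeout is reported through the regular CommandProcessorException channel, a caller can tell it apart from other compile failures by its error code. A hedged call-site sketch (the driver instance and the retry policy are assumptions):

try {
    driver.run(query);
} catch (CommandProcessorException e) {
    if (e.getResponseCode() == ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode()) {
        // the compile lock was contended; a caller might back off and retry here
    }
    throw e;
}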

Example 15 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class DriverUtils, the method runOnDriver:

/**
 * Used by query-based compaction to run the query that generates the compacted data.
 */
public static void runOnDriver(HiveConf conf, String user, SessionState sessionState, String query, ValidWriteIdList writeIds, long compactorTxnId) throws HiveException {
    if (writeIds != null && compactorTxnId < 0) {
        throw new IllegalArgumentException(JavaUtils.txnIdToString(compactorTxnId) + " is not valid. Context: " + query);
    }
    SessionState.setCurrentSessionState(sessionState);
    boolean isOk = false;
    try {
        QueryState qs = new QueryState.Builder().withHiveConf(conf).withGenerateNewQueryId(true).nonIsolated().build();
        Driver driver = new Driver(qs, null, null, writeIds, compactorTxnId);
        try {
            try {
                driver.run(query);
            } catch (CommandProcessorException e) {
                LOG.error("Failed to run " + query, e);
                throw new HiveException("Failed to run " + query, e);
            }
        } finally {
            driver.close();
            driver.destroy();
        }
        isOk = true;
    } finally {
        if (!isOk) {
            try {
                // This also resets SessionState.get.
                sessionState.close();
            } catch (Throwable th) {
                LOG.warn("Failed to close a bad session", th);
                SessionState.detachSession();
            }
        }
    }
}
Also used: CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
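Note that runOnDriver wraps the checked CommandProcessorException in a HiveException, so callers only deal with the latter. A hedged usage sketch (the user name and query are invented; SessionState.start is the standard way to open a session, though the compactor's own setup may differ):

HiveConf conf = new HiveConf();
SessionState sessionState = SessionState.start(conf);
try {
    // writeIds == null, so the compactorTxnId guard does not apply; 0 is a placeholder.
    DriverUtils.runOnDriver(conf, "hive", sessionState, "select 1", null, 0);
} catch (HiveException e) {
    LOG.error("query failed", e);
}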

Aggregations

CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException): 85 usages
Test (org.junit.Test): 42 usages
IOException (java.io.IOException): 14 usages
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse): 14 usages
Driver (org.apache.hadoop.hive.ql.Driver): 12 usages
ArrayList (java.util.ArrayList): 10 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 10 usages
QTestProcessExecResult (org.apache.hadoop.hive.ql.QTestProcessExecResult): 9 usages
Path (org.apache.hadoop.fs.Path): 8 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 7 usages
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 6 usages
File (java.io.File): 5 usages
IDriver (org.apache.hadoop.hive.ql.IDriver): 5 usages
FileNotFoundException (java.io.FileNotFoundException): 4 usages
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException): 4 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 4 usages
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 3 usages
Map (java.util.Map): 3 usages
Nullable (javax.annotation.Nullable): 3 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 3 usages