Search in sources :

Example 46 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class SQLOperation, method runQuery:

/**
 * Executes the already-compiled query via the Driver and moves the operation
 * to FINISHED on success. If the operation reached a terminal state before or
 * during execution (cancelled, timed out, closed), the method returns quietly.
 *
 * @throws HiveSQLException if execution fails while the operation is still active
 */
private void runQuery() throws HiveSQLException {
    try {
        OperationState opState = getState();
        // Operation may have been cancelled by another thread
        if (opState.isTerminal()) {
            log.info("Not running the query. Operation is already in terminal state: " + opState + ", perhaps cancelled due to query timeout or by another thread.");
            return;
        }
        // In Hive server mode, we are not able to retry in the FetchTask
        // case, when calling fetch queries since execute() has returned.
        // For now, we disable the test attempts.
        driver.run();
    } catch (Throwable e) {
        /*
         * If the operation was cancelled by another thread, or the execution timed out, Driver#run
         * may fail. We simply return if the operation state is already
         * CANCELED, TIMEDOUT, CLOSED or FINISHED, otherwise throw an exception.
         */
        if (getState().isTerminal()) {
            log.warn("Ignore exception in terminal state: {}", getState(), e);
            return;
        }
        setState(OperationState.ERROR);
        if (e instanceof CommandProcessorException) {
            // This is the execution phase (compilation happens in prepare()),
            // so report the failure as a processing error, not a compile error.
            throw toSQLException("Error while processing statement", (CommandProcessorException) e);
        } else if (e instanceof HiveSQLException) {
            throw (HiveSQLException) e;
        } else if (e instanceof OutOfMemoryError) {
            // Never wrap OOM in a checked exception; let it propagate as-is.
            throw (OutOfMemoryError) e;
        } else {
            throw new HiveSQLException("Error running query", e);
        }
    }
    setState(OperationState.FINISHED);
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) OperationState(org.apache.hive.service.cli.OperationState)

Example 47 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class SQLOperation, method prepare:

/**
 * Compile the query and extract metadata.
 * <p>
 * Moves the operation to RUNNING, creates the Driver, arms an optional
 * query-timeout task, and compiles the statement. On any failure the
 * operation is moved to ERROR and a {@link HiveSQLException} is thrown.
 *
 * @param queryState per-query state (configuration, query id and tag)
 * @throws HiveSQLException if compilation fails or an unexpected error occurs
 */
private void prepare(QueryState queryState) throws HiveSQLException {
    setState(OperationState.RUNNING);
    try {
        driver = DriverFactory.newDriver(queryState, queryInfo);
        // queryTimeout == 0 means no timeout
        if (queryTimeout > 0L) {
            // Single-threaded scheduler: at most one pending timeout task per operation.
            timeoutExecutor = Executors.newSingleThreadScheduledExecutor();
            timeoutExecutor.schedule(() -> {
                try {
                    final String queryId = queryState.getQueryId();
                    log.info("Query timed out after: {} seconds. Cancelling the execution now: {}", queryTimeout, queryId);
                    SQLOperation.this.cancel(OperationState.TIMEDOUT);
                } catch (HiveSQLException e) {
                    // Cancellation failed; log and let the operation proceed (best effort).
                    log.error("Error cancelling the query after timeout: {} seconds", queryTimeout, e);
                }
                // Returning null makes this lambda a Callable, selecting the Callable
                // overload of schedule() so checked exceptions could be thrown if needed.
                return null;
            }, queryTimeout, TimeUnit.SECONDS);
        }
        queryInfo.setQueryDisplay(driver.getQueryDisplay());
        if (operationLog != null) {
            queryInfo.setOperationLogLocation(operationLog.toString());
        }
        // set the operation handle information in Driver, so that thrift API users
        // can use the operation handle they receive, to lookup query information in
        // Yarn ATS, also used in logging so remove padding for better display
        String guid64 = Base64.getUrlEncoder().withoutPadding().encodeToString(getHandle().getHandleIdentifier().toTHandleIdentifier().getGuid());
        driver.setOperationId(guid64);
        // In Hive server mode, we are not able to retry in the FetchTask
        // case, when calling fetch queries since execute() has returned.
        // For now, we disable the test attempts.
        driver.compileAndRespond(statement);
        if (queryState.getQueryTag() != null && queryState.getQueryId() != null) {
            parentSession.updateQueryTag(queryState.getQueryId(), queryState.getQueryTag());
        }
        setHasResultSet(driver.hasResultSet());
    } catch (CommandProcessorException e) {
        // Compilation failure: surface the processor's error code and SQL state.
        setState(OperationState.ERROR);
        throw toSQLException("Error while compiling statement", e);
    } catch (Throwable e) {
        setState(OperationState.ERROR);
        if (e instanceof OutOfMemoryError) {
            // Never wrap OOM in a checked exception; rethrow as-is.
            throw e;
        }
        throw new HiveSQLException("Error running query", e);
    }
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveSQLException(org.apache.hive.service.cli.HiveSQLException)

Example 48 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenariosAcrossInstances, method testIncrementalDumpEmptyDumpDirectory:

/**
 * Verifies that an incremental load from an empty dump directory keeps the
 * replication id in sync, while a bootstrap load from an empty dump directory
 * fails with a "no data to load" error.
 */
@Test
public void testIncrementalDumpEmptyDumpDirectory() throws Throwable {
    WarehouseInstance.Tuple tuple = primary.dump(primaryDbName);
    replica.load(replicatedDbName, primaryDbName).status(replicatedDbName).verifyResult(tuple.lastReplicationId);
    tuple = primary.dump(primaryDbName, Collections.emptyList());
    replica.load(replicatedDbName, primaryDbName).status(replicatedDbName).verifyResult(tuple.lastReplicationId);
    // create events for some other database and then dump the primaryDbName to dump an empty directory.
    String testDbName = primaryDbName + "_test";
    tuple = primary.run(" create database " + testDbName).run("create table " + testDbName + ".tbl (fld int)").dump(primaryDbName, Collections.emptyList());
    // Incremental load to existing database with empty dump directory should set the repl id to the last event at src.
    replica.load(replicatedDbName, primaryDbName).status(replicatedDbName).verifyResult(tuple.lastReplicationId);
    // Bootstrap load from an empty dump directory should return empty load directory error.
    tuple = primary.dump("someJunkDB", Collections.emptyList());
    try {
        replica.runCommand("REPL LOAD someJunkDB into someJunkDB");
        // Explicit failure: a bare "assert false" is silently skipped unless the
        // JVM runs with -ea, so it cannot be relied on to fail the test.
        throw new AssertionError("REPL LOAD from an empty bootstrap dump directory should have failed");
    } catch (CommandProcessorException e) {
        assertTrue(e.getMessage().toLowerCase().contains("semanticException no data to load in path".toLowerCase()));
    }
    primary.run(" drop database if exists " + testDbName + " cascade");
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) Tuple(org.apache.hadoop.hive.ql.parse.WarehouseInstance.Tuple) Test(org.junit.Test)

Example 49 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenarios, method testIncrementalReplWithEventsMissing:

/**
 * Verifies that an incremental REPL DUMP fails with REPL_EVENTS_MISSING_IN_METASTORE
 * when some notification events have been purged from the metastore.
 */
@Test
public void testIncrementalReplWithEventsMissing() throws IOException, TException {
    String testName = "incrementalReplWithEventsMissing";
    String dbName = createDB(testName, driver);
    String replDbName = dbName + "_dupe";
    Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
    String replDumpId = bootstrapDump.lastReplId;
    // CREATE_TABLE - INSERT - TRUNCATE - INSERT - The result is just one record.
    String[] unptn_data = new String[] { "eleven" };
    run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE", driver);
    run("INSERT INTO TABLE " + dbName + ".unptned values('ten')", driver);
    run("TRUNCATE TABLE " + dbName + ".unptned", driver);
    run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[0] + "')", driver);
    // Inject a behaviour where some events missing from notification_log table.
    // This ensures the incremental dump doesn't get all events for replication.
    BehaviourInjection<NotificationEventResponse, NotificationEventResponse> eventIdSkipper = new BehaviourInjection<NotificationEventResponse, NotificationEventResponse>() {

        @Nullable
        @Override
        public NotificationEventResponse apply(@Nullable NotificationEventResponse eventIdList) {
            if (null != eventIdList) {
                List<NotificationEvent> eventIds = eventIdList.getEvents();
                List<NotificationEvent> outEventIds = new ArrayList<NotificationEvent>();
                for (int i = 0; i < eventIds.size(); i++) {
                    NotificationEvent event = eventIds.get(i);
                    // Skip all the INSERT events
                    if (event.getDbName().equalsIgnoreCase(dbName) && event.getEventType().equalsIgnoreCase("INSERT")) {
                        injectionPathCalled = true;
                        continue;
                    }
                    outEventIds.add(event);
                }
                // Return the new list
                return new NotificationEventResponse(outEventIds);
            } else {
                return null;
            }
        }
    };
    InjectableBehaviourObjectStore.setGetNextNotificationBehaviour(eventIdSkipper);
    try {
        advanceDumpDir();
        try {
            driver.run("REPL DUMP " + dbName);
            // Explicit failure: a bare "assert false" is silently skipped unless
            // the JVM runs with -ea.
            throw new AssertionError("REPL DUMP should have failed due to missing events");
        } catch (CommandProcessorException e) {
            // Compare message content with equals(): the previous == check compared
            // String identity and only passed if both referred to the same instance.
            assertTrue(ErrorMsg.REPL_EVENTS_MISSING_IN_METASTORE.getMsg().equals(e.getCauseMessage()));
        }
        eventIdSkipper.assertInjectionsPerformed(true, false);
    } finally {
        // reset the behaviour
        InjectableBehaviourObjectStore.resetGetNextNotificationBehaviour();
    }
}
Also used : NotificationEventResponse(org.apache.hadoop.hive.metastore.api.NotificationEventResponse) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) BehaviourInjection(org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection) ArrayList(java.util.ArrayList) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent) Nullable(javax.annotation.Nullable) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) Test(org.junit.Test)

Example 50 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenarios, method testDumpWithTableDirMissing:

/**
 * Verifies that REPL DUMP fails with FILE_NOT_FOUND when a table's data
 * directory has been removed from the warehouse.
 */
@Test
public void testDumpWithTableDirMissing() throws IOException {
    String dbName = createDB(testName.getMethodName(), driver);
    run("CREATE TABLE " + dbName + ".normal(a int)", driver);
    run("INSERT INTO " + dbName + ".normal values (1)", driver);
    Path path = null;
    try {
        // BUG FIX: the result of getDatabase() was previously discarded, leaving
        // db null so db.getManagedLocationUri() always threw NPE and the catch
        // fallback was taken unconditionally.
        Database db = metaStoreClient.getDatabase(dbName);
        path = new Path(db.getManagedLocationUri());
    } catch (Exception e) {
        // Fall back to the default warehouse layout when the database has no
        // resolvable managed location.
        path = new Path(System.getProperty("test.warehouse.dir", "/tmp/warehouse/managed"));
        path = new Path(path, dbName.toLowerCase() + ".db");
    }
    path = new Path(path, "normal");
    FileSystem fs = path.getFileSystem(hconf);
    // Recursive delete; the single-argument FileSystem#delete(Path) is deprecated.
    fs.delete(path, true);
    advanceDumpDir();
    try {
        driver.run("REPL DUMP " + dbName);
        // Explicit failure: a bare "assert false" is silently skipped unless the
        // JVM runs with -ea.
        throw new AssertionError("REPL DUMP should have failed: table directory was removed");
    } catch (CommandProcessorException e) {
        // JUnit convention: expected value first, actual second (the original had
        // them swapped, which garbles the failure message).
        Assert.assertEquals(ErrorMsg.FILE_NOT_FOUND.getErrorCode(), e.getResponseCode());
    }
    run("DROP TABLE " + dbName + ".normal", driver);
    run("drop database " + dbName, true, driver);
}
Also used : Path(org.apache.hadoop.fs.Path) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) ReflectionException(javax.management.ReflectionException) MalformedObjectNameException(javax.management.MalformedObjectNameException) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) AttributeNotFoundException(javax.management.AttributeNotFoundException) TException(org.apache.thrift.TException) IOException(java.io.IOException) MBeanException(javax.management.MBeanException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InstanceNotFoundException(javax.management.InstanceNotFoundException) Test(org.junit.Test)

Aggregations

CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException)85 Test (org.junit.Test)42 IOException (java.io.IOException)14 CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)14 Driver (org.apache.hadoop.hive.ql.Driver)12 ArrayList (java.util.ArrayList)10 HiveConf (org.apache.hadoop.hive.conf.HiveConf)10 QTestProcessExecResult (org.apache.hadoop.hive.ql.QTestProcessExecResult)9 Path (org.apache.hadoop.fs.Path)8 FileSystem (org.apache.hadoop.fs.FileSystem)7 CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)6 File (java.io.File)5 IDriver (org.apache.hadoop.hive.ql.IDriver)5 FileNotFoundException (java.io.FileNotFoundException)4 LockException (org.apache.hadoop.hive.ql.lockmgr.LockException)4 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)4 UnsupportedEncodingException (java.io.UnsupportedEncodingException)3 Map (java.util.Map)3 Nullable (javax.annotation.Nullable)3 Database (org.apache.hadoop.hive.metastore.api.Database)3