
Example 51 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenariosAcidTables, method testDumpAcidTableWithTableDirMissing:

@Test
public void testDumpAcidTableWithTableDirMissing() throws Throwable {
    String dbName = testName.getMethodName();
    primary.run("CREATE DATABASE " + dbName + " WITH DBPROPERTIES ( '" + SOURCE_OF_REPLICATION + "' = '1,2,3')").run("CREATE TABLE " + dbName + ".normal (a int) " + " STORED AS ORC TBLPROPERTIES ('transactional'='true')").run("INSERT INTO " + dbName + ".normal values (1)");
    Path path = new Path(primary.warehouseRoot, dbName.toLowerCase() + ".db");
    path = new Path(path, "normal");
    FileSystem fs = path.getFileSystem(conf);
    // Delete the table directory recursively to simulate missing table data on the source.
    fs.delete(path, true);
    try {
        primary.runCommand("REPL DUMP " + dbName + " with ('hive.repl.dump.include.acid.tables' = 'true')");
        assert false;
    } catch (CommandProcessorException e) {
        Assert.assertEquals(e.getResponseCode(), ErrorMsg.FILE_NOT_FOUND.getErrorCode());
    }
    primary.run("DROP TABLE " + dbName + ".normal");
    primary.run("drop database " + dbName);
}
Also used : Path(org.apache.hadoop.fs.Path) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) FileSystem(org.apache.hadoop.fs.FileSystem) Test(org.junit.Test)
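
A plain Java assert statement is only checked when the JVM runs with assertions enabled, so the same check is often written with JUnit's Assert.fail. A minimal sketch, assuming the same fixture (primary, dbName) as the example above and using only calls already shown there:

    try {
        primary.runCommand("REPL DUMP " + dbName + " with ('hive.repl.dump.include.acid.tables' = 'true')");
        Assert.fail("REPL DUMP should have failed because the table directory was deleted");
    } catch (CommandProcessorException e) {
        // JUnit convention: expected value first, actual value second.
        Assert.assertEquals(ErrorMsg.FILE_NOT_FOUND.getErrorCode(), e.getResponseCode());
    }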

Example 52 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenariosAcidTables, method testAcidTablesBootstrapWithConcurrentDropTable:

@Test
public void testAcidTablesBootstrapWithConcurrentDropTable() throws Throwable {
    HiveConf primaryConf = primary.getConf();
    primary.run("use " + primaryDbName).run("create table t1 (id int) clustered by(id) into 3 buckets stored as orc " + "tblproperties (\"transactional\"=\"true\")").run("insert into t1 values(1)");
    // Perform a concurrent write + drop on the ACID table t1 while the bootstrap dump is in progress. Bootstrap
    // won't dump the table, but the subsequent incremental replication should see the new table created with the same name.
    BehaviourInjection<CallerArguments, Boolean> callerInjectedBehavior = new BehaviourInjection<CallerArguments, Boolean>() {

        @Nullable
        @Override
        public Boolean apply(@Nullable CallerArguments args) {
            if (injectionPathCalled) {
                nonInjectedPathCalled = true;
            } else {
                // Insert another row into t1 and drop the table from another txn while the bootstrap dump is in progress.
                injectionPathCalled = true;
                Thread t = new Thread(new Runnable() {

                    @Override
                    public void run() {
                        LOG.info("Entered new thread");
                        IDriver driver = DriverFactory.newDriver(primaryConf);
                        SessionState.start(new CliSessionState(primaryConf));
                        try {
                            driver.run("insert into " + primaryDbName + ".t1 values(2)");
                            driver.run("drop table " + primaryDbName + ".t1");
                        } catch (CommandProcessorException e) {
                            throw new RuntimeException(e);
                        }
                        LOG.info("Exit new thread success");
                    }
                });
                t.start();
                LOG.info("Created new thread {}", t.getName());
                try {
                    t.join();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }
            return true;
        }
    };
    InjectableBehaviourObjectStore.setCallerVerifier(callerInjectedBehavior);
    WarehouseInstance.Tuple bootstrapDump = null;
    try {
        bootstrapDump = primary.dump(primaryDbName);
        callerInjectedBehavior.assertInjectionsPerformed(true, true);
    } finally {
        // reset the behaviour
        InjectableBehaviourObjectStore.resetCallerVerifier();
    }
    // The bootstrap dump took the latest list of tables and hence won't see table t1, as it was dropped.
    replica.load(replicatedDbName, primaryDbName)
            .run("use " + replicatedDbName)
            .run("repl status " + replicatedDbName)
            .verifyResult(bootstrapDump.lastReplicationId)
            .run("show tables")
            .verifyResult(null);
    // Create another ACID table with the same name and insert a row. It should be properly replicated.
    WarehouseInstance.Tuple incrementalDump = primary.run("use " + primaryDbName)
            .run("create table t1 (id int) clustered by(id) into 3 buckets stored as orc " + "tblproperties (\"transactional\"=\"true\")")
            .run("insert into t1 values(100)")
            .dump(primaryDbName);
    replica.load(replicatedDbName, primaryDbName)
            .run("use " + replicatedDbName)
            .run("repl status " + replicatedDbName)
            .verifyResult(incrementalDump.lastReplicationId)
            .run("select id from t1 order by id")
            .verifyResult("100");
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) BehaviourInjection(org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) CallerArguments(org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.CallerArguments) IDriver(org.apache.hadoop.hive.ql.IDriver) HiveConf(org.apache.hadoop.hive.conf.HiveConf) Nullable(javax.annotation.Nullable) Test(org.junit.Test)
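
The injected behaviour above runs the concurrent insert and drop on a fresh session in a background thread. Extracted as a stand-alone sketch (the helper name runInBackground is invented here; the calls are the same IDriver/DriverFactory/SessionState/CliSessionState ones used in the example):

    private void runInBackground(HiveConf conf, String statement) throws InterruptedException {
        Thread t = new Thread(() -> {
            // Each thread needs its own driver and session state.
            IDriver driver = DriverFactory.newDriver(conf);
            SessionState.start(new CliSessionState(conf));
            try {
                driver.run(statement);
            } catch (CommandProcessorException e) {
                // driver.run declares this checked exception; surface it as unchecked inside the Runnable.
                throw new RuntimeException(e);
            }
        });
        t.start();
        t.join();
    }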

Example 53 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestReplicationScenarios, method testDumpWithPartitionDirMissing:

@Test
public void testDumpWithPartitionDirMissing() throws IOException {
    String dbName = createDB(testName.getMethodName(), driver);
    run("CREATE TABLE " + dbName + ".normal(a int) PARTITIONED BY (part int)", driver);
    run("INSERT INTO " + dbName + ".normal partition (part= 124) values (1)", driver);
    Database db = null;
    Path path = null;
    try {
        // Resolve the database's managed warehouse location; fall back to the default below if this fails.
        db = metaStoreClient.getDatabase(dbName);
        path = new Path(db.getManagedLocationUri());
    } catch (Exception e) {
        path = new Path(System.getProperty("test.warehouse.dir", "/tmp/warehouse/managed"));
        path = new Path(path, dbName.toLowerCase() + ".db");
    }
    path = new Path(path, "normal");
    path = new Path(path, "part=124");
    FileSystem fs = path.getFileSystem(hconf);
    // Delete the partition directory recursively to simulate missing partition data.
    fs.delete(path, true);
    advanceDumpDir();
    try {
        driver.run("REPL DUMP " + dbName);
        assert false;
    } catch (CommandProcessorException e) {
        Assert.assertEquals(e.getResponseCode(), ErrorMsg.FILE_NOT_FOUND.getErrorCode());
    }
    run("DROP TABLE " + dbName + ".normal", driver);
    run("drop database " + dbName, true, driver);
}
Also used : Path(org.apache.hadoop.fs.Path) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) ReflectionException(javax.management.ReflectionException) MalformedObjectNameException(javax.management.MalformedObjectNameException) AttributeNotFoundException(javax.management.AttributeNotFoundException) TException(org.apache.thrift.TException) IOException(java.io.IOException) MBeanException(javax.management.MBeanException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InstanceNotFoundException(javax.management.InstanceNotFoundException) Test(org.junit.Test)

Example 54 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class Compiler, method initialize:

private void initialize(String rawCommand) throws CommandProcessorException {
    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
    driverState.compilingWithLocking();
    VariableSubstitution variableSubstitution = new VariableSubstitution(new HiveVariableSource() {

        @Override
        public Map<String, String> getHiveVariable() {
            return SessionState.get().getHiveVariables();
        }
    });
    String command = variableSubstitution.substitute(driverContext.getConf(), rawCommand);
    String queryStr = command;
    try {
        // the command should be redacted to avoid logging sensitive data
        queryStr = HookUtils.redactLogString(driverContext.getConf(), command);
    } catch (Exception e) {
        LOG.warn("WARNING! Query command could not be redacted." + e);
    }
    DriverUtils.checkInterrupted(driverState, driverContext, "at beginning of compilation.", null, null);
    context.setCmd(command);
    driverContext.getQueryDisplay().setQueryStr(queryStr);
    LOG.info("Compiling command(queryId=" + driverContext.getQueryId() + "): " + queryStr);
    driverContext.getConf().setQueryString(queryStr);
    // FIXME: side effect will leave the last query set at the session level
    if (SessionState.get() != null) {
        SessionState.get().getConf().setQueryString(queryStr);
        SessionState.get().setupQueryCurrentTimestamp();
    }
}
Also used : VariableSubstitution(org.apache.hadoop.hive.conf.VariableSubstitution) HiveVariableSource(org.apache.hadoop.hive.conf.HiveVariableSource) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) AuthorizationException(org.apache.hadoop.hive.ql.metadata.AuthorizationException) ParseException(org.apache.hadoop.hive.ql.parse.ParseException) ReCompileException(org.apache.hadoop.hive.ql.reexec.ReCompileException) LockException(org.apache.hadoop.hive.ql.lockmgr.LockException) TException(org.apache.thrift.TException) IOException(java.io.IOException) CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
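
The initialize method above only prepares the query for compilation; the CommandProcessorException it declares is the same exception type that callers of the driver see when a command fails, as the test examples on this page show. A minimal sketch of the calling side, using only methods that already appear in these examples (the query string is a placeholder and LOG is assumed to be an SLF4J-style logger in scope):

    IDriver driver = DriverFactory.newDriver(conf);
    SessionState.start(new CliSessionState(conf));
    try {
        driver.run("select * from some_missing_table");
    } catch (CommandProcessorException e) {
        // The numeric error code and the underlying cause are both carried on the exception.
        int code = e.getResponseCode();
        Throwable cause = e.getCause();
        LOG.warn("Query failed with error code " + code, cause);
    }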

Example 55 with CommandProcessorException

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

From the class TestTxnCommands, method testMergeNegative:

@Test
public void testMergeNegative() throws Exception {
    CommandProcessorException e = runStatementOnDriverNegative("MERGE INTO " + Table.ACIDTBL + " target\n" +
            "USING " + Table.NONACIDORCTBL + " source ON target.a = source.a\n" +
            "WHEN MATCHED THEN UPDATE set b = 1\n" +
            "WHEN MATCHED THEN DELETE\n" +
            "WHEN NOT MATCHED AND a < 1 THEN INSERT VALUES(1,2)");
    Assert.assertEquals(ErrorMsg.MERGE_PREDIACTE_REQUIRED, ((HiveException) e.getCause()).getCanonicalErrorMsg());
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) Test(org.junit.Test)
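
runStatementOnDriverNegative is a helper defined elsewhere in TestTxnCommands and is not shown here. As a hedged sketch only, such a helper is expected to run the statement, return the CommandProcessorException it raises, and fail loudly if the statement unexpectedly succeeds; the Driver field name d below is an assumption for illustration:

    private CommandProcessorException runStatementOnDriverNegative(String stmt) throws Exception {
        try {
            // d is assumed to be the test's Driver instance.
            d.run(stmt);
        } catch (CommandProcessorException e) {
            return e;
        }
        throw new RuntimeException("Statement should have failed: " + stmt);
    }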

Aggregations

CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException): 85 usages
Test (org.junit.Test): 42 usages
IOException (java.io.IOException): 14 usages
CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse): 14 usages
Driver (org.apache.hadoop.hive.ql.Driver): 12 usages
ArrayList (java.util.ArrayList): 10 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 10 usages
QTestProcessExecResult (org.apache.hadoop.hive.ql.QTestProcessExecResult): 9 usages
Path (org.apache.hadoop.fs.Path): 8 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 7 usages
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 6 usages
File (java.io.File): 5 usages
IDriver (org.apache.hadoop.hive.ql.IDriver): 5 usages
FileNotFoundException (java.io.FileNotFoundException): 4 usages
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException): 4 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 4 usages
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 3 usages
Map (java.util.Map): 3 usages
Nullable (javax.annotation.Nullable): 3 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 3 usages