Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.
From the class TestReplicationScenariosAcidTables, method testDumpAcidTableWithTableDirMissing:
/**
 * Verifies that REPL DUMP of an ACID table fails with FILE_NOT_FOUND when the
 * table's directory has been removed from the warehouse out-of-band.
 */
@Test
public void testDumpAcidTableWithTableDirMissing() throws Throwable {
  String dbName = testName.getMethodName();
  primary.run("CREATE DATABASE " + dbName + " WITH DBPROPERTIES ( '" + SOURCE_OF_REPLICATION + "' = '1,2,3')")
      .run("CREATE TABLE " + dbName + ".normal (a int) " + " STORED AS ORC TBLPROPERTIES ('transactional'='true')")
      .run("INSERT INTO " + dbName + ".normal values (1)");
  // Remove the table directory so the dump hits a missing-file condition.
  Path path = new Path(primary.warehouseRoot, dbName.toLowerCase() + ".db");
  path = new Path(path, "normal");
  FileSystem fs = path.getFileSystem(conf);
  // Recursive delete via the non-deprecated overload (FileSystem#delete(Path) is deprecated).
  fs.delete(path, true);
  try {
    primary.runCommand("REPL DUMP " + dbName + " with ('hive.repl.dump.include.acid.tables' = 'true')");
    // Assert.fail fires unconditionally; the previous `assert false` was a
    // no-op unless the JVM ran with -ea.
    Assert.fail("REPL DUMP should have failed since the table directory is missing");
  } catch (CommandProcessorException e) {
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(ErrorMsg.FILE_NOT_FOUND.getErrorCode(), e.getResponseCode());
  }
  primary.run("DROP TABLE " + dbName + ".normal");
  primary.run("drop database " + dbName);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.
From the class TestReplicationScenariosAcidTables, method testAcidTablesBootstrapWithConcurrentDropTable:
// Verifies that a bootstrap dump tolerates a concurrent insert + drop of an ACID
// table: the dropped table must be absent after bootstrap load, and a new table
// with the same name must be replicated by the next incremental cycle.
@Test
public void testAcidTablesBootstrapWithConcurrentDropTable() throws Throwable {
HiveConf primaryConf = primary.getConf();
primary.run("use " + primaryDbName).run("create table t1 (id int) clustered by(id) into 3 buckets stored as orc " + "tblproperties (\"transactional\"=\"true\")").run("insert into t1 values(1)");
// Perform concurrent write + drop on the acid table t1 when bootstrap dump in progress. Bootstrap
// won't dump the table but the subsequent incremental repl with new table with same name should be seen.
BehaviourInjection<CallerArguments, Boolean> callerInjectedBehavior = new BehaviourInjection<CallerArguments, Boolean>() {

@Nullable
@Override
public Boolean apply(@Nullable CallerArguments args) {
// injectionPathCalled / nonInjectedPathCalled are flags on BehaviourInjection;
// both being set is asserted below via assertInjectionsPerformed(true, true).
if (injectionPathCalled) {
nonInjectedPathCalled = true;
} else {
// Insert another row to t1 and drop the table from another txn when bootstrap dump in progress.
injectionPathCalled = true;
// The concurrent operations run on a separate thread with their own driver and
// session, so they execute in a different transaction than the dump.
Thread t = new Thread(new Runnable() {

@Override
public void run() {
LOG.info("Entered new thread");
IDriver driver = DriverFactory.newDriver(primaryConf);
// A fresh SessionState is required because SessionState is thread-local.
SessionState.start(new CliSessionState(primaryConf));
try {
driver.run("insert into " + primaryDbName + ".t1 values(2)");
driver.run("drop table " + primaryDbName + ".t1");
} catch (CommandProcessorException e) {
throw new RuntimeException(e);
}
LOG.info("Exit new thread success");
}
});
t.start();
LOG.info("Created new thread {}", t.getName());
// Block the injection callback until the insert+drop completes, guaranteeing
// the drop happens while the bootstrap dump is still in progress.
try {
t.join();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
return true;
}
};
InjectableBehaviourObjectStore.setCallerVerifier(callerInjectedBehavior);
WarehouseInstance.Tuple bootstrapDump = null;
try {
bootstrapDump = primary.dump(primaryDbName);
// Confirm both the injected and non-injected paths of the verifier were hit.
callerInjectedBehavior.assertInjectionsPerformed(true, true);
} finally {
// reset the behaviour
InjectableBehaviourObjectStore.resetCallerVerifier();
}
// Bootstrap dump has taken latest list of tables and hence won't see table t1 as it is dropped.
replica.load(replicatedDbName, primaryDbName).run("use " + replicatedDbName).run("repl status " + replicatedDbName).verifyResult(bootstrapDump.lastReplicationId).run("show tables").verifyResult(null);
// Create another ACID table with same name and insert a row. It should be properly replicated.
WarehouseInstance.Tuple incrementalDump = primary.run("use " + primaryDbName).run("create table t1 (id int) clustered by(id) into 3 buckets stored as orc " + "tblproperties (\"transactional\"=\"true\")").run("insert into t1 values(100)").dump(primaryDbName);
replica.load(replicatedDbName, primaryDbName).run("use " + replicatedDbName).run("repl status " + replicatedDbName).verifyResult(incrementalDump.lastReplicationId).run("select id from t1 order by id").verifyResult("100");
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.
From the class TestReplicationScenarios, method testDumpWithPartitionDirMissing:
/**
 * Verifies that REPL DUMP fails with FILE_NOT_FOUND when a partition directory
 * has been removed from the warehouse out-of-band.
 */
@Test
public void testDumpWithPartitionDirMissing() throws IOException {
  String dbName = createDB(testName.getMethodName(), driver);
  run("CREATE TABLE " + dbName + ".normal(a int) PARTITIONED BY (part int)", driver);
  run("INSERT INTO " + dbName + ".normal partition (part= 124) values (1)", driver);
  Path path;
  try {
    // BUG FIX: the original discarded getDatabase()'s return value, leaving db
    // null so db.getManagedLocationUri() always threw NPE and the fallback
    // branch was taken unconditionally. Capture the result instead.
    Database db = metaStoreClient.getDatabase(dbName);
    path = new Path(db.getManagedLocationUri());
  } catch (Exception e) {
    // Fallback: derive the database location from the test warehouse root.
    path = new Path(System.getProperty("test.warehouse.dir", "/tmp/warehouse/managed"));
    path = new Path(path, dbName.toLowerCase() + ".db");
  }
  path = new Path(path, "normal");
  path = new Path(path, "part=124");
  FileSystem fs = path.getFileSystem(hconf);
  // Recursive delete via the non-deprecated overload (FileSystem#delete(Path) is deprecated).
  fs.delete(path, true);
  advanceDumpDir();
  try {
    driver.run("REPL DUMP " + dbName);
    // Assert.fail fires unconditionally; the previous `assert false` was a
    // no-op unless the JVM ran with -ea.
    Assert.fail("REPL DUMP should have failed since the partition directory is missing");
  } catch (CommandProcessorException e) {
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(ErrorMsg.FILE_NOT_FOUND.getErrorCode(), e.getResponseCode());
  }
  run("DROP TABLE " + dbName + ".normal", driver);
  run("drop database " + dbName, true, driver);
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.
From the class Compiler, method initialize:
/**
 * Prepares the driver for compilation of {@code rawCommand}: starts the compile
 * perf-log span, acquires the compile lock, substitutes Hive variables, redacts
 * the query string for logging, and publishes it to the query display and conf.
 *
 * @param rawCommand the command text as submitted, before variable substitution
 * @throws CommandProcessorException if the driver state transition or an
 *         interruption check fails
 */
private void initialize(String rawCommand) throws CommandProcessorException {
  perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
  driverState.compilingWithLocking();
  VariableSubstitution variableSubstitution = new VariableSubstitution(new HiveVariableSource() {

    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  });
  String command = variableSubstitution.substitute(driverContext.getConf(), rawCommand);
  String queryStr = command;
  try {
    // command should be redacted to avoid to logging sensitive data
    queryStr = HookUtils.redactLogString(driverContext.getConf(), command);
  } catch (Exception e) {
    // Pass the exception as the throwable argument so the stack trace is
    // preserved; string concatenation only logged e.toString().
    LOG.warn("WARNING! Query command could not be redacted.", e);
  }
  DriverUtils.checkInterrupted(driverState, driverContext, "at beginning of compilation.", null, null);
  // The un-redacted command is what actually gets compiled; only the redacted
  // form is logged/displayed.
  context.setCmd(command);
  driverContext.getQueryDisplay().setQueryStr(queryStr);
  LOG.info("Compiling command(queryId=" + driverContext.getQueryId() + "): " + queryStr);
  driverContext.getConf().setQueryString(queryStr);
  // FIXME: side effect will leave the last query set at the session level
  if (SessionState.get() != null) {
    SessionState.get().getConf().setQueryString(queryStr);
    SessionState.get().setupQueryCurrentTimestamp();
  }
}
Use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in the Apache Hive project.
From the class TestTxnCommands, method testMergeNegative:
/**
 * A MERGE with multiple WHEN MATCHED clauses must carry an extra predicate on
 * the first one; verifies the compiler rejects the statement with
 * MERGE_PREDIACTE_REQUIRED.
 */
@Test
public void testMergeNegative() throws Exception {
  String mergeStmt = "MERGE INTO " + Table.ACIDTBL + " target\n" + "USING " + Table.NONACIDORCTBL + " source ON target.a = source.a\n" + "WHEN MATCHED THEN UPDATE set b = 1\n" + "WHEN MATCHED THEN DELETE\n" + "WHEN NOT MATCHED AND a < 1 THEN INSERT VALUES(1,2)";
  CommandProcessorException cpe = runStatementOnDriverNegative(mergeStmt);
  HiveException cause = (HiveException) cpe.getCause();
  Assert.assertEquals(ErrorMsg.MERGE_PREDIACTE_REQUIRED, cause.getCanonicalErrorMsg());
}
Aggregations