Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache:
the class ReplicationSemanticAnalyzer, method analyzeReplStatus.
/**
 * Analyzes a REPL STATUS command: resolves the last replicated state id
 * ({@code repl.last.id}) for either a single table (when {@code tblNameOrPattern}
 * is set) or a whole database, registers the inspected entity as a read input,
 * and surfaces the id to the client via a fetch task.
 *
 * @param ast the parsed REPL STATUS AST node (unused beyond dispatch; the
 *            db/table names were extracted by the caller into fields)
 * @throws SemanticException wrapping any metastore access failure
 */
private void analyzeReplStatus(ASTNode ast) throws SemanticException {
  // Parameterized logging avoids building the message when DEBUG is disabled.
  LOG.debug("ReplicationSemanticAnalyzer.analyzeReplStatus: {}.{}",
      dbNameOrPattern, tblNameOrPattern);
  String replLastId = null;
  try {
    // Use a fresh metastore handle when requested; otherwise reuse the session's.
    Hive newDb;
    if (needNewdb) {
      newDb = Hive.get(conf, false);
    } else {
      newDb = db;
    }
    if (tblNameOrPattern != null) {
      // Checking for status of table
      Table tbl = newDb.getTable(dbNameOrPattern, tblNameOrPattern);
      if (tbl != null) {
        inputs.add(new ReadEntity(tbl));
        replLastId = lastReplIdFromParams(tbl.getParameters());
      }
    } else {
      // Checking for status of a db
      Database database = newDb.getDatabase(dbNameOrPattern);
      if (database != null) {
        inputs.add(new ReadEntity(database));
        replLastId = lastReplIdFromParams(database.getParameters());
      }
    }
  } catch (HiveException e) {
    // TODO : simple wrap & rethrow for now, clean up with error codes
    throw new SemanticException(e);
  }
  prepareReturnValues(Collections.singletonList(replLastId), "last_repl_id#string");
  setFetchTask(createFetchTask("last_repl_id#string"));
  // Note: the original passed a stray third argument (conf) to a two-placeholder
  // format; SLF4J silently ignored it, so it has been dropped.
  LOG.debug("ReplicationSemanticAnalyzer.analyzeReplStatus: writing repl.last.id={} out to {}",
      replLastId, ctx.getResFile());
}

/**
 * Extracts the current replication state id from an entity's parameter map.
 *
 * @param params table or database parameters; may be null
 * @return the value stored under {@link ReplicationSpec.KEY#CURR_STATE_ID},
 *         or null when the map is null or the key is absent
 */
private static String lastReplIdFromParams(Map<String, String> params) {
  if (params == null) {
    return null;
  }
  return params.get(ReplicationSpec.KEY.CURR_STATE_ID.toString());
}
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache:
the class TestDbTxnManager, method setUp.
/**
 * Resets shared test state before each test: wipes and re-creates the
 * transaction tables, builds a fresh DbTxnManager, and configures short
 * transaction timeouts so timeout behaviour is observable in test time.
 *
 * @throws Exception if metastore preparation or manager construction fails
 */
@Before
public void setUp() throws Exception {
  // Start every test from a clean transactional metastore.
  TxnDbUtil.prepDb(conf);
  txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
  // init lock manager
  txnMgr.getLockManager();
  Assert.assertTrue(txnMgr instanceof DbTxnManager);
  nextInput = 1;
  // Diamond operator: element types are inferred from the field declarations.
  readEntities = new HashSet<>();
  writeEntities = new HashSet<>();
  // Run the timed-out-transaction reaper immediately and expire open
  // transactions after 10 seconds so tests can exercise timeouts quickly.
  conf.setTimeVar(HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, 0, TimeUnit.SECONDS);
  conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 10, TimeUnit.SECONDS);
  houseKeeperService = new AcidHouseKeeperService();
  houseKeeperService.setConf(conf);
}
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache:
the class TestReadEntityDirect, method testSelectEntityInDirectJoinAlias.
/**
 * A view joined with itself under two aliases: the view entities are expected
 * to be direct reads, while the table underlying the view stays indirect.
 *
 * @throws ParseException if query compilation fails to parse
 */
@Test
public void testSelectEntityInDirectJoinAlias() throws ParseException {
  Driver driver = createDriver();
  int rc = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
  assertEquals("Checking command success", 0, rc);
  assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
  for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
    switch (entity.getName()) {
      case "default@t1":
        assertFalse("not direct", entity.isDirect());
        break;
      case "default@v1":
        assertTrue("direct", entity.isDirect());
        break;
      default:
        fail("unexpected entity name " + entity.getName());
    }
  }
}
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache:
the class TestReadEntityDirect, method testSelectEntityInDirect.
/**
 * Selecting from a view: the view itself must be a direct read entity,
 * while its underlying table is only reached indirectly.
 *
 * @throws ParseException if query compilation fails to parse
 */
@Test
public void testSelectEntityInDirect() throws ParseException {
  Driver driver = createDriver();
  int compileStatus = driver.compile("select * from v1");
  assertEquals("Checking command success", 0, compileStatus);
  assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
  for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
    String name = entity.getName();
    if (name.equals("default@t1")) {
      // The base table is only reached through the view definition.
      assertFalse("not direct", entity.isDirect());
    } else if (name.equals("default@v1")) {
      assertTrue("direct", entity.isDirect());
    } else {
      fail("unexpected entity name " + name);
    }
  }
}
Use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache:
the class TestReadEntityDirect, method testSelectEntityViewDirectUnion.
/**
 * Union-all of a view and its underlying table: because the table is also
 * referenced directly in the query, both entities must be marked direct.
 *
 * @throws ParseException if query compilation fails to parse
 */
@Test
public void testSelectEntityViewDirectUnion() throws ParseException {
  Driver driver = createDriver();
  int rc = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
  assertEquals("Checking command success", 0, rc);
  assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
  for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
    switch (entity.getName()) {
      case "default@t1":
        assertTrue("direct", entity.isDirect());
        break;
      case "default@v1":
        assertTrue("direct", entity.isDirect());
        break;
      default:
        fail("unexpected entity name " + entity.getName());
    }
  }
}
Aggregations