
Example 11 with FetchTask

use of org.apache.hadoop.hive.ql.exec.FetchTask in project hive by apache.

the class TestDbTxnManagerIsolationProperties method gapOpenTxnsNoDirtyRead.

@Test
public void gapOpenTxnsNoDirtyRead() throws Exception {
    driver.run(("drop table if exists gap"));
    driver.run("create table gap (a int, b int) " + "stored as orc TBLPROPERTIES ('transactional'='true')");
    // Create one TXN to delete later
    driver.compileAndRespond("select * from gap");
    long first = txnMgr.getCurrentTxnId();
    driver.run();
    // The second one is used for the low water mark
    driver.run("select * from gap");
    DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
    swapTxnManager(txnMgr2);
    // Make sure that the time window is large enough for the gap to be considered open
    txnHandler.setOpenTxnTimeOutMillis(30000);
    // Create a gap
    deleteTransactionId(first);
    CommandProcessorResponse resp = driver2.compileAndRespond("select * from gap");
    long third = txnMgr2.getCurrentTxnId();
    Assert.assertTrue("Sequence number goes onward", third > first);
    ValidTxnList validTxns = txnMgr2.getValidTxns();
    Assert.assertEquals("Expect to see the gap as open", first, (long) validTxns.getMinOpenTxn());
    txnHandler.setOpenTxnTimeOutMillis(1000);
    // Now we cheat and create a transaction with the first sequenceId again, imitating a very slow openTxns call
    setBackSequence(first);
    swapTxnManager(txnMgr);
    driver.compileAndRespond("insert into gap values(1,2)");
    long fourth = txnMgr.getCurrentTxnId();
    Assert.assertEquals(first, fourth);
    driver.run();
    // Now we run our read query; it should not see the rows written by the insert
    swapTxnManager(txnMgr2);
    driver2.run();
    FetchTask fetchTask = driver2.getFetchTask();
    List res = new ArrayList();
    fetchTask.fetch(res);
    Assert.assertEquals("No dirty read", 0, res.size());
}
Also used : CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) ArrayList(java.util.ArrayList) List(java.util.List) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask) Test(org.junit.Test)
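
The no-dirty-read outcome in this test follows from the snapshot captured by getValidTxns(): since the gap id is still reported as open, it is excluded from the reader's ValidTxnList, so rows later written under that id remain invisible. A hypothetical extra assertion to that effect, reusing the variable names from the test above (an assumed extension, not part of the original test):

    // Hypothetical follow-up check for gapOpenTxnsNoDirtyRead (same fixture as above):
    // the open gap txn must not be visible in the reader's snapshot.
    Assert.assertFalse("Gap txn should be invisible to the reader", validTxns.isTxnValid(first));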

Example 12 with FetchTask

use of org.apache.hadoop.hive.ql.exec.FetchTask in project hive by apache.

the class TestDbTxnManagerIsolationProperties method testRebuildMVWhenOpenTxnPresents.

@Test
public void testRebuildMVWhenOpenTxnPresents() throws Exception {
    driver.run(("drop table if exists t1"));
    driver.run("create table t1 (a int, b int) stored as orc TBLPROPERTIES ('transactional'='true')");
    driver.run("insert into t1 values(1,2),(2,2)");
    driver.run("create materialized view mat1 stored as orc " + "TBLPROPERTIES ('transactional'='true') as " + "select a,b from t1 where a > 1");
    driver.run("insert into t1 values(3,3)");
    // Simulate starting a transaction by another client
    DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
    swapTxnManager(txnMgr2);
    driver2.compileAndRespond("delete from t1 where a = 2");
    // Switch back to client #1 and rebuild the MV while the transaction with the delete statement is still open
    swapTxnManager(txnMgr);
    driver.run("alter materialized view mat1 rebuild");
    driver.run("select * from mat1 order by a");
    FetchTask fetchTask = driver.getFetchTask();
    List res = new ArrayList();
    fetchTask.fetch(res);
    Assert.assertEquals(2, res.size());
    Assert.assertEquals("2\t2", res.get(0));
    Assert.assertEquals("3\t3", res.get(1));
    // execute the delete statement and commit the transaction
    swapTxnManager(txnMgr2);
    driver2.run();
    // Rebuild the view again.
    swapTxnManager(txnMgr);
    driver.run("alter materialized view mat1 rebuild");
    driver.run("select * from mat1");
    fetchTask = driver.getFetchTask();
    res = new ArrayList();
    fetchTask.fetch(res);
    Assert.assertEquals(1, res.size());
    Assert.assertEquals("3\t3", res.get(0));
}
Also used : ArrayList(java.util.ArrayList) List(java.util.List) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask) Test(org.junit.Test)

Example 13 with FetchTask

use of org.apache.hadoop.hive.ql.exec.FetchTask in project hive by apache.

the class TestDbTxnManagerIsolationProperties method basicOpenTxnsNoDirtyRead.

@Test
public void basicOpenTxnsNoDirtyRead() throws Exception {
    driver.run(("drop table if exists gap"));
    driver.run("create table gap (a int, b int) " + "stored as orc TBLPROPERTIES ('transactional'='true')");
    // Create one TXN for the read, but do not run it yet
    driver.compileAndRespond("select * from gap");
    long first = txnMgr.getCurrentTxnId();
    DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
    swapTxnManager(txnMgr2);
    driver2.compileAndRespond("insert into gap values(1,2)");
    long second = txnMgr2.getCurrentTxnId();
    Assert.assertTrue("Sequence number goes onward", second > first);
    driver2.run();
    // Now we run our read query; it should not see the rows written by the insert
    swapTxnManager(txnMgr);
    driver.run();
    FetchTask fetchTask = driver.getFetchTask();
    List res = new ArrayList();
    fetchTask.fetch(res);
    Assert.assertEquals("No dirty read", 0, res.size());
}
Also used : ArrayList(java.util.ArrayList) List(java.util.List) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask) Test(org.junit.Test)

Example 14 with FetchTask

use of org.apache.hadoop.hive.ql.exec.FetchTask in project hive by apache.

the class TestDbTxnManagerIsolationProperties method multipleGapOpenTxnsNoDirtyRead.

@Test
public void multipleGapOpenTxnsNoDirtyRead() throws Exception {
    driver.run(("drop table if exists gap"));
    driver.run("create table gap (a int, b int) " + "stored as orc TBLPROPERTIES ('transactional'='true')");
    // Create some TXNs to delete later
    OpenTxnsResponse openTxns = txnHandler.openTxns(new OpenTxnRequest(10, "user", "local"));
    openTxns.getTxn_ids().stream().forEach(txnId -> {
        silentCommitTxn(new CommitTxnRequest(txnId));
    });
    long first = openTxns.getTxn_ids().get(0);
    long last = openTxns.getTxn_ids().get(9);
    // The next one is used for the low water mark
    driver.run("select * from gap");
    DbTxnManager txnMgr2 = (DbTxnManager) TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
    swapTxnManager(txnMgr2);
    // Make sure that the time window is large enough for the gap to be considered open
    txnHandler.setOpenTxnTimeOutMillis(30000);
    // Create a gap
    deleteTransactionId(first, last);
    CommandProcessorResponse resp = driver2.compileAndRespond("select * from gap");
    long next = txnMgr2.getCurrentTxnId();
    Assert.assertTrue("Sequence number goes onward", next > last);
    ValidTxnList validTxns = txnMgr2.getValidTxns();
    Assert.assertEquals("Expect to see the gap as open", first, (long) validTxns.getMinOpenTxn());
    txnHandler.setOpenTxnTimeOutMillis(1000);
    // Now we cheat and create a transaction with the first sequenceId again, imitating a very slow openTxns call
    setBackSequence(first);
    swapTxnManager(txnMgr);
    driver.compileAndRespond("insert into gap values(1,2)");
    next = txnMgr.getCurrentTxnId();
    Assert.assertEquals(first, next);
    driver.run();
    // Now we run our read query; it should not see the rows written by the insert
    swapTxnManager(txnMgr2);
    driver2.run();
    FetchTask fetchTask = driver2.getFetchTask();
    List res = new ArrayList();
    fetchTask.fetch(res);
    Assert.assertEquals("No dirty read", 0, res.size());
}
Also used : CommitTxnRequest(org.apache.hadoop.hive.metastore.api.CommitTxnRequest) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) ArrayList(java.util.ArrayList) OpenTxnRequest(org.apache.hadoop.hive.metastore.api.OpenTxnRequest) List(java.util.List) OpenTxnsResponse(org.apache.hadoop.hive.metastore.api.OpenTxnsResponse) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask) Test(org.junit.Test)

Example 15 with FetchTask

use of org.apache.hadoop.hive.ql.exec.FetchTask in project hive by apache.

the class TestReplicationSemanticAnalyzer method testReplStatusAnalyze.

@Test
public void testReplStatusAnalyze() throws Exception {
    ParseDriver pd = new ParseDriver();
    ASTNode root;
    // Repl status command
    String query = "repl status " + defaultDB;
    root = (ASTNode) pd.parse(query).getChild(0);
    ReplicationSemanticAnalyzer rs = (ReplicationSemanticAnalyzer) SemanticAnalyzerFactory.get(queryState, root);
    rs.analyze(root, new Context(conf));
    FetchTask fetchTask = rs.getFetchTask();
    assertNotNull(fetchTask);
}
Also used : Context(org.apache.hadoop.hive.ql.Context) FetchTask(org.apache.hadoop.hive.ql.exec.FetchTask) Test(org.junit.Test)
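
As a possible extension (an assumption, not part of the original test), the FetchTask returned by the analyzer also carries its fetch plan (org.apache.hadoop.hive.ql.plan.FetchWork), which could be checked as well:

    // Hypothetical additional assertion: the REPL STATUS analysis should attach a fetch plan.
    FetchWork fetchWork = fetchTask.getWork();
    assertNotNull(fetchWork);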

Aggregations

FetchTask (org.apache.hadoop.hive.ql.exec.FetchTask): 27
ArrayList (java.util.ArrayList): 11
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 8
List (java.util.List): 7
ValidTxnList (org.apache.hadoop.hive.common.ValidTxnList): 7
Path (org.apache.hadoop.fs.Path): 6
FetchWork (org.apache.hadoop.hive.ql.plan.FetchWork): 6
TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc): 6
Test (org.junit.Test): 6
IOException (java.io.IOException): 5
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 5
Context (org.apache.hadoop.hive.ql.Context): 4
CacheUsage (org.apache.hadoop.hive.ql.cache.results.CacheUsage): 4
FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator): 4
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 4
Operator (org.apache.hadoop.hive.ql.exec.Operator): 3
Task (org.apache.hadoop.hive.ql.exec.Task): 3
CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException): 3
LinkedHashMap (java.util.LinkedHashMap): 2
LinkedHashSet (java.util.LinkedHashSet): 2
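
The isolation tests above all read query results the same way: run the statement through the Driver, obtain its FetchTask, and drain the rows into a list. A minimal self-contained sketch of that pattern is shown below; it assumes a Driver that has already been constructed and configured by the caller, and the helper name FetchHelper.fetchAll is made up for illustration, not taken from the Hive code base.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.exec.FetchTask;

// Illustrative helper (hypothetical): runs a query through an already-configured
// Driver and returns the fetched rows, which arrive as tab-separated strings in
// the tests above.
class FetchHelper {
    static List fetchAll(Driver driver, String query) throws Exception {
        // Compile and execute the statement
        driver.run(query);
        // The Driver exposes the task that materializes the result rows
        FetchTask fetchTask = driver.getFetchTask();
        List res = new ArrayList();
        if (fetchTask != null) {
            // Append all fetched rows into the caller-supplied list
            fetchTask.fetch(res);
        }
        return res;
    }
}

In the tests, the size and contents of the fetched list are then asserted directly, for example an empty list in the dirty-read checks above.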