
Example 51 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

From class TestTxnCommands, method testAddAndDropConstraintAdvancingWriteIds:

@Test
public void testAddAndDropConstraintAdvancingWriteIds() throws Exception {
    String tableName = "constraints_table";
    hiveConf.setBoolean("hive.stats.autogather", true);
    hiveConf.setBoolean("hive.stats.column.autogather", true);
    // Close the thread-local Hive object so that the configuration change is visible to HMS.
    Hive.closeCurrent();
    runStatementOnDriver("drop table if exists " + tableName);
    runStatementOnDriver(String.format("create table %s (a int, b string) stored as orc " + "TBLPROPERTIES ('transactional'='true', 'transactional_properties'='insert_only')", tableName));
    runStatementOnDriver(String.format("insert into %s (a) values (0)", tableName));
    IMetaStoreClient msClient = new HiveMetaStoreClient(hiveConf);
    String validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    LOG.info("ValidWriteIds before add constraint::" + validWriteIds);
    Assert.assertEquals("default.constraints_table:1:9223372036854775807::", validWriteIds);
    runStatementOnDriver(String.format("alter table %s  ADD CONSTRAINT a_PK PRIMARY KEY (`a`) DISABLE NOVALIDATE", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    LOG.info("ValidWriteIds after add constraint primary key::" + validWriteIds);
    Assert.assertEquals("default.constraints_table:2:9223372036854775807::", validWriteIds);
    runStatementOnDriver(String.format("alter table %s CHANGE COLUMN b b STRING NOT NULL", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    LOG.info("ValidWriteIds after add constraint not null::" + validWriteIds);
    Assert.assertEquals("default.constraints_table:3:9223372036854775807::", validWriteIds);
    runStatementOnDriver(String.format("alter table %s ADD CONSTRAINT check1 CHECK (a <= 25)", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    LOG.info("ValidWriteIds after add constraint check::" + validWriteIds);
    Assert.assertEquals("default.constraints_table:4:9223372036854775807::", validWriteIds);
    runStatementOnDriver(String.format("alter table %s ADD CONSTRAINT unique1 UNIQUE (a, b) DISABLE", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    LOG.info("ValidWriteIds after add constraint unique::" + validWriteIds);
    Assert.assertEquals("default.constraints_table:5:9223372036854775807::", validWriteIds);
    LOG.info("ValidWriteIds before drop constraint::" + validWriteIds);
    runStatementOnDriver(String.format("alter table %s  DROP CONSTRAINT a_PK", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    Assert.assertEquals("default.constraints_table:6:9223372036854775807::", validWriteIds);
    LOG.info("ValidWriteIds after drop constraint primary key::" + validWriteIds);
    runStatementOnDriver(String.format("alter table %s  DROP CONSTRAINT check1", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    Assert.assertEquals("default.constraints_table:7:9223372036854775807::", validWriteIds);
    LOG.info("ValidWriteIds after drop constraint check::" + validWriteIds);
    runStatementOnDriver(String.format("alter table %s  DROP CONSTRAINT unique1", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    Assert.assertEquals("default.constraints_table:8:9223372036854775807::", validWriteIds);
    LOG.info("ValidWriteIds after drop constraint unique::" + validWriteIds);
    runStatementOnDriver(String.format("alter table %s CHANGE COLUMN b b STRING", tableName));
    validWriteIds = msClient.getValidWriteIds("default." + tableName).toString();
    Assert.assertEquals("default.constraints_table:9:9223372036854775807::", validWriteIds);
}
Also used: HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient), Test (org.junit.Test)
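
The asserted strings are serialized ValidWriteIdList values which, in this Hive version, follow the pattern <dbName.tableName>:<highWatermark>:<minOpenWriteId>:<openWriteIds>:<abortedWriteIds>; the test verifies that each DDL statement advances the high-watermark by one. A minimal sketch, assuming org.apache.hadoop.hive.common.ValidReaderWriteIdList can be constructed from this serialized form, of reading the high-watermark back out:

import org.apache.hadoop.hive.common.ValidReaderWriteIdList;

// Hypothetical helper: parse the string returned by IMetaStoreClient#getValidWriteIds.
// "default.constraints_table:1:9223372036854775807::" parses to table name
// default.constraints_table, high-watermark 1, and no open or aborted write ids.
static long highWatermarkOf(String serializedWriteIds) {
    ValidReaderWriteIdList writeIds = new ValidReaderWriteIdList(serializedWriteIds);
    return writeIds.getHighWatermark();
}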

Example 52 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

From class TestHiveMetaStoreChecker, method setUp:

@Before
public void setUp() throws Exception {
    hive = Hive.get();
    hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "15");
    hive.getConf().set(MetastoreConf.ConfVars.MSCK_PATH_VALIDATION.getVarname(), "throw");
    msc = new HiveMetaStoreClient(hive.getConf());
    checker = new HiveMetaStoreChecker(msc, hive.getConf());
    hive.getConf().setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    HiveConf.setBoolVar(hive.getConf(), HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    SessionState ss = SessionState.start(hive.getConf());
    ss.initTxnMgr(hive.getConf());
    partCols = new ArrayList<>();
    partCols.add(new FieldSchema(partDateName, serdeConstants.STRING_TYPE_NAME, ""));
    partCols.add(new FieldSchema(partCityName, serdeConstants.STRING_TYPE_NAME, ""));
    parts = new ArrayList<>();
    Map<String, String> part1 = new HashMap<>();
    part1.put(partDateName, "2008-01-01");
    part1.put(partCityName, "london");
    parts.add(part1);
    Map<String, String> part2 = new HashMap<>();
    part2.put(partDateName, "2008-01-02");
    part2.put(partCityName, "stockholm");
    parts.add(part2);
    // Clean up in case something is left over from a previous run.
    dropDbTable();
}
Also used: SessionState (org.apache.hadoop.hive.ql.session.SessionState), HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), HiveMetaStoreChecker (org.apache.hadoop.hive.metastore.HiveMetaStoreChecker), Before (org.junit.Before)
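
The partition-spec maps built above describe the directory layout the checker reconciles with the metastore. As a small illustrative sketch (the column names here are hypothetical; the test keeps them in partDateName and partCityName), Warehouse.makePartName renders the key=value path segment a partition occupies under the table location:

import java.util.Arrays;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;

// Renders the "key=value/key=value" directory name the checker expects to find
// on the filesystem for a given partition spec.
static String partDirFor(String date, String city) throws MetaException {
    FieldSchema dateCol = new FieldSchema("partdate", "string", "");
    FieldSchema cityCol = new FieldSchema("partcity", "string", "");
    return Warehouse.makePartName(Arrays.asList(dateCol, cityCol),
            Arrays.asList(date, city)); // e.g. partdate=2008-01-01/partcity=london
}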

Example 53 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

From class TestMsckDropPartitionsInBatches, method setupClass:

@BeforeClass
public static void setupClass() throws Exception {
    hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
    hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    SessionState.start(hiveConf);
    db = new HiveMetaStoreClient(hiveConf);
    msck = new Msck(false, false);
    msck.init(Msck.getMsckConf(hiveConf));
}
Also used: HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Msck (org.apache.hadoop.hive.metastore.Msck), BeforeClass (org.junit.BeforeClass)

Example 54 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

From class TestStatsUpdaterThread, method testNoStatsUpdateForReplTable:

private void testNoStatsUpdateForReplTable(String tblNamePrefix, String txnProperty) throws Exception {
    String tblWOStats = tblNamePrefix + "_repl_trgt_nostats";
    String tblWithStats = tblNamePrefix + "_repl_trgt_stats";
    String ptnTblWOStats = tblNamePrefix + "_ptn_repl_trgt_nostats";
    String ptnTblWithStats = tblNamePrefix + "_ptn_repl_trgt_stats";
    String dbName = ss.getCurrentDatabase();
    executeQuery("alter database " + dbName + " set dbproperties('" + ReplConst.TARGET_OF_REPLICATION + "'='true')");
    StatsUpdaterThread su = createUpdater();
    su.startWorkers();
    IMetaStoreClient msClient = new HiveMetaStoreClient(hiveConf);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER, false);
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVESTATSCOLAUTOGATHER, false);
    executeQuery("create table " + tblWOStats + "(i int, s string) " + txnProperty);
    executeQuery("insert into " + tblWOStats + "(i, s) values (1, 'test')");
    verifyStatsUpToDate(tblWOStats, Lists.newArrayList("i"), msClient, false);
    executeQuery("create table " + ptnTblWOStats + "(s string) partitioned by (i int) " + txnProperty);
    executeQuery("insert into " + ptnTblWOStats + "(i, s) values (1, 'test')");
    executeQuery("insert into " + ptnTblWOStats + "(i, s) values (2, 'test2')");
    executeQuery("insert into " + ptnTblWOStats + "(i, s) values (3, 'test3')");
    verifyPartStatsUpToDate(3, 1, msClient, ptnTblWOStats, false);
    executeQuery("create table " + tblWithStats + "(i int, s string)" + txnProperty);
    executeQuery("insert into " + tblWithStats + "(i, s) values (1, 'test')");
    executeQuery("analyze table " + tblWithStats + " compute statistics for columns");
    verifyStatsUpToDate(tblWithStats, Lists.newArrayList("i"), msClient, true);
    executeQuery("create table " + ptnTblWithStats + "(s string) partitioned by (i int) " + txnProperty);
    executeQuery("insert into " + ptnTblWithStats + "(i, s) values (1, 'test')");
    executeQuery("insert into " + ptnTblWithStats + "(i, s) values (2, 'test2')");
    executeQuery("insert into " + ptnTblWithStats + "(i, s) values (3, 'test3')");
    executeQuery("analyze table " + ptnTblWithStats + " compute statistics for columns");
    verifyPartStatsUpToDate(3, 1, msClient, ptnTblWithStats, true);
    assertFalse(su.runOneIteration());
    Assert.assertEquals(0, su.getQueueLength());
    verifyStatsUpToDate(tblWOStats, Lists.newArrayList("i"), msClient, false);
    verifyStatsUpToDate(tblWithStats, Lists.newArrayList("i"), msClient, true);
    verifyPartStatsUpToDate(3, 1, msClient, ptnTblWOStats, false);
    verifyPartStatsUpToDate(3, 1, msClient, ptnTblWithStats, true);
    executeQuery("alter database " + dbName + " set dbproperties('" + ReplConst.TARGET_OF_REPLICATION + "'='')");
    executeQuery("drop table " + tblWOStats);
    executeQuery("drop table " + tblWithStats);
    executeQuery("drop table " + ptnTblWOStats);
    executeQuery("drop table " + ptnTblWithStats);
    msClient.close();
}
Also used: HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient), IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)
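
verifyStatsUpToDate and verifyPartStatsUpToDate are helpers defined elsewhere in TestStatsUpdaterThread; conceptually they ask the metastore whether column statistics exist and are current. A hedged approximation of the underlying client call (in newer Hive versions getTableColumnStatistics also takes an engine argument):

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

// Approximation of a stats-verification helper: returns true if the metastore
// holds column statistics for the given columns of the table.
static boolean hasColumnStats(IMetaStoreClient msClient, String db, String table,
        List<String> cols) throws Exception {
    List<ColumnStatisticsObj> stats = msClient.getTableColumnStatistics(db, table, cols);
    return stats != null && !stats.isEmpty();
}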

Example 55 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

From class CompactorTest, method setup:

protected void setup(HiveConf conf) throws Exception {
    this.conf = conf;
    MetastoreConf.setTimeVar(conf, MetastoreConf.ConfVars.TXN_OPENTXN_TIMEOUT, 2, TimeUnit.SECONDS);
    TestTxnDbUtil.setConfValues(conf);
    TestTxnDbUtil.cleanDb(conf);
    TestTxnDbUtil.prepDb(conf);
    ms = new HiveMetaStoreClient(conf);
    txnHandler = TxnUtils.getTxnStore(conf);
    tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
Also used: HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient), File (java.io.File)
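
This setup has a natural counterpart; a sketch of a matching teardown, assuming the same TestTxnDbUtil helpers used above, might look like:

// Hypothetical teardown mirroring setup(): release the metastore connection and
// wipe the transaction tables so the next test starts from a clean state.
protected void tearDown() throws Exception {
    if (ms != null) {
        ms.close();
    }
    TestTxnDbUtil.cleanDb(conf);
}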

Aggregations

HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 141
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 81
Test (org.junit.Test): 78
Table (org.apache.hadoop.hive.metastore.api.Table): 60
FileSystem (org.apache.hadoop.fs.FileSystem): 57
Path (org.apache.hadoop.fs.Path): 45
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 31
Before (org.junit.Before): 23
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 18
FileStatus (org.apache.hadoop.fs.FileStatus): 17
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 16
File (java.io.File): 12
IOException (java.io.IOException): 12
HiveStreamingConnection (org.apache.hive.streaming.HiveStreamingConnection): 12
ArrayList (java.util.ArrayList): 11
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore): 10
StreamingConnection (org.apache.hive.streaming.StreamingConnection): 10
List (java.util.List): 9
HashMap (java.util.HashMap): 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest): 8
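
For reference, a minimal self-contained use of HiveMetaStoreClient along the lines of the examples above; the database and table names are placeholders:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;

public class MetaStoreClientSketch {
    public static void main(String[] args) throws Exception {
        // Reads hive-site.xml from the classpath; if hive.metastore.uris is set
        // the client talks to a remote HMS, otherwise to an embedded metastore.
        HiveConf conf = new HiveConf();
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            Table t = client.getTable("default", "my_table"); // placeholder names
            System.out.println(t.getSd().getLocation());
        } finally {
            client.close();
        }
    }
}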