
Example 66 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestHadoopAuthBridge23 method obtainTokenAndAddIntoUGI.

private void obtainTokenAndAddIntoUGI(UserGroupInformation clientUgi, String tokenSig) throws Exception {
    String tokenStrForm = getDelegationTokenStr(clientUgi, clientUgi);
    Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
    t.decodeFromUrlString(tokenStrForm);
    //check whether the username in the token is what we expect
    DelegationTokenIdentifier d = new DelegationTokenIdentifier();
    d.readFields(new DataInputStream(new ByteArrayInputStream(t.getIdentifier())));
    Assert.assertTrue("Usernames don't match", clientUgi.getShortUserName().equals(d.getUser().getShortUserName()));
    if (tokenSig != null) {
        conf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, tokenSig);
        t.setService(new Text(tokenSig));
    }
    //add the token to the clientUgi for securely talking to the metastore
    clientUgi.addToken(t);
    //Create the metastore client as the clientUgi. Doing so this
    //way will give the client access to the token that was added earlier
    //in the clientUgi
    HiveMetaStoreClient hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {

        public HiveMetaStoreClient run() throws Exception {
            HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf);
            return hiveClient;
        }
    });
    Assert.assertTrue("Couldn't connect to metastore", hiveClient != null);
    //try out some metastore operations
    createDBAndVerifyExistence(hiveClient);
    hiveClient.close();
    //Now cancel the delegation token
    HiveMetaStore.cancelDelegationToken(tokenStrForm);
    //now metastore connection should fail
    hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {

        public HiveMetaStoreClient run() {
            try {
                return new HiveMetaStoreClient(conf);
            } catch (MetaException e) {
                return null;
            }
        }
    });
    Assert.assertTrue("Expected metastore operations to fail", hiveClient == null);
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) ByteArrayInputStream(java.io.ByteArrayInputStream) InvalidToken(org.apache.hadoop.security.token.SecretManager.InvalidToken) Token(org.apache.hadoop.security.token.Token) Text(org.apache.hadoop.io.Text) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) DataInputStream(java.io.DataInputStream) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) TTransportException(org.apache.thrift.transport.TTransportException) AuthorizationException(org.apache.hadoop.security.authorize.AuthorizationException) IOException(java.io.IOException)
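
The getDelegationTokenStr() helper called at the top of this method is not shown. Below is a minimal sketch of what such a helper can look like, assuming the same HiveConf field named conf and a secured metastore; HiveMetaStoreClient.getDelegationToken(owner, renewer) is a real client API, but this body is illustrative rather than the test's actual implementation.

private static String getDelegationTokenStr(final UserGroupInformation ownerUgi, UserGroupInformation asUgi) throws Exception {
    // Hypothetical helper, not copied from the test: ask the metastore for a
    // delegation token while running as asUgi, with ownerUgi as owner and renewer.
    return asUgi.doAs(new PrivilegedExceptionAction<String>() {

        public String run() throws Exception {
            HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
            try {
                return client.getDelegationToken(ownerUgi.getShortUserName(), ownerUgi.getShortUserName());
            } finally {
                client.close();
            }
        }
    });
}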

Example 67 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestTableIterable method testNumReturned.

@Test
public void testNumReturned() throws MetaException, InvalidOperationException, UnknownDBException, TException {
    HiveMetaStoreClient msc = mock(HiveMetaStoreClient.class);
    // create a mocked metastore client that returns 3 table objects every time it is called
    // will use same size for TableIterable batch fetch size
    List<Table> threeTables = Arrays.asList(new Table(), new Table(), new Table());
    when(msc.getTableObjectsByName(anyString(), anyListOf(String.class))).thenReturn(threeTables);
    List<String> tableNames = Arrays.asList("a", "b", "c", "d", "e", "f");
    TableIterable tIterable = new TableIterable(msc, "dummy", tableNames, threeTables.size());
    tIterable.iterator();
    Iterator<Table> tIter = tIterable.iterator();
    int size = 0;
    while (tIter.hasNext()) {
        size++;
        tIter.next();
    }
    assertEquals("Number of table objects returned", size, tableNames.size());
    verify(msc).getTableObjectsByName("dummy", Arrays.asList("a", "b", "c"));
    verify(msc).getTableObjectsByName("dummy", Arrays.asList("d", "e", "f"));
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Table(org.apache.hadoop.hive.metastore.api.Table) Test(org.junit.Test)
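
TableIterable wraps the batched fetch that the mock verifies above. A minimal sketch of doing the same batching directly against a real client, assuming a reachable metastore, a HiveConf named conf, and a database named dummy:

// Fetch Table objects for a list of names in fixed-size batches, which is what
// TableIterable does internally (batch size 3 here, matching the test).
HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);
List<String> names = Arrays.asList("a", "b", "c", "d", "e", "f");
int batchSize = 3;
List<Table> tables = new ArrayList<Table>();
for (int i = 0; i < names.size(); i += batchSize) {
    List<String> batch = names.subList(i, Math.min(i + batchSize, names.size()));
    tables.addAll(msc.getTableObjectsByName("dummy", batch));
}
msc.close();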

Example 68 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.

the class InputTranslation method translate.

private static HiveTableDef translate(QueryDef qDef, HiveTableSpec spec, HiveTableDef def) throws WindowingException {
    def = def == null ? new HiveTableDef() : def;
    HiveMetaStoreClient hiveMSC = qDef.getTranslationInfo().getHiveMSClient();
    Hive hive = qDef.getTranslationInfo().getHive();
    def.setSpec(spec);
    if (spec.getDbName() == null) {
        spec.setDbName(hive.getCurrentDatabase());
    }
    try {
        Table t = hiveMSC.getTable(spec.getDbName(), spec.getTableName());
        qDef.getTranslationInfo().setTbl(TranslateUtils.getHiveMetaTable(hive, t.getDbName(), def.getHiveTableSpec().getTableName()));
        StorageDescriptor sd = t.getSd();
        def.setInputFormatClassName(sd.getInputFormat());
        def.setTableSerdeClassName(sd.getSerdeInfo().getSerializationLib());
        def.setTableSerdeProps(setupSerdeProps(qDef, sd));
        def.setLocation(sd.getLocation());
        Deserializer serde = HiveUtils.getDeserializer(qDef.getTranslationInfo().getHiveCfg(), t);
        def.setOI((StructObjectInspector) serde.getObjectInspector());
        def.setSerde((SerDe) serde);
    } catch (WindowingException we) {
        throw we;
    } catch (Exception he) {
        throw new WindowingException(he);
    }
    return def;
}
Also used : Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Table(org.apache.hadoop.hive.metastore.api.Table) Deserializer(org.apache.hadoop.hive.serde2.Deserializer) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) WindowingException(com.sap.hadoop.windowing.WindowingException) HiveTableDef(com.sap.hadoop.windowing.query2.definition.HiveTableDef)
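
For reference, the metastore calls that translate() relies on can be exercised on their own. A minimal sketch, assuming a running metastore, a HiveConf named conf, and a hypothetical table default.my_table:

// Read the same StorageDescriptor fields that translate() copies into the HiveTableDef.
HiveMetaStoreClient msc = new HiveMetaStoreClient(conf);
Table t = msc.getTable("default", "my_table");
StorageDescriptor sd = t.getSd();
String inputFormat = sd.getInputFormat();                    // input format class name
String serdeLib = sd.getSerdeInfo().getSerializationLib();   // serde class name
String location = sd.getLocation();                          // table location
msc.close();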

Example 69 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestReplicationScenarios method setUpBeforeClass.

// if verifySetup is set to true, all the test setup we do will perform additional
// verifications as well, which is useful to verify that our setup occurred
// correctly when developing and debugging tests. These verifications, however
// do not test any new functionality for replication, and thus, are not relevant
// for testing replication itself. For steady state, we want this to be false.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    hconf = new HiveConf(TestReplicationScenarios.class);
    String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
    if (metastoreUri != null) {
        hconf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
        return;
    }
    // turn on db notification listener on metastore
    hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DBNOTIF_LISTENER_CLASSNAME);
    hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
    hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
    hconf.setVar(HiveConf.ConfVars.REPLCMDIR, TEST_PATH + "/cmroot/");
    proxySettingName = "hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts";
    hconf.set(proxySettingName, "*");
    MetaStoreTestUtils.startMetaStoreWithRetry(hconf);
    hconf.setVar(HiveConf.ConfVars.REPLDIR, TEST_PATH + "/hrepl/");
    hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.HIVE_IN_TEST_REPL.varname, "true");
    hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hconf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
    hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname, "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore");
    hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
    System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
    System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
    Path testPath = new Path(TEST_PATH);
    FileSystem fs = FileSystem.get(testPath.toUri(), hconf);
    fs.mkdirs(testPath);
    driver = DriverFactory.newDriver(hconf);
    SessionState.start(new CliSessionState(hconf));
    metaStoreClient = new HiveMetaStoreClient(hconf);
    FileUtils.deleteDirectory(new File("metastore_db2"));
    HiveConf hconfMirrorServer = new HiveConf();
    hconfMirrorServer.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:;databaseName=metastore_db2;create=true");
    MetaStoreTestUtils.startMetaStoreWithRetry(hconfMirrorServer);
    hconfMirror = new HiveConf(hconf);
    String thriftUri = MetastoreConf.getVar(hconfMirrorServer, MetastoreConf.ConfVars.THRIFT_URIS);
    MetastoreConf.setVar(hconfMirror, MetastoreConf.ConfVars.THRIFT_URIS, thriftUri);
    driverMirror = DriverFactory.newDriver(hconfMirror);
    metaStoreClientMirror = new HiveMetaStoreClient(hconfMirror);
    ObjectStore.setTwoMetastoreTesting(true);
}
Also used : Path(org.apache.hadoop.fs.Path) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) File(java.io.File) BeforeClass(org.junit.BeforeClass)
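
The mirror setup above boils down to pointing a second HiveConf at a different Thrift URI before opening a client. A minimal sketch of that pattern on its own, with a hypothetical URI:

// Connect a HiveMetaStoreClient to a specific (remote) metastore rather than an
// embedded one by setting hive.metastore.uris before constructing the client.
HiveConf conf = new HiveConf();
conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
try {
    System.out.println(client.getAllDatabases());
} finally {
    client.close();
}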

Example 70 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestNotificationListener method setUp.

@Before
public void setUp() throws Exception {
    System.setProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
    System.setProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
    ConnectionFactory connFac = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
    Connection conn = connFac.createConnection();
    conn.start();
    // We want message to be sent when session commits, thus we run in
    // transacted mode.
    Session session = conn.createSession(true, Session.SESSION_TRANSACTED);
    Destination hcatTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX);
    MessageConsumer consumer1 = session.createConsumer(hcatTopic);
    consumer1.setMessageListener(this);
    Destination tblTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb.mytbl");
    MessageConsumer consumer2 = session.createConsumer(tblTopic);
    consumer2.setMessageListener(this);
    Destination dbTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb");
    MessageConsumer consumer3 = session.createConsumer(dbTopic);
    consumer3.setMessageListener(this);
    setUpHiveConf();
    hiveConf.set(ConfVars.METASTORE_EVENT_LISTENERS.varname, NotificationListener.class.getName());
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    client = new HiveMetaStoreClient(hiveConf);
}
Also used : ActiveMQConnectionFactory(org.apache.activemq.ActiveMQConnectionFactory) Destination(javax.jms.Destination) ConnectionFactory(javax.jms.ConnectionFactory) MessageConsumer(javax.jms.MessageConsumer) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Connection(javax.jms.Connection) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Session(javax.jms.Session) Before(org.junit.Before)
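
The test registers itself as the javax.jms.MessageListener for each consumer, so it also needs an onMessage() implementation, which is not shown above. A minimal sketch, assuming the event type is carried in the HCatConstants.HCAT_EVENT message property, which is how NotificationListener publishes it:

// Hypothetical listener body: record which HCatalog event type arrived on the topic.
@Override
public void onMessage(Message msg) {
    try {
        String eventType = msg.getStringProperty(HCatConstants.HCAT_EVENT);
        System.out.println("Received HCatalog event: " + eventType);
    } catch (JMSException e) {
        throw new RuntimeException(e);
    }
}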

Aggregations

HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient) 141
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient) 81
Test (org.junit.Test) 78
Table (org.apache.hadoop.hive.metastore.api.Table) 60
FileSystem (org.apache.hadoop.fs.FileSystem) 57
Path (org.apache.hadoop.fs.Path) 45
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 31
Before (org.junit.Before) 23
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 18
FileStatus (org.apache.hadoop.fs.FileStatus) 17
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState) 16
File (java.io.File) 12
IOException (java.io.IOException) 12
HiveStreamingConnection (org.apache.hive.streaming.HiveStreamingConnection) 12
ArrayList (java.util.ArrayList) 11
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore) 10
StreamingConnection (org.apache.hive.streaming.StreamingConnection) 10
List (java.util.List) 9
HashMap (java.util.HashMap) 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest) 8