use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
the class TestHadoopAuthBridge23 method obtainTokenAndAddIntoUGI.
private void obtainTokenAndAddIntoUGI(UserGroupInformation clientUgi, String tokenSig) throws Exception {
  String tokenStrForm = getDelegationTokenStr(clientUgi, clientUgi);
  Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
  t.decodeFromUrlString(tokenStrForm);
  //check whether the username in the token is what we expect
  DelegationTokenIdentifier d = new DelegationTokenIdentifier();
  d.readFields(new DataInputStream(new ByteArrayInputStream(t.getIdentifier())));
  Assert.assertTrue("Usernames don't match", clientUgi.getShortUserName().equals(d.getUser().getShortUserName()));
  if (tokenSig != null) {
    conf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, tokenSig);
    t.setService(new Text(tokenSig));
  }
  //add the token to the clientUgi for securely talking to the metastore
  clientUgi.addToken(t);
  //Create the metastore client as the clientUgi. Doing so this
  //way will give the client access to the token that was added earlier
  //in the clientUgi
  HiveMetaStoreClient hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {
    public HiveMetaStoreClient run() throws Exception {
      HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf);
      return hiveClient;
    }
  });
  Assert.assertTrue("Couldn't connect to metastore", hiveClient != null);
  //try out some metastore operations
  createDBAndVerifyExistence(hiveClient);
  hiveClient.close();
  //Now cancel the delegation token
  HiveMetaStore.cancelDelegationToken(tokenStrForm);
  //now metastore connection should fail
  hiveClient = clientUgi.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {
    public HiveMetaStoreClient run() {
      try {
        return new HiveMetaStoreClient(conf);
      } catch (MetaException e) {
        return null;
      }
    }
  });
  Assert.assertTrue("Expected metastore operations to fail", hiveClient == null);
}
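For reference, the delegation-token round trip this test drives through UGI can also be sketched directly against the public client API. The following is a minimal sketch only, assuming a Kerberos-secured metastore reachable from the default HiveConf; the renewer principal "hive/_HOST@EXAMPLE.COM", the service name "metastoreToken", and the class name DelegationTokenSketch are placeholders, and only the getDelegationToken/cancelDelegationToken client calls are assumed.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class DelegationTokenSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // Obtain a token as the logged-in (Kerberos) user; the renewer principal is a placeholder.
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    String owner = UserGroupInformation.getCurrentUser().getUserName();
    String tokenStrForm = client.getDelegationToken(owner, "hive/_HOST@EXAMPLE.COM");
    client.close();

    // Decode the string form into a Hadoop Token and hand it to the UGI, tagging it with the
    // same service name the client will look up via hive.metastore.token.signature.
    Token<TokenIdentifier> token = new Token<TokenIdentifier>();
    token.decodeFromUrlString(tokenStrForm);
    conf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE, "metastoreToken");
    token.setService(new Text("metastoreToken"));
    UserGroupInformation.getCurrentUser().addToken(token);

    // A client created after this point can authenticate with the token instead of Kerberos.
    HiveMetaStoreClient tokenClient = new HiveMetaStoreClient(conf);
    tokenClient.cancelDelegationToken(tokenStrForm); // invalidate the token when done
    tokenClient.close();
  }
}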
use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
the class TestTableIterable method testNumReturned.
@Test
public void testNumReturned() throws MetaException, InvalidOperationException, UnknownDBException, TException {
  HiveMetaStoreClient msc = mock(HiveMetaStoreClient.class);
  // create a mocked metastore client that returns 3 table objects every time it is called
  // will use same size for TableIterable batch fetch size
  List<Table> threeTables = Arrays.asList(new Table(), new Table(), new Table());
  when(msc.getTableObjectsByName(anyString(), anyListOf(String.class))).thenReturn(threeTables);
  List<String> tableNames = Arrays.asList("a", "b", "c", "d", "e", "f");
  TableIterable tIterable = new TableIterable(msc, "dummy", tableNames, threeTables.size());
  tIterable.iterator();
  Iterator<Table> tIter = tIterable.iterator();
  int size = 0;
  while (tIter.hasNext()) {
    size++;
    tIter.next();
  }
  assertEquals("Number of table objects returned", size, tableNames.size());
  verify(msc).getTableObjectsByName("dummy", Arrays.asList("a", "b", "c"));
  verify(msc).getTableObjectsByName("dummy", Arrays.asList("d", "e", "f"));
}
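What the test verifies is that TableIterable pages through getTableObjectsByName in fixed-size batches. Below is a minimal sketch of that paging pattern against a live client, assuming a reachable metastore; the database name "mydb", the batch size, and the class name BatchedTableFetchSketch are placeholders.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;

public class BatchedTableFetchSketch {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient msc = new HiveMetaStoreClient(new HiveConf());
    List<String> names = msc.getAllTables("mydb"); // "mydb" is a placeholder database
    int batchSize = 3;                             // same batch size as the test above
    List<Table> tables = new ArrayList<Table>();
    for (int i = 0; i < names.size(); i += batchSize) {
      List<String> batch = names.subList(i, Math.min(i + batchSize, names.size()));
      // one Thrift round trip per batch, mirroring TableIterable's paging
      tables.addAll(msc.getTableObjectsByName("mydb", batch));
    }
    System.out.println("Fetched " + tables.size() + " table objects");
    msc.close();
  }
}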
use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.
the class InputTranslation method translate.
private static HiveTableDef translate(QueryDef qDef, HiveTableSpec spec, HiveTableDef def) throws WindowingException {
  def = def == null ? new HiveTableDef() : def;
  HiveMetaStoreClient hiveMSC = qDef.getTranslationInfo().getHiveMSClient();
  Hive hive = qDef.getTranslationInfo().getHive();
  def.setSpec(spec);
  if (spec.getDbName() == null) {
    spec.setDbName(hive.getCurrentDatabase());
  }
  try {
    Table t = hiveMSC.getTable(spec.getDbName(), spec.getTableName());
    qDef.getTranslationInfo().setTbl(TranslateUtils.getHiveMetaTable(hive, t.getDbName(), def.getHiveTableSpec().getTableName()));
    StorageDescriptor sd = t.getSd();
    def.setInputFormatClassName(sd.getInputFormat());
    def.setTableSerdeClassName(sd.getSerdeInfo().getSerializationLib());
    def.setTableSerdeProps(setupSerdeProps(qDef, sd));
    def.setLocation(sd.getLocation());
    Deserializer serde = HiveUtils.getDeserializer(qDef.getTranslationInfo().getHiveCfg(), t);
    def.setOI((StructObjectInspector) serde.getObjectInspector());
    def.setSerde((SerDe) serde);
  } catch (WindowingException we) {
    throw we;
  } catch (Exception he) {
    throw new WindowingException(he);
  }
  return def;
}
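The metastore reads used above are plain HiveMetaStoreClient Thrift calls. Here is a minimal standalone sketch, assuming a configured metastore and placeholder database/table names, that pulls the same StorageDescriptor fields the translation copies into HiveTableDef.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;

public class TableMetadataSketch {
  public static void main(String[] args) throws Exception {
    HiveMetaStoreClient msc = new HiveMetaStoreClient(new HiveConf());
    Table t = msc.getTable("mydb", "mytable"); // placeholder names
    StorageDescriptor sd = t.getSd();
    System.out.println("input format: " + sd.getInputFormat());
    System.out.println("serde lib:    " + sd.getSerdeInfo().getSerializationLib());
    System.out.println("location:     " + sd.getLocation());
    msc.close();
  }
}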
use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
the class TestReplicationScenarios method setUpBeforeClass.
// if verifySetup is set to true, all the test setup we do will perform additional
// verifications as well, which is useful to verify that our setup occurred
// correctly when developing and debugging tests. These verifications, however,
// do not test any new functionality for replication and are thus not relevant
// for testing replication itself. For steady state, we want this to be false.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  hconf = new HiveConf(TestReplicationScenarios.class);
  String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
  if (metastoreUri != null) {
    hconf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
    return;
  }
  // turn on the db notification listener on the metastore
  hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DBNOTIF_LISTENER_CLASSNAME);
  hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
  hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
  hconf.setVar(HiveConf.ConfVars.REPLCMDIR, TEST_PATH + "/cmroot/");
  proxySettingName = "hadoop.proxyuser." + Utils.getUGI().getShortUserName() + ".hosts";
  hconf.set(proxySettingName, "*");
  MetaStoreTestUtils.startMetaStoreWithRetry(hconf);
  hconf.setVar(HiveConf.ConfVars.REPLDIR, TEST_PATH + "/hrepl/");
  hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hconf.set(HiveConf.ConfVars.HIVE_IN_TEST_REPL.varname, "true");
  hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  hconf.set(HiveConf.ConfVars.HIVE_TXN_MANAGER.varname, "org.apache.hadoop.hive.ql.lockmgr.DummyTxnManager");
  hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname, "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore");
  hconf.setBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES, true);
  System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
  System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
  Path testPath = new Path(TEST_PATH);
  FileSystem fs = FileSystem.get(testPath.toUri(), hconf);
  fs.mkdirs(testPath);
  driver = DriverFactory.newDriver(hconf);
  SessionState.start(new CliSessionState(hconf));
  metaStoreClient = new HiveMetaStoreClient(hconf);
  FileUtils.deleteDirectory(new File("metastore_db2"));
  HiveConf hconfMirrorServer = new HiveConf();
  hconfMirrorServer.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, "jdbc:derby:;databaseName=metastore_db2;create=true");
  MetaStoreTestUtils.startMetaStoreWithRetry(hconfMirrorServer);
  hconfMirror = new HiveConf(hconf);
  String thriftUri = MetastoreConf.getVar(hconfMirrorServer, MetastoreConf.ConfVars.THRIFT_URIS);
  MetastoreConf.setVar(hconfMirror, MetastoreConf.ConfVars.THRIFT_URIS, thriftUri);
  driverMirror = DriverFactory.newDriver(hconfMirror);
  metaStoreClientMirror = new HiveMetaStoreClient(hconfMirror);
  ObjectStore.setTwoMetastoreTesting(true);
}
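The mirror-client wiring at the end boils down to pointing a fresh conf at a specific Thrift URI before constructing the client. A minimal sketch of that step, using the same MetastoreConf calls as the setup code; the URI thrift://localhost:9084 and the class name MirrorClientSketch are placeholders.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class MirrorClientSketch {
  public static void main(String[] args) throws Exception {
    HiveConf mirrorConf = new HiveConf();
    // "thrift://localhost:9084" is a placeholder for the second metastore's URI.
    MetastoreConf.setVar(mirrorConf, MetastoreConf.ConfVars.THRIFT_URIS, "thrift://localhost:9084");
    HiveMetaStoreClient mirrorClient = new HiveMetaStoreClient(mirrorConf);
    System.out.println("databases on mirror: " + mirrorClient.getAllDatabases());
    mirrorClient.close();
  }
}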
use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
the class TestNotificationListener method setUp.
@Before
public void setUp() throws Exception {
  System.setProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
  System.setProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
  ConnectionFactory connFac = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
  Connection conn = connFac.createConnection();
  conn.start();
  // We want messages to be sent when the session commits, thus we run in
  // transacted mode.
  Session session = conn.createSession(true, Session.SESSION_TRANSACTED);
  Destination hcatTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX);
  MessageConsumer consumer1 = session.createConsumer(hcatTopic);
  consumer1.setMessageListener(this);
  Destination tblTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb.mytbl");
  MessageConsumer consumer2 = session.createConsumer(tblTopic);
  consumer2.setMessageListener(this);
  Destination dbTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb");
  MessageConsumer consumer3 = session.createConsumer(dbTopic);
  consumer3.setMessageListener(this);
  setUpHiveConf();
  hiveConf.set(ConfVars.METASTORE_EVENT_LISTENERS.varname, NotificationListener.class.getName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  client = new HiveMetaStoreClient(hiveConf);
}
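The test registers itself as the JMS listener on those topics. For reference, here is a minimal sketch of such a listener, using only javax.jms APIs; it simply prints the text body rather than decoding HCatalog event messages, and the class name TopicEventPrinter is a placeholder.

import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;

// A bare-bones listener: NotificationListener publishes metastore events to the
// topics subscribed above, and each delivery lands in onMessage.
public class TopicEventPrinter implements MessageListener {
  @Override
  public void onMessage(Message msg) {
    try {
      if (msg instanceof TextMessage) {
        System.out.println("event payload: " + ((TextMessage) msg).getText());
      } else {
        System.out.println("non-text event: " + msg.getJMSType());
      }
    } catch (JMSException e) {
      throw new RuntimeException(e);
    }
  }
}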