Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class TestNotificationListener, method setUp:
@Before
public void setUp() throws Exception {
  System.setProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
  System.setProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
  ConnectionFactory connFac = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
  Connection conn = connFac.createConnection();
  conn.start();
  // We want message to be sent when session commits, thus we run in
  // transacted mode.
  Session session = conn.createSession(true, Session.SESSION_TRANSACTED);
  Destination hcatTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX);
  MessageConsumer consumer1 = session.createConsumer(hcatTopic);
  consumer1.setMessageListener(this);
  Destination tblTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb.mytbl");
  MessageConsumer consumer2 = session.createConsumer(tblTopic);
  consumer2.setMessageListener(this);
  Destination dbTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb");
  MessageConsumer consumer3 = session.createConsumer(dbTopic);
  consumer3.setMessageListener(this);
  setUpHiveConf();
  hiveConf.set(ConfVars.METASTORE_EVENT_LISTENERS.varname, NotificationListener.class.getName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(new CliSessionState(hiveConf));
  driver = DriverFactory.newDriver(hiveConf);
  client = new HiveMetaStoreClient(hiveConf);
}
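A test that opens a JMS connection and a metastore client in setUp usually releases them when it finishes. A minimal sketch of a matching teardown, assuming the client field assigned above and the conn variable are kept as instance fields of the test (the actual TestNotificationListener cleanup may differ):

@After
public void tearDown() throws Exception {
  // Close the metastore client and the embedded ActiveMQ connection opened in setUp().
  // Field names are assumptions: they must match fields promoted from the setup code above.
  if (client != null) {
    client.close();
  }
  if (conn != null) {
    conn.close();
  }
}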
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.
The class CompactorTest, method setup:
@Before
public void setup() throws Exception {
  conf = new HiveConf();
  TxnDbUtil.setConfValues(conf);
  TxnDbUtil.cleanDb(conf);
  ms = new HiveMetaStoreClient(conf);
  txnHandler = TxnUtils.getTxnStore(conf);
  tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
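As a usage sketch (not taken from CompactorTest itself; the database, table, and partition names are placeholders), the txnHandler and metastore client created above are typically exercised by enqueuing a compaction request and then inspecting the compaction queue:

// Hypothetical follow-up inside a test method built on the setup above.
CompactionRequest rqst = new CompactionRequest("default", "mytable", CompactionType.MINOR);
rqst.setPartitionname("ds=today");  // only needed for partitioned tables
txnHandler.compact(rqst);
// The queued request can then be verified through the txn store.
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());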
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.
The class HiveUtils, method getRowResolver:
public static RowResolver getRowResolver(String db, String table, String alias, HiveConf conf) throws WindowingException {
  LOG.info("HiveUtils::getRowResolver invoked on " + table);
  try {
    HiveMetaStoreClient client = getClient(conf);
    db = validateDB(client, db);
    org.apache.hadoop.hive.ql.metadata.Table t = Hive.get(conf).getTable(db, table);
    StructObjectInspector rowObjectInspector = (StructObjectInspector) t.getDeserializer().getObjectInspector();
    RowResolver rwsch = getRowResolver(alias, rowObjectInspector);
    for (FieldSchema part_col : t.getPartCols()) {
      LOG.trace("Adding partition col: " + part_col);
      rwsch.put(alias, part_col.getName(),
          new ColumnInfo(part_col.getName(), TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
    }
    Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
    // use a list for easy customization
    List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(), vc.getTypeInfo(), alias, true, vc.getIsHidden()));
      vcList.add(vc);
    }
    return rwsch;
  } catch (WindowingException w) {
    throw w;
  } catch (Exception me) {
    throw new WindowingException(me);
  }
}
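The call getRowResolver(alias, rowObjectInspector) above delegates to an overload that walks the row ObjectInspector obtained from the table's deserializer. A sketch of what such an overload does, not the verbatim SQLWindowing code, using the standard Hive serde2 and ql.exec APIs:

public static RowResolver getRowResolver(String alias, StructObjectInspector rowObjectInspector) throws WindowingException {
  RowResolver rwsch = new RowResolver();
  // Register every field exposed by the ObjectInspector as a ColumnInfo under the table alias.
  for (StructField field : rowObjectInspector.getAllStructFieldRefs()) {
    rwsch.put(alias, field.getFieldName(),
        new ColumnInfo(field.getFieldName(),
            TypeInfoUtils.getTypeInfoFromObjectInspector(field.getFieldObjectInspector()),
            alias, false));
  }
  return rwsch;
}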
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.
The class IOUtils, method createTableWindowingInput:
@SuppressWarnings("unchecked")
public static WindowingInput createTableWindowingInput(String dbName, String tableName, Configuration conf) throws WindowingException {
  try {
    HiveMetaStoreClient client = HiveUtils.getClient(conf);
    String db = HiveUtils.validateDB(client, dbName);
    Table t = HiveUtils.getTable(client, db, tableName);
    StorageDescriptor sd = t.getSd();
    HiveConf hConf = new HiveConf(conf, IOUtils.class);
    JobConf job = new JobConf(hConf);
    Class<? extends InputFormat<? extends Writable, ? extends Writable>> inputFormatClass =
        (Class<? extends InputFormat<? extends Writable, ? extends Writable>>) Class.forName(sd.getInputFormat());
    hConf.setClass("mapred.input.format.class", inputFormatClass, InputFormat.class);
    hConf.set(INPUT_INPUTFORMAT_CLASS, inputFormatClass.getName());
    InputFormat<? extends Writable, ? extends Writable> iFmt = inputFormatClass.newInstance();
    if (iFmt instanceof TextInputFormat) {
      ((TextInputFormat) iFmt).configure(job);
    }
    Path p = new Path(sd.getLocation());
    /*
     * Convert the Path in the StorageDescriptor into a Path in the current FileSystem.
     * Used in testing: Jobs run on MiniDFSCluster, whereas hive metadata refers to a real cluster.
     */
    {
      p = makeQualified(p, conf);
    }
    FileInputFormat.addInputPath(job, p);
    InputSplit[] iSplits = iFmt.getSplits(job, 1);
    org.apache.hadoop.mapred.RecordReader<Writable, Writable> rdr =
        (org.apache.hadoop.mapred.RecordReader<Writable, Writable>) iFmt.getRecordReader(iSplits[0], job, Reporter.NULL);
    hConf.set(INPUT_PATH, sd.getLocation());
    hConf.set(INPUT_KEY_CLASS, rdr.createKey().getClass().getName());
    hConf.set(INPUT_VALUE_CLASS, rdr.createValue().getClass().getName());
    hConf.set(INPUT_SERDE_CLASS, sd.getSerdeInfo().getSerializationLib());
    TableWindowingInput tIn = new TableWindowingInput();
    tIn.initialize(null, hConf, MetaStoreUtils.getSchema(t));
    return tIn;
  } catch (WindowingException w) {
    throw w;
  } catch (Exception e) {
    throw new WindowingException(e);
  }
}
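The makeQualified(p, conf) step above re-roots the location recorded in the metastore onto the FileSystem of the current job, which is what makes the MiniDFSCluster test scenario work. A minimal sketch of such a helper, under the assumption that only the path component should be kept (the real SQLWindowing helper may differ):

public static Path makeQualified(Path p, Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  // Drop the scheme/authority stored in the metastore and qualify the bare path
  // against the FileSystem derived from the current Configuration.
  return fs.makeQualified(new Path(p.toUri().getPath()));
}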
Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project ranger by apache.
The class HiveClient, method initConnection:
private void initConnection(String userName, String password) throws HadoopException {
  if (enableHiveMetastoreLookup) {
    try {
      HiveConf conf = new HiveConf();
      if (!StringUtils.isEmpty(hiveSiteFilePath)) {
        File f = new File(hiveSiteFilePath);
        if (f.exists()) {
          conf.addResource(f.toURI().toURL());
        } else {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Hive site conf file path " + hiveSiteFilePath + " does not exists for Hive Metastore lookup");
          }
        }
      } else {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Hive site conf file path property not found for Hive Metastore lookup");
        }
      }
      hiveClient = new HiveMetaStoreClient(conf);
    } catch (HadoopException he) {
      String msgDesc = "initConnection: Class or its nullary constructor might not accessible." + "So unable to initiate connection to hive thrift server instance.";
      HadoopException hdpException = new HadoopException(msgDesc, he);
      hdpException.generateResponseDataMap(false, getMessage(he), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    } catch (MalformedURLException e) {
      String msgDesc = "initConnection: URL might be malformed." + "So unable to initiate connection to hive thrift server instance.";
      HadoopException hdpException = new HadoopException(msgDesc, e);
      hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    } catch (MetaException e) {
      String msgDesc = "initConnection: Meta info is not proper." + "So unable to initiate connection to hive thrift server instance.";
      HadoopException hdpException = new HadoopException(msgDesc, e);
      hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    } catch (Throwable t) {
      String msgDesc = "Unable to connect to Hive Thrift Server instance";
      HadoopException hdpException = new HadoopException(msgDesc, t);
      hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    }
  } else {
    Properties prop = getConfigHolder().getRangerSection();
    String driverClassName = prop.getProperty("jdbc.driverClassName");
    String url = prop.getProperty("jdbc.url");
    if (driverClassName != null) {
      try {
        Driver driver = (Driver) Class.forName(driverClassName).newInstance();
        DriverManager.registerDriver(driver);
      } catch (SQLException e) {
        String msgDesc = "initConnection: Caught SQLException while registering " + "Hive driver, so Unable to connect to Hive Thrift Server instance.";
        HadoopException hdpException = new HadoopException(msgDesc, e);
        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      } catch (IllegalAccessException ilae) {
        String msgDesc = "initConnection: Class or its nullary constructor might not accessible." + "So unable to initiate connection to hive thrift server instance.";
        HadoopException hdpException = new HadoopException(msgDesc, ilae);
        hdpException.generateResponseDataMap(false, getMessage(ilae), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      } catch (InstantiationException ie) {
        String msgDesc = "initConnection: Class may not have its nullary constructor or " + "may be the instantiation fails for some other reason." + "So unable to initiate connection to hive thrift server instance.";
        HadoopException hdpException = new HadoopException(msgDesc, ie);
        hdpException.generateResponseDataMap(false, getMessage(ie), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      } catch (ExceptionInInitializerError eie) {
        String msgDesc = "initConnection: Got ExceptionInInitializerError, " + "The initialization provoked by this method fails." + "So unable to initiate connection to hive thrift server instance.";
        HadoopException hdpException = new HadoopException(msgDesc, eie);
        hdpException.generateResponseDataMap(false, getMessage(eie), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      } catch (SecurityException se) {
        String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance," + " The caller's class loader is not the same as or an ancestor " + "of the class loader for the current class and invocation of " + "s.checkPackageAccess() denies access to the package of this class.";
        HadoopException hdpException = new HadoopException(msgDesc, se);
        hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      } catch (Throwable t) {
        String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, " + "please provide valid value of field : {jdbc.driverClassName}.";
        HadoopException hdpException = new HadoopException(msgDesc, t);
        hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, "jdbc.driverClassName");
        if (LOG.isDebugEnabled()) {
          LOG.debug(msgDesc, hdpException);
        }
        throw hdpException;
      }
    }
    try {
      if (userName == null && password == null) {
        con = DriverManager.getConnection(url);
      } else {
        String decryptedPwd = null;
        try {
          decryptedPwd = PasswordUtils.decryptPassword(password);
        } catch (Exception ex) {
          LOG.info("Password decryption failed; trying Hive connection with received password string");
          decryptedPwd = null;
        } finally {
          if (decryptedPwd == null) {
            decryptedPwd = password;
          }
        }
        con = DriverManager.getConnection(url, userName, decryptedPwd);
      }
    } catch (SQLException e) {
      String msgDesc = "Unable to connect to Hive Thrift Server instance.";
      HadoopException hdpException = new HadoopException(msgDesc, e);
      hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    } catch (SecurityException se) {
      String msgDesc = "Unable to connect to Hive Thrift Server instance.";
      HadoopException hdpException = new HadoopException(msgDesc, se);
      hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    } catch (Throwable t) {
      String msgDesc = "Unable to connect to Hive Thrift Server instance";
      HadoopException hdpException = new HadoopException(msgDesc, t);
      hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, url);
      if (LOG.isDebugEnabled()) {
        LOG.debug(msgDesc, hdpException);
      }
      throw hdpException;
    }
  }
}
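Once the metastore-lookup branch above has set hiveClient, resource lookups can go straight through the Thrift metastore instead of JDBC. A brief sketch of that usage; the method name and matching logic here are illustrative, not Ranger's actual code:

private List<String> getDatabaseListFromMetastore(String dbMatching) throws Exception {
  List<String> ret = new ArrayList<>();
  // HiveMetaStoreClient.getAllDatabases() returns every database name known to the metastore.
  for (String db : hiveClient.getAllDatabases()) {
    if (dbMatching == null || dbMatching.isEmpty() || db.matches(dbMatching)) {
      ret.add(db);
    }
  }
  return ret;
}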