
Example 36 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

The class TestNotificationListener, method setUp.

@Before
public void setUp() throws Exception {
    System.setProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
    System.setProperty("java.naming.provider.url", "vm://localhost?broker.persistent=false");
    ConnectionFactory connFac = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false");
    Connection conn = connFac.createConnection();
    conn.start();
    // We want messages to be sent only when the session commits, so we run
    // in transacted mode.
    Session session = conn.createSession(true, Session.SESSION_TRANSACTED);
    Destination hcatTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX);
    MessageConsumer consumer1 = session.createConsumer(hcatTopic);
    consumer1.setMessageListener(this);
    Destination tblTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb.mytbl");
    MessageConsumer consumer2 = session.createConsumer(tblTopic);
    consumer2.setMessageListener(this);
    Destination dbTopic = session.createTopic(HCatConstants.HCAT_DEFAULT_TOPIC_PREFIX + ".mydb");
    MessageConsumer consumer3 = session.createConsumer(dbTopic);
    consumer3.setMessageListener(this);
    setUpHiveConf();
    hiveConf.set(ConfVars.METASTORE_EVENT_LISTENERS.varname, NotificationListener.class.getName());
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    client = new HiveMetaStoreClient(hiveConf);
}
Also used: javax.jms.Connection, javax.jms.ConnectionFactory, javax.jms.Destination, javax.jms.MessageConsumer, javax.jms.Session, org.apache.activemq.ActiveMQConnectionFactory, org.apache.hadoop.hive.cli.CliSessionState, org.apache.hadoop.hive.metastore.HiveMetaStoreClient, org.junit.Before
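
Since setUp registers this as the listener on all three consumers, TestNotificationListener must implement javax.jms.MessageListener. A minimal hedged sketch of such a callback (the HCAT_EVENT header name follows HCatConstants.HCAT_EVENT; the collecting behavior is illustrative, not the actual test body):

// Hypothetical sketch: a MessageListener that records the event type carried
// in each notification message's HCAT_EVENT header (per HCatConstants).
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;

public class CollectingListener implements MessageListener {
    private final List<String> eventTypes = new CopyOnWriteArrayList<String>();

    @Override
    public void onMessage(Message msg) {
        try {
            // Record which metastore event produced this message.
            eventTypes.add(msg.getStringProperty("HCAT_EVENT"));
        } catch (JMSException e) {
            throw new RuntimeException(e);
        }
    }

    public List<String> receivedEventTypes() {
        return eventTypes;
    }
}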

Example 37 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

The class CompactorTest, method setup.

@Before
public void setup() throws Exception {
    conf = new HiveConf();
    TxnDbUtil.setConfValues(conf);
    TxnDbUtil.cleanDb(conf);
    ms = new HiveMetaStoreClient(conf);
    txnHandler = TxnUtils.getTxnStore(conf);
    tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
Also used: java.io.File, org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaStoreClient, org.junit.Before
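
The setup above leaves behind an open client and a temporary directory; a minimal hedged sketch of a matching teardown (the tearDown name and the commons-io deletion helper are assumptions, not CompactorTest's actual cleanup):

// Hypothetical sketch: cleanup mirroring setup(). Closes the metastore client
// and removes the temporary table directory created above.
// Requires org.junit.After and org.apache.commons.io.FileUtils.
@After
public void tearDown() throws Exception {
    if (ms != null) {
        ms.close();
    }
    if (tmpdir != null) {
        FileUtils.deleteDirectory(tmpdir);
    }
}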

Example 38 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.

The class HiveUtils, method getRowResolver.

public static RowResolver getRowResolver(String db, String table, String alias, HiveConf conf) throws WindowingException {
    LOG.info("HiveUtils::getRowResolver invoked on " + table);
    try {
        HiveMetaStoreClient client = getClient(conf);
        db = validateDB(client, db);
        org.apache.hadoop.hive.ql.metadata.Table t = Hive.get(conf).getTable(db, table);
        StructObjectInspector rowObjectInspector = (StructObjectInspector) t.getDeserializer().getObjectInspector();
        RowResolver rwsch = getRowResolver(alias, rowObjectInspector);
        for (FieldSchema part_col : t.getPartCols()) {
            LOG.trace("Adding partition col: " + part_col);
            rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(), TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
        }
        Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
        // collect the virtual columns into a list for easier customization
        List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
        while (vcs.hasNext()) {
            VirtualColumn vc = vcs.next();
            rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(), vc.getTypeInfo(), alias, true, vc.getIsHidden()));
            vcList.add(vc);
        }
        return rwsch;
    } catch (WindowingException w) {
        throw w;
    } catch (Exception me) {
        throw new WindowingException(me);
    }
}
Also used: com.sap.hadoop.windowing.WindowingException, java.util.ArrayList, org.apache.hadoop.hive.metastore.HiveMetaStoreClient, org.apache.hadoop.hive.metastore.api.FieldSchema, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.hadoop.hive.ql.exec.ColumnInfo, org.apache.hadoop.hive.ql.metadata.HiveException, org.apache.hadoop.hive.ql.metadata.VirtualColumn, org.apache.hadoop.hive.ql.parse.RowResolver, org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector
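
A hedged usage sketch of getRowResolver (the database, table, alias, and column names are placeholders, not values from the project):

// Hypothetical caller: resolve the schema of a table, including partition
// and virtual columns, under the alias "s".
HiveConf conf = new HiveConf();
RowResolver resolver = HiveUtils.getRowResolver("mydb", "sales", "s", conf);
// Look up one column; depending on the Hive version, RowResolver.get may
// throw SemanticException.
ColumnInfo info = resolver.get("s", "region");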

Example 39 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project SQLWindowing by hbutani.

The class IOUtils, method createTableWindowingInput.

@SuppressWarnings("unchecked")
public static WindowingInput createTableWindowingInput(String dbName, String tableName, Configuration conf) throws WindowingException {
    try {
        HiveMetaStoreClient client = HiveUtils.getClient(conf);
        String db = HiveUtils.validateDB(client, dbName);
        Table t = HiveUtils.getTable(client, db, tableName);
        StorageDescriptor sd = t.getSd();
        HiveConf hConf = new HiveConf(conf, IOUtils.class);
        JobConf job = new JobConf(hConf);
        Class<? extends InputFormat<? extends Writable, ? extends Writable>> inputFormatClass = (Class<? extends InputFormat<? extends Writable, ? extends Writable>>) Class.forName(sd.getInputFormat());
        hConf.setClass("mapred.input.format.class", inputFormatClass, InputFormat.class);
        hConf.set(INPUT_INPUTFORMAT_CLASS, inputFormatClass.getName());
        InputFormat<? extends Writable, ? extends Writable> iFmt = inputFormatClass.newInstance();
        if (iFmt instanceof TextInputFormat) {
            ((TextInputFormat) iFmt).configure(job);
        }
        Path p = new Path(sd.getLocation());
        /*
         * Convert the Path in the StorageDescriptor into a Path in the current
         * FileSystem. Used in testing: jobs run on a MiniDFSCluster, whereas the
         * Hive metadata refers to a real cluster.
         */
        p = makeQualified(p, conf);
        FileInputFormat.addInputPath(job, p);
        InputSplit[] iSplits = iFmt.getSplits(job, 1);
        org.apache.hadoop.mapred.RecordReader<Writable, Writable> rdr = (org.apache.hadoop.mapred.RecordReader<Writable, Writable>) iFmt.getRecordReader(iSplits[0], job, Reporter.NULL);
        hConf.set(INPUT_PATH, sd.getLocation());
        hConf.set(INPUT_KEY_CLASS, rdr.createKey().getClass().getName());
        hConf.set(INPUT_VALUE_CLASS, rdr.createValue().getClass().getName());
        hConf.set(INPUT_SERDE_CLASS, sd.getSerdeInfo().getSerializationLib());
        TableWindowingInput tIn = new TableWindowingInput();
        tIn.initialize(null, hConf, MetaStoreUtils.getSchema(t));
        return tIn;
    } catch (WindowingException w) {
        throw w;
    } catch (Exception e) {
        throw new WindowingException(e);
    }
}
Also used: com.sap.hadoop.windowing.WindowingException, java.io.IOException, org.apache.hadoop.fs.Path, org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaStoreClient, org.apache.hadoop.hive.metastore.api.StorageDescriptor, org.apache.hadoop.hive.metastore.api.Table, org.apache.hadoop.hive.serde2.SerDeException, org.apache.hadoop.io.Writable, org.apache.hadoop.mapred.FileInputFormat, org.apache.hadoop.mapred.InputFormat, org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.TextInputFormat
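
A hedged usage sketch of createTableWindowingInput (the database and table names are placeholders):

// Hypothetical caller: open a Hive table as a WindowingInput. After this call
// the input is initialized against the table's InputFormat, SerDe, and HDFS
// location; row iteration is left to the windowing runtime.
// Requires org.apache.hadoop.conf.Configuration.
Configuration conf = new Configuration();
WindowingInput in = IOUtils.createTableWindowingInput("default", "clicks", conf);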

Example 40 with HiveMetaStoreClient

Use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project ranger by apache.

The class HiveClient, method initConnection.

private void initConnection(String userName, String password) throws HadoopException {
    if (enableHiveMetastoreLookup) {
        try {
            HiveConf conf = new HiveConf();
            if (!StringUtils.isEmpty(hiveSiteFilePath)) {
                File f = new File(hiveSiteFilePath);
                if (f.exists()) {
                    conf.addResource(f.toURI().toURL());
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Hive site conf file path " + hiveSiteFilePath + " does not exists for Hive Metastore lookup");
                    }
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Hive site conf file path property not found for Hive Metastore lookup");
                }
            }
            hiveClient = new HiveMetaStoreClient(conf);
        } catch (HadoopException he) {
            String msgDesc = "initConnection: Class or its nullary constructor might not accessible." + "So unable to initiate connection to hive thrift server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, he);
            hdpException.generateResponseDataMap(false, getMessage(he), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (MalformedURLException e) {
            String msgDesc = "initConnection: URL might be malformed." + "So unable to initiate connection to hive thrift server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (MetaException e) {
            String msgDesc = "initConnection: Meta info is not proper." + "So unable to initiate connection to hive thrift server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (Throwable t) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        }
    } else {
        Properties prop = getConfigHolder().getRangerSection();
        String driverClassName = prop.getProperty("jdbc.driverClassName");
        String url = prop.getProperty("jdbc.url");
        if (driverClassName != null) {
            try {
                Driver driver = (Driver) Class.forName(driverClassName).newInstance();
                DriverManager.registerDriver(driver);
            } catch (SQLException e) {
                String msgDesc = "initConnection: Caught SQLException while registering " + "Hive driver, so Unable to connect to Hive Thrift Server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, e);
                hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (IllegalAccessException ilae) {
                String msgDesc = "initConnection: Class or its nullary constructor might not accessible." + "So unable to initiate connection to hive thrift server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, ilae);
                hdpException.generateResponseDataMap(false, getMessage(ilae), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (InstantiationException ie) {
                String msgDesc = "initConnection: Class may not have its nullary constructor or " + "may be the instantiation fails for some other reason." + "So unable to initiate connection to hive thrift server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, ie);
                hdpException.generateResponseDataMap(false, getMessage(ie), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (ExceptionInInitializerError eie) {
                String msgDesc = "initConnection: Got ExceptionInInitializerError, " + "The initialization provoked by this method fails." + "So unable to initiate connection to hive thrift server instance.";
                HadoopException hdpException = new HadoopException(msgDesc, eie);
                hdpException.generateResponseDataMap(false, getMessage(eie), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (SecurityException se) {
                String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance," + " The caller's class loader is not the same as or an ancestor " + "of the class loader for the current class and invocation of " + "s.checkPackageAccess() denies access to the package of this class.";
                HadoopException hdpException = new HadoopException(msgDesc, se);
                hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            } catch (Throwable t) {
                String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, " + "please provide valid value of field : {jdbc.driverClassName}.";
                HadoopException hdpException = new HadoopException(msgDesc, t);
                hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, "jdbc.driverClassName");
                if (LOG.isDebugEnabled()) {
                    LOG.debug(msgDesc, hdpException);
                }
                throw hdpException;
            }
        }
        try {
            if (userName == null && password == null) {
                con = DriverManager.getConnection(url);
            } else {
                String decryptedPwd = null;
                try {
                    decryptedPwd = PasswordUtils.decryptPassword(password);
                } catch (Exception ex) {
                    LOG.info("Password decryption failed; trying Hive connection with received password string");
                    decryptedPwd = null;
                } finally {
                    if (decryptedPwd == null) {
                        decryptedPwd = password;
                    }
                }
                con = DriverManager.getConnection(url, userName, decryptedPwd);
            }
        } catch (SQLException e) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, e);
            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (SecurityException se) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance.";
            HadoopException hdpException = new HadoopException(msgDesc, se);
            hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + ERR_MSG, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        } catch (Throwable t) {
            String msgDesc = "Unable to connect to Hive Thrift Server instance";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + ERR_MSG, null, url);
            if (LOG.isDebugEnabled()) {
                LOG.debug(msgDesc, hdpException);
            }
            throw hdpException;
        }
    }
}
Also used: java.io.File, java.net.MalformedURLException, java.sql.Driver, java.sql.SQLException, java.sql.SQLTimeoutException, java.util.Properties, org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaStoreClient, org.apache.hadoop.hive.metastore.api.MetaException, org.apache.ranger.plugin.client.HadoopException, org.apache.thrift.TException
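
Once the metastore branch succeeds, hiveClient can serve lookups directly over Thrift; a minimal hedged sketch (the iteration and logging are illustrative, not HiveClient's actual lookup code):

// Hypothetical follow-up: enumerate databases and tables through the client.
// getAllDatabases() and getAllTables(String) are standard HiveMetaStoreClient
// calls; both may throw MetaException/TException.
List<String> databases = hiveClient.getAllDatabases();
for (String db : databases) {
    List<String> tables = hiveClient.getAllTables(db);
    LOG.debug("Database " + db + " contains " + tables.size() + " tables");
}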

Aggregations

HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 45
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 20
Path (org.apache.hadoop.fs.Path): 14
Table (org.apache.hadoop.hive.metastore.api.Table): 14
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 12
FileSystem (org.apache.hadoop.fs.FileSystem): 11
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 11
HiveEndPoint (org.apache.hive.hcatalog.streaming.HiveEndPoint): 9
Test (org.junit.Test): 9
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 8
FileStatus (org.apache.hadoop.fs.FileStatus): 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest): 8
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 8
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore): 8
File (java.io.File): 7
WindowingException (com.sap.hadoop.windowing.WindowingException): 6
DelimitedInputWriter (org.apache.hive.hcatalog.streaming.DelimitedInputWriter): 6
StreamingConnection (org.apache.hive.hcatalog.streaming.StreamingConnection): 6
Before (org.junit.Before): 6
BeforeClass (org.junit.BeforeClass): 5