Example 21 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

The class TestAuthorizationPreEventListener, method setUp.

@Override
protected void setUp() throws Exception {
    super.setUp();
    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, AuthorizationPreEventListener.class.getName());
    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname, DummyHiveMetastoreAuthorizationProvider.class.getName());
    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname, HadoopDefaultMetastoreAuthenticator.class.getName());
    int port = MetaStoreTestUtils.startMetaStoreWithRetry();
    clientHiveConf = new HiveConf(this.getClass());
    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    SessionState.start(new CliSessionState(clientHiveConf));
    msc = new HiveMetaStoreClient(clientHiveConf);
    driver = DriverFactory.newDriver(clientHiveConf);
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) HiveConf(org.apache.hadoop.hive.conf.HiveConf) AuthorizationPreEventListener(org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState)
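
For context, a test built on this setUp would exercise the listener by issuing DDL through the driver and metastore client it creates. The fragment below is a hypothetical sketch, not code from the project; the table name is made up and error handling is omitted.

// Hypothetical usage sketch: DDL issued through the driver reaches the test metastore,
// where the configured AuthorizationPreEventListener runs before each event is applied.
driver.run("CREATE TABLE apel_test_table (a INT)");
org.apache.hadoop.hive.metastore.api.Table table = msc.getTable("default", "apel_test_table");
msc.dropTable("default", "apel_test_table");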

Example 22 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

The class TestClientSideAuthorizationProvider, method setUp.

@Override
protected void setUp() throws Exception {
    super.setUp();
    // Turn off metastore-side authorization
    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, "");
    int port = MetaStoreTestUtils.startMetaStoreWithRetry();
    clientHiveConf = new HiveConf(this.getClass());
    // Turn on client-side authorization
    clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
    clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, getAuthorizationProvider());
    clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname, InjectableDummyAuthenticator.class.getName());
    clientHiveConf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, "");
    clientHiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    ugi = Utils.getUGI();
    SessionState.start(new CliSessionState(clientHiveConf));
    msc = new HiveMetaStoreClient(clientHiveConf);
    driver = DriverFactory.newDriver(clientHiveConf);
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState)
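
As a rough illustration of how this setUp is used, a test can run a statement through the driver and inspect the response code to see whether the client-side provider allowed it. This is a hypothetical sketch, assuming the Hive 3 style CommandProcessorResponse API (import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse); the table name is made up.

// Hypothetical sketch: with HIVE_AUTHORIZATION_ENABLED set, the provider configured via
// HIVE_AUTHORIZATION_MANAGER is consulted when the driver processes the statement.
CommandProcessorResponse response = driver.run("CREATE TABLE csap_test_table (a INT)");
assertEquals(0, response.getResponseCode());
driver.run("DROP TABLE csap_test_table");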

Example 23 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

The class QTestUtil, method startSessionState.

private CliSessionState startSessionState(boolean canReuseSession) throws IOException {
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;
    ss.out = System.out;
    ss.err = System.out;
    SessionState oldSs = SessionState.get();
    if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
        // Copy the tezSessionState from the old CliSessionState.
        TezSessionState tezSessionState = oldSs.getTezSession();
        ss.setTezSession(tezSessionState);
        oldSs.setTezSession(null);
        oldSs.close();
    }
    if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
        sparkSession = oldSs.getSparkSession();
        ss.setSparkSession(sparkSession);
        oldSs.setSparkSession(null);
        oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
        oldSs.out.close();
    }
    if (oldSs != null) {
        oldSs.close();
    }
    SessionState.start(ss);
    isSessionStateStarted = true;
    conf.set("hive.execution.engine", execEngine);
    return ss;
}
Also used : TezSessionState (org.apache.hadoop.hive.ql.exec.tez.TezSessionState) CliSessionState (org.apache.hadoop.hive.cli.CliSessionState) SessionState (org.apache.hadoop.hive.ql.session.SessionState)
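
Note the save-override-restore pattern around hive.execution.engine: the original value is captured, "mr" is forced while the session starts, and the caller's engine is put back at the end. A generic, hypothetical helper expressing that pattern (not part of QTestUtil; names are illustrative) might look like this:

// Hypothetical helper for the save-override-restore pattern used above.
private static <T> T withConfValue(HiveConf conf, String key, String tempValue,
        java.util.concurrent.Callable<T> body) throws Exception {
    String previous = conf.get(key);
    conf.set(key, tempValue);
    try {
        return body.call();
    } finally {
        if (previous != null) {
            conf.set(key, previous);
        } else {
            // restore "not set" if the key had no explicit value before
            conf.unset(key);
        }
    }
}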

Example 24 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

The class TestHCatLoaderEncryption, method setup.

@Before
public void setup() throws Exception {
    File f = new File(TEST_WAREHOUSE_DIR);
    if (f.exists()) {
        FileUtil.fullyDelete(f);
    }
    if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
        throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
    }
    HiveConf hiveConf = new HiveConf(this.getClass());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    String s = hiveConf.get("hdfs.minidfs.basedir");
    if (s == null || s.length() <= 0) {
        // return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
        hiveConf.set("hdfs.minidfs.basedir", System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() + "_" + salt.getAndIncrement() + "/dfs/");
    }
    initEncryptionShim(hiveConf);
    String encryptedTablePath = TEST_WAREHOUSE_DIR + "/encryptedTable";
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    SessionState.get().out = System.out;
    createTable(BASIC_TABLE, "a int, b string");
    createTableInSpecifiedPath(ENCRYPTED_TABLE, "a int, b string", encryptedTablePath, driver);
    associateEncryptionZoneWithPath(encryptedTablePath);
    int LOOP_SIZE = 3;
    String[] input = new String[LOOP_SIZE * LOOP_SIZE];
    basicInputData = new HashMap<Integer, Pair<Integer, String>>();
    int k = 0;
    for (int i = 1; i <= LOOP_SIZE; i++) {
        String si = i + "";
        for (int j = 1; j <= LOOP_SIZE; j++) {
            String sj = "S" + j + "S";
            input[k] = si + "\t" + sj;
            basicInputData.put(k, new Pair<Integer, String>(i, sj));
            k++;
        }
    }
    HcatTestUtils.createTestDataFile(BASIC_FILE_NAME, input);
    PigServer server = new PigServer(ExecType.LOCAL);
    server.setBatchOn();
    int i = 0;
    server.registerQuery("A = load '" + BASIC_FILE_NAME + "' as (a:int, b:chararray);", ++i);
    server.registerQuery("store A into '" + ENCRYPTED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.executeBatch();
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) PigServer(org.apache.pig.PigServer) HiveConf(org.apache.hadoop.hive.conf.HiveConf) File(java.io.File) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Pair(org.apache.hive.hcatalog.data.Pair) Before(org.junit.Before)
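
After this setup stores the generated rows into the encrypted table with HCatStorer, a test would normally read them back through HCatLoader. The snippet below is a hypothetical read-back sketch, not code from the test; the alias name is made up and it assumes imports of java.util.Iterator and org.apache.pig.data.Tuple.

// Hypothetical read-back sketch via HCatLoader for the encrypted table created above.
PigServer reader = new PigServer(ExecType.LOCAL);
reader.registerQuery("E = load '" + ENCRYPTED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
Iterator<Tuple> rows = reader.openIterator("E");
while (rows.hasNext()) {
    // each tuple should mirror one generated input line, e.g. (1, "S1S")
    Tuple row = rows.next();
}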

Example 25 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

The class TestMsgBusConnection, method before.

@Before
public void before() throws Exception {
    broker = new BrokerService();
    // configure the broker
    broker.addConnector("tcp://localhost:61616?broker.persistent=false");
    broker.start();
    System.setProperty("java.naming.factory.initial", "org.apache.activemq.jndi.ActiveMQInitialContextFactory");
    System.setProperty("java.naming.provider.url", "tcp://localhost:61616");
    connectClient();
    HiveConf hiveConf = new HiveConf(this.getClass());
    hiveConf.set(ConfVars.METASTORE_EVENT_LISTENERS.varname, NotificationListener.class.getName());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    hiveConf.set(HCatConstants.HCAT_MSGBUS_TOPIC_PREFIX, "planetlab.hcat");
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
}
Also used : HiveConf(org.apache.hadoop.hive.conf.HiveConf) BrokerService(org.apache.activemq.broker.BrokerService) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Before(org.junit.Before)
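
The connectClient() helper referenced above is not shown in this excerpt. One plain JMS/ActiveMQ way to subscribe to the notification topic would be the sketch below; it assumes javax.jms.* and org.apache.activemq.ActiveMQConnectionFactory imports, and the topic name under the planetlab.hcat prefix and the receive timeout are assumptions, not values from the test.

// Hypothetical JMS subscription sketch (the real connectClient() is not shown here).
// Messages are expected on topics under the configured prefix; "planetlab.hcat.mydb"
// is an assumed example topic name.
ConnectionFactory factory = new ActiveMQConnectionFactory("tcp://localhost:61616");
Connection connection = factory.createConnection();
connection.start();
Session jmsSession = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = jmsSession.createConsumer(jmsSession.createTopic("planetlab.hcat.mydb"));
Message message = consumer.receive(30 * 1000);  // wait up to 30 seconds for a metastore event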

Aggregations

CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 58
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 44
File (java.io.File): 12
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 12
Before (org.junit.Before): 12
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 11
BeforeClass (org.junit.BeforeClass): 9
Path (org.apache.hadoop.fs.Path): 8
Driver (org.apache.hadoop.hive.ql.Driver): 8
PrintStream (java.io.PrintStream): 7
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 6
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 6
Map (java.util.Map): 5
CliDriver (org.apache.hadoop.hive.cli.CliDriver): 5
CachingPrintStream (org.apache.hadoop.hive.common.io.CachingPrintStream): 5
FileNotFoundException (java.io.FileNotFoundException): 4
FileSystem (org.apache.hadoop.fs.FileSystem): 4
AuthorizationPreEventListener (org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener): 4
BufferedOutputStream (java.io.BufferedOutputStream): 3
FileOutputStream (java.io.FileOutputStream): 3