
Example 1 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

From the class QTestUtil, method cliInit():

public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
        cleanUp(tname);
        createSources(tname);
    }
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
    Utilities.clearWorkMap(conf);
    CliSessionState ss = createSessionState();
    assert ss != null;
    ss.in = System.in;
    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
        // TODO: why is this needed?
        File qf = new File(outDir, tname);
        stdoutName = qf.getName().concat(outFileExtension);
    } else {
        stdoutName = tname + outFileExtension;
    }
    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    // Pick the output wrapper for this test: sorted, digested, or both, depending on
    // which query set the test belongs to; otherwise plain UTF-8 output.
    if (qSortQuerySet.contains(tname)) {
        ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
        ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
        ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
        ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();
    boolean canReuseSession = !qNoSessionReuseQuerySet.contains(tname);
    if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
        // Copy the tezSessionState from the old CliSessionState.
        tezSessionState = oldSs.getTezSession();
        oldSs.setTezSession(null);
        ss.setTezSession(tezSessionState);
        oldSs.close();
    }
    // Similarly, hand the Spark session over from the old session before closing it.
    if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
        sparkSession = oldSs.getSparkSession();
        ss.setSparkSession(sparkSession);
        oldSs.setSparkSession(null);
        oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
        oldSs.out.close();
    }
    SessionState.start(ss);
    cliDriver = new CliDriver();
    if (tname.equals("init_file.q")) {
        ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
    }
    cliDriver.processInitFiles(ss);
    return outf.getAbsolutePath();
}
Also used: SortAndDigestPrintStream(org.apache.hadoop.hive.common.io.SortAndDigestPrintStream) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) DigestPrintStream(org.apache.hadoop.hive.common.io.DigestPrintStream) SortPrintStream(org.apache.hadoop.hive.common.io.SortPrintStream) PrintStream(java.io.PrintStream) TezSessionState(org.apache.hadoop.hive.ql.exec.tez.TezSessionState) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) SessionState(org.apache.hadoop.hive.ql.session.SessionState) BufferedOutputStream(java.io.BufferedOutputStream) FileOutputStream(java.io.FileOutputStream) OutputStream(java.io.OutputStream) File(java.io.File) CliDriver(org.apache.hadoop.hive.cli.CliDriver)
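
Stripped of the sort/digest output variants and the Tez/Spark session handover, the wiring in cliInit() reduces to a small pattern. The sketch below is a hedged condensation against the same Hive version as the example (newer releases route these streams through org.apache.hadoop.hive.common.io.SessionStream); the method name, the HiveConf and the output file are placeholders supplied by the caller.

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import org.apache.hadoop.hive.cli.CliDriver;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.common.io.CachingPrintStream;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

// Hedged condensation of cliInit(): plain UTF-8 output only, no query-set
// specific sorting or digesting, and no reuse of an earlier session.
static String startPlainCliSession(HiveConf conf, File outFile) throws Exception {
    CliSessionState ss = new CliSessionState(conf);
    ss.in = System.in;
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outFile));
    ss.out = new PrintStream(fo, true, "UTF-8");        // query results go to the out file
    ss.err = new CachingPrintStream(fo, true, "UTF-8"); // errors are captured to the same file
    ss.setIsSilent(true);
    SessionState.start(ss);                             // make the session current for this thread
    new CliDriver().processInitFiles(ss);               // honor any configured init files
    return outFile.getAbsolutePath();
}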

Example 2 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

From the class TestMarkPartition, method setUp():

@Override
protected void setUp() throws Exception {
    super.setUp();
    System.setProperty("hive.metastore.event.clean.freq", "2");
    System.setProperty("hive.metastore.event.expiry.duration", "5");
    hiveConf = new HiveConf(this.getClass());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
}
Also used: HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState)
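
The snippet ends once the session is started; the test body itself is not shown. Judging from the later examples, the usual continuation is to build an org.apache.hadoop.hive.ql.Driver over the same HiveConf and issue HiveQL against the started session. A hedged, illustrative sketch, assuming the JUnit 3 style TestCase base that the overridden setUp() implies; the method name, table name and statements are placeholders, not taken from TestMarkPartition.

// Hedged, illustrative test method: drives HiveQL through the CliSessionState
// that setUp() started. The DDL below is a placeholder.
public void testPartitionDdlRunsInCliSession() throws Exception {
    Driver driver = new Driver(hiveConf);
    driver.run("create table tmp_mark_part (a int) partitioned by (b string)");
    driver.run("alter table tmp_mark_part add partition (b = '2024-01-01')");
    // ... assertions against the metastore would go here ...
    driver.run("drop table tmp_mark_part");
}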

Example 3 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

From the class TestMetaStoreEndFunctionListener, method setUp():

@Override
protected void setUp() throws Exception {
    super.setUp();
    System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName());
    System.setProperty("hive.metastore.pre.event.listeners", DummyPreListener.class.getName());
    System.setProperty("hive.metastore.end.function.listeners", DummyEndFunctionListener.class.getName());
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf);
    driver = new Driver(hiveConf);
}
Also used: Driver(org.apache.hadoop.hive.ql.Driver) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState)
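
The matching teardown is not part of this snippet. A hedged, illustrative sketch of one is shown below, limited to releasing the metastore client opened in setUp(); leaving the listener system properties in place is an assumption, since the original class may handle them differently.

// Hedged, illustrative tearDown (not from the original class): close the
// Thrift-backed HiveMetaStoreClient that setUp() created.
@Override
protected void tearDown() throws Exception {
    if (msc != null) {
        msc.close();
    }
    super.tearDown();
}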

Example 4 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

From the class TestMetaStoreEventListener, method setUp():

@Override
protected void setUp() throws Exception {
    super.setUp();
    System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName());
    System.setProperty("hive.metastore.pre.event.listeners", DummyPreListener.class.getName());
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf);
    driver = new Driver(hiveConf);
    driver.run("drop database if exists " + dbName + " cascade");
    DummyListener.notifyList.clear();
    DummyPreListener.notifyList.clear();
}
Also used: Driver(org.apache.hadoop.hive.ql.Driver) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState)
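
Both listener tests clear DummyListener.notifyList and DummyPreListener.notifyList in setUp() so every test starts from an empty event log, then run DDL and inspect what the listeners captured. A hedged, illustrative sketch of that pattern follows; only the clear() call on notifyList is visible here, so treating it as a java.util collection of captured events is an assumption, and the method name, statement and assertion are placeholders.

// Hedged, illustrative test method built on the setUp() above. Assumes
// DummyListener.notifyList is a collection of captured metastore events
// (only clear() appears in the original snippet).
public void testCreateDatabaseNotifiesListener() throws Exception {
    driver.run("create database " + dbName);
    assertFalse("expected the metastore listener to capture at least one event",
            DummyListener.notifyList.isEmpty());
    driver.run("drop database " + dbName + " cascade");
}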

Example 5 with CliSessionState

Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.

From the class TestReplicationScenarios, method setUpBeforeClass():

// If verifySetup is set to true, all the test setup we do will perform additional
// verifications as well, which is useful for verifying that our setup occurred
// correctly when developing and debugging tests. These verifications, however,
// do not test any new replication functionality and are therefore not relevant
// for testing replication itself. For steady state, we want this to be false.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    hconf = new HiveConf(TestReplicationScenarios.class);
    String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
    if (metastoreUri != null) {
        hconf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
        useExternalMS = true;
        return;
    }
    // turn on db notification listener on metastore
    hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DBNOTIF_LISTENER_CLASSNAME);
    hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
    hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
    hconf.setVar(HiveConf.ConfVars.REPLCMDIR, TEST_PATH + "/cmroot/");
    msPort = MetaStoreUtils.startMetaStore(hconf);
    hconf.setVar(HiveConf.ConfVars.REPLDIR, TEST_PATH + "/hrepl/");
    hconf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
    hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
    System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
    Path testPath = new Path(TEST_PATH);
    FileSystem fs = FileSystem.get(testPath.toUri(), hconf);
    fs.mkdirs(testPath);
    driver = new Driver(hconf);
    SessionState.start(new CliSessionState(hconf));
    metaStoreClient = new HiveMetaStoreClient(hconf);
}
Also used: Path(org.apache.hadoop.fs.Path) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) BeforeClass(org.junit.BeforeClass)
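
The early-return branch at the top of setUpBeforeClass() lets the whole suite run against an already-running metastore instead of the embedded one: when the system property "test." + HiveConf.ConfVars.METASTOREURIS.varname (that is, test.hive.metastore.uris) is set, the embedded metastore, the CM root and the listener configuration are skipped. On the command line this is typically passed as -Dtest.hive.metastore.uris=thrift://host:port; the programmatic equivalent is sketched below with a placeholder URI.

// Hedged sketch: opting into the external-metastore branch of setUpBeforeClass().
// Must run before the @BeforeClass method executes; the URI is a placeholder.
System.setProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname,
        "thrift://external-metastore.example.com:9083");
// With the property set, setUpBeforeClass() only copies the URI into hconf,
// flags useExternalMS = true, and returns without starting an embedded metastore.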

Aggregations

Usage counts for CliSessionState and the classes that most often appear alongside it in the indexed sources:

CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 69
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 48
Before (org.junit.Before): 19
File (java.io.File): 16
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 14
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 11
Test (org.junit.Test): 11
Path (org.apache.hadoop.fs.Path): 9
BeforeClass (org.junit.BeforeClass): 9
Driver (org.apache.hadoop.hive.ql.Driver): 8
IDriver (org.apache.hadoop.hive.ql.IDriver): 7
CliDriver (org.apache.hadoop.hive.cli.CliDriver): 6
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 5
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 5
SessionStream (org.apache.hadoop.hive.common.io.SessionStream): 5
CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException): 5
FileNotFoundException (java.io.FileNotFoundException): 4
PrintStream (java.io.PrintStream): 4
Map (java.util.Map): 4
FileSystem (org.apache.hadoop.fs.FileSystem): 4