
Example 1 with SessionStream

Use of org.apache.hadoop.hive.common.io.SessionStream in the apache/hive project.

From the class TestHCatLoaderEncryption, method setup:

@Before
public void setup() throws Exception {
    File f = new File(TEST_WAREHOUSE_DIR);
    if (f.exists()) {
        FileUtil.fullyDelete(f);
    }
    if (!(new File(TEST_WAREHOUSE_DIR).mkdirs())) {
        throw new RuntimeException("Could not create " + TEST_WAREHOUSE_DIR);
    }
    HiveConf hiveConf = new HiveConf(this.getClass());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    String s = hiveConf.get("hdfs.minidfs.basedir");
    if (s == null || s.length() <= 0) {
        // return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
        hiveConf.set("hdfs.minidfs.basedir", System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() + "_" + salt.getAndIncrement() + "/dfs/");
    }
    initEncryptionShim(hiveConf);
    String encryptedTablePath = TEST_WAREHOUSE_DIR + "/encryptedTable";
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    SessionState.get().out = new SessionStream(System.out);
    createTable(BASIC_TABLE, "a int, b string");
    createTableInSpecifiedPath(ENCRYPTED_TABLE, "a int, b string", encryptedTablePath, driver);
    associateEncryptionZoneWithPath(encryptedTablePath);
    int LOOP_SIZE = 3;
    String[] input = new String[LOOP_SIZE * LOOP_SIZE];
    basicInputData = new HashMap<Integer, Pair<Integer, String>>();
    int k = 0;
    for (int i = 1; i <= LOOP_SIZE; i++) {
        String si = i + "";
        for (int j = 1; j <= LOOP_SIZE; j++) {
            String sj = "S" + j + "S";
            input[k] = si + "\t" + sj;
            basicInputData.put(k, new Pair<Integer, String>(i, sj));
            k++;
        }
    }
    HcatTestUtils.createTestDataFile(BASIC_FILE_NAME, input);
    PigServer server = HCatBaseTest.createPigServer(false);
    server.setBatchOn();
    int i = 0;
    server.registerQuery("A = load '" + BASIC_FILE_NAME + "' as (a:int, b:chararray);", ++i);
    server.registerQuery("store A into '" + ENCRYPTED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.executeBatch();
}
Also used: CliSessionState (org.apache.hadoop.hive.cli.CliSessionState), AtomicInteger (java.util.concurrent.atomic.AtomicInteger), SessionStream (org.apache.hadoop.hive.common.io.SessionStream), PigServer (org.apache.pig.PigServer), HiveConf (org.apache.hadoop.hive.conf.HiveConf), File (java.io.File), Pair (org.apache.hive.hcatalog.data.Pair), Before (org.junit.Before)

Example 2 with SessionStream

Use of org.apache.hadoop.hive.common.io.SessionStream in the apache/hive project.

From the class QTestUtil, method startSessionState:

public CliSessionState startSessionState(boolean canReuseSession) throws IOException {
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
    // FIXME: check why mr is needed for starting a session state from conf
    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    CliSessionState ss = new CliSessionState(conf);
    ss.in = System.in;
    ss.out = new SessionStream(System.out);
    ss.err = new SessionStream(System.out);
    SessionState oldSs = SessionState.get();
    miniClusters.restartSessions(canReuseSession, ss, oldSs);
    closeSession(oldSs);
    SessionState.start(ss);
    isSessionStateStarted = true;
    conf.set("hive.execution.engine", execEngine);
    return ss;
}
Also used: SessionStream (org.apache.hadoop.hive.common.io.SessionStream), SessionState (org.apache.hadoop.hive.ql.session.SessionState), CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)

Example 3 with SessionStream

Use of org.apache.hadoop.hive.common.io.SessionStream in the apache/hive project.

From the class TestCliDriverMethods, method testprocessInitFiles:

@Test
public void testprocessInitFiles() throws Exception {
    String oldHiveHome = System.getenv("HIVE_HOME");
    String oldHiveConfDir = System.getenv("HIVE_CONF_DIR");
    File homeFile = File.createTempFile("test", "hive");
    String tmpDir = homeFile.getParentFile().getAbsoluteFile() + File.separator + "TestCliDriverMethods";
    homeFile.delete();
    FileUtils.deleteDirectory(new File(tmpDir));
    homeFile = new File(tmpDir + File.separator + "bin" + File.separator + CliDriver.HIVERCFILE);
    homeFile.getParentFile().mkdirs();
    homeFile.createNewFile();
    FileUtils.write(homeFile, "-- init hive file for test ");
    setEnv("HIVE_HOME", homeFile.getParentFile().getParentFile().getAbsolutePath());
    setEnv("HIVE_CONF_DIR", homeFile.getParentFile().getAbsolutePath());
    CliSessionState sessionState = new CliSessionState(new HiveConf());
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    sessionState.err = new SessionStream(data);
    sessionState.out = new SessionStream(System.out);
    sessionState.setIsQtestLogging(true);
    try {
        CliSessionState.start(sessionState);
        CliDriver cliDriver = new CliDriver();
        cliDriver.processInitFiles(sessionState);
        assertTrue(data.toString().contains("Putting the global hiverc in $HIVE_HOME/bin/.hiverc is deprecated. " + "Please use $HIVE_CONF_DIR/.hiverc instead."));
        FileUtils.write(homeFile, "bla bla bla");
        // if init file contains incorrect row
        try {
            cliDriver.processInitFiles(sessionState);
            fail("should be exit");
        } catch (ExitException e) {
            assertEquals(40000, e.getStatus());
        }
        setEnv("HIVE_HOME", null);
        try {
            cliDriver.processInitFiles(sessionState);
            fail("should be exit");
        } catch (ExitException e) {
            assertEquals(40000, e.getStatus());
        }
    } finally {
        // restore data
        setEnv("HIVE_HOME", oldHiveHome);
        setEnv("HIVE_CONF_DIR", oldHiveConfDir);
        FileUtils.deleteDirectory(new File(tmpDir));
    }
    File f = File.createTempFile("hive", "test");
    FileUtils.write(f, "bla bla bla");
    try {
        sessionState.initFiles = Arrays.asList(new String[] { f.getAbsolutePath() });
        CliDriver cliDriver = new CliDriver();
        cliDriver.processInitFiles(sessionState);
        fail("should be exit");
    } catch (ExitException e) {
        assertEquals(40000, e.getStatus());
        assertTrue(data.toString().contains("cannot recognize input near 'bla' 'bla' 'bla'"));
    }
}
Also used: SessionStream (org.apache.hadoop.hive.common.io.SessionStream), HiveConf (org.apache.hadoop.hive.conf.HiveConf), ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString), ByteArrayOutputStream (java.io.ByteArrayOutputStream), File (java.io.File), Test (org.junit.Test)

Example 4 with SessionStream

Use of org.apache.hadoop.hive.common.io.SessionStream in the apache/hive project.

From the class TestCliDriverMethods, method testThatCliDriverDoesNotStripComments:

// Test that CliDriver does not strip comments starting with '--'
@Test
public void testThatCliDriverDoesNotStripComments() throws Exception {
    // We need to overwrite System.out and System.err as that is what is used in ShellCmdExecutor
    // So save old values...
    PrintStream oldOut = System.out;
    PrintStream oldErr = System.err;
    // Capture stdout and stderr
    ByteArrayOutputStream dataOut = new ByteArrayOutputStream();
    SessionStream out = new SessionStream(dataOut);
    System.setOut(out);
    ByteArrayOutputStream dataErr = new ByteArrayOutputStream();
    SessionStream err = new SessionStream(dataErr);
    System.setErr(err);
    CliSessionState ss = new CliSessionState(new HiveConf());
    ss.out = out;
    ss.err = err;
    // Save the output, since we cannot print it while System.out and System.err are redirected
    String message;
    String errors;
    try {
        CliSessionState.start(ss);
        CliDriver cliDriver = new CliDriver();
        // issue a command with bad options
        cliDriver.processCmd("!ls --abcdefghijklmnopqrstuvwxyz123456789");
        assertTrue("Comments with '--; should not have been stripped, so command should fail", false);
    } catch (CommandProcessorException e) {
    // this is expected to happen
    } finally {
        // restore System.out and System.err
        System.setOut(oldOut);
        System.setErr(oldErr);
    }
    message = dataOut.toString("UTF-8");
    errors = dataErr.toString("UTF-8");
    assertTrue("Comments with '--; should not have been stripped," + " so we should have got an error in the output: '" + errors + "'.", errors.contains("option"));
    // message kept around in for debugging
    assertNotNull(message);
}
Also used: PrintStream (java.io.PrintStream), SessionStream (org.apache.hadoop.hive.common.io.SessionStream), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), HiveConf (org.apache.hadoop.hive.conf.HiveConf), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString), Test (org.junit.Test)

Example 5 with SessionStream

Use of org.apache.hadoop.hive.common.io.SessionStream in the apache/hive project.

From the class TestCliDriverMethods, method testQuit:

/**
 * Tests the exit and quit commands.
 */
@Test
public void testQuit() throws Exception {
    CliSessionState ss = new CliSessionState(new HiveConf());
    ss.err = new SessionStream(System.err);
    ss.out = new SessionStream(System.out);
    try {
        CliSessionState.start(ss);
        CliDriver cliDriver = new CliDriver();
        cliDriver.processCmd("quit");
        fail("should be exit");
    } catch (ExitException e) {
        assertEquals(0, e.getStatus());
    } catch (Exception e) {
        throw e;
    }
    try {
        CliSessionState.start(ss);
        CliDriver cliDriver = new CliDriver();
        cliDriver.processCmd("exit");
        fail("should be exit");
    } catch (ExitException e) {
        assertEquals(0, e.getStatus());
    }
}
Also used: SessionStream (org.apache.hadoop.hive.common.io.SessionStream), HiveConf (org.apache.hadoop.hive.conf.HiveConf), IOException (java.io.IOException), CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException), Test (org.junit.Test)
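
The recurring pattern across these examples is to wrap a plain OutputStream in a SessionStream before handing it to the session: console streams for interactive output, in-memory buffers when a test needs to assert on what was written. Below is a minimal standalone sketch of that wiring; the class name SessionStreamWiringSketch and the main-method harness are illustrative only and are not taken from the Hive sources, and running it assumes the Hive and Hadoop client jars (and a loadable HiveConf) are on the classpath.

import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.common.io.SessionStream;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionStreamWiringSketch {
    public static void main(String[] args) throws Exception {
        CliSessionState ss = new CliSessionState(new HiveConf());

        // Console output goes through a SessionStream, as in the examples above.
        ss.out = new SessionStream(System.out);

        // Errors are captured in memory so they can be inspected afterwards,
        // mirroring the ByteArrayOutputStream pattern used in the test examples.
        ByteArrayOutputStream errBuffer = new ByteArrayOutputStream();
        ss.err = new SessionStream(errBuffer);

        // Make this session current for the thread, then clean it up when done.
        SessionState.start(ss);
        try {
            ss.out.println("session output routed through SessionStream");
        } finally {
            ss.close();
        }

        // Anything the session wrote to err is now available as a string.
        System.err.print(errBuffer.toString("UTF-8"));
    }
}

In the test methods above, the same in-memory buffers are what the assertions read once the CliDriver or driver call has returned.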

Aggregations

SessionStream (org.apache.hadoop.hive.common.io.SessionStream): 14
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 9
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 5
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 5
Test (org.junit.Test): 5
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4
IOException (java.io.IOException): 3
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 3
CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException): 3
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 3
File (java.io.File): 2
IDriver (org.apache.hadoop.hive.ql.IDriver): 2
Before (org.junit.Before): 2
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 2
FileNotFoundException (java.io.FileNotFoundException): 1
FileOutputStream (java.io.FileOutputStream): 1
PrintStream (java.io.PrintStream): 1
Map (java.util.Map): 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 1
CommandLine (org.apache.commons.cli.CommandLine): 1