Search in sources :

Example 76 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

the class QTestUtil method cleanupFromFile.

/**
 * Executes the cleanup commands from the configured cleanup script, if present.
 * <p>
 * Reads the whole script file into a string and hands it to the CLI driver as a
 * single batch. A {@link CommandProcessorException} from the driver is logged and
 * deliberately swallowed so that a failing cleanup does not abort the test run.
 *
 * @throws IOException if the cleanup script exists but cannot be read
 */
private void cleanupFromFile() throws IOException {
    final File script = new File(cleanupScript);
    // Guard clause: nothing to do when no script file is present.
    if (!script.isFile()) {
        LOG.info("No cleanup script detected. Skipping.");
        return;
    }
    final String commands = FileUtils.readFileToString(script);
    LOG.info("Cleanup (" + cleanupScript + "):\n" + commands);
    try {
        getCliDriver().processLine(commands);
    } catch (CommandProcessorException e) {
        // TODO Convert this to an Assert.fail once HIVE-14682 is fixed
        LOG.error("Failed during cleanup processLine with code={}. Ignoring", e.getResponseCode());
    }
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) File(java.io.File)

Example 77 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

the class QTestDatasetHandler method unloadDataset.

/**
 * Drops the given dataset table via the supplied CLI driver.
 * <p>
 * On a {@link CommandProcessorException} the test is failed immediately via
 * {@code Assert.fail}; otherwise the drop result is logged.
 *
 * @param table the table name to drop
 * @param cliDriver the CLI driver used to execute the DROP TABLE command
 * @return always {@code true} (the method fails the test instead of returning {@code false})
 * @throws Exception if command execution fails for a reason other than a
 *         {@code CommandProcessorException}
 */
public boolean unloadDataset(String table, CliDriver cliDriver) throws Exception {
    try {
        CommandProcessorResponse result = cliDriver.processLine("drop table " + table);
        // Fixed message: previously said "cliDrriver ... in initFromDatasets",
        // a typo plus a copy-paste of the sibling load method's name.
        LOG.info("Result from cliDriver.processLine in unloadDataset=" + result);
    } catch (CommandProcessorException e) {
        // Fixed message: previously blamed initFromDatasets for a failure in unloadDataset.
        Assert.fail("Failed during unloadDataset processLine with error=" + e);
    }
    return true;
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) CommandProcessorResponse(org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)

Example 78 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

the class AbstractHCatStorerTest method testDateCharTypes.

/**
 * Create a data file with datatypes added in 0.13. Read it with Pig and use Pig + HCatStorer to
 * write to a Hive table. Then read it using Pig and Hive and make sure results match.
 */
@Test
public void testDateCharTypes() throws Exception {
    final String tblName = "junit_date_char";
    AbstractHCatLoaderTest.dropTable(tblName, driver);
    AbstractHCatLoaderTest.createTableDefaultDB(tblName, "id int, char5 char(5), varchar10 varchar(10), dec52 decimal(5,2)", null, driver, storageFormat);
    int NUM_ROWS = 5;
    String[] rows = new String[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
        // since the file is read by Pig, we need to make sure the values are in format that Pig
        // understands
        // otherwise it will turn the value to NULL on read
        rows[i] = i + "\txxxxx\tyyy\t" + 5.2;
    }
    HcatTestUtils.createTestDataFile(INPUT_FILE_NAME, rows);
    LOG.debug("File=" + INPUT_FILE_NAME);
    // dumpFile(INPUT_FILE_NAME);
    PigServer server = createPigServer(true);
    int queryNumber = 1;
    logAndRegister(server, "A = load '" + INPUT_FILE_NAME + "' as (id:int, char5:chararray, varchar10:chararray, dec52:bigdecimal);", queryNumber++);
    logAndRegister(server, "store A into '" + tblName + "' using " + HCatStorer.class.getName() + "();", queryNumber++);
    logAndRegister(server, "B = load '" + tblName + "' using " + HCatLoader.class.getName() + "();", queryNumber);
    try {
        driver.run("select * from " + tblName);
    } catch (CommandProcessorException e) {
        LOG.debug("cpr.respCode=" + e.getResponseCode() + " cpr.errMsg=" + e.getMessage());
    }
    List l = new ArrayList();
    driver.getResults(l);
    LOG.debug("Dumping rows via SQL from " + tblName);
    /*
     * Unfortunately Timestamp.toString() adjusts the value for local TZ and 't' is a String thus
     * the timestamp in 't' doesn't match rawData
     */
    for (Object t : l) {
        LOG.debug(t == null ? null : t.toString());
    }
    Iterator<Tuple> itr = server.openIterator("B");
    int numRowsRead = 0;
    while (itr.hasNext()) {
        Tuple t = itr.next();
        StringBuilder rowFromPig = new StringBuilder();
        for (int i = 0; i < t.size(); i++) {
            rowFromPig.append(t.get(i)).append("\t");
        }
        rowFromPig.setLength(rowFromPig.length() - 1);
        assertEquals("Comparing Pig to Raw data", rows[numRowsRead], rowFromPig.toString());
        // see comment at "Dumping rows via SQL..." for why this doesn't work (for all types)
        // assertEquals("Comparing Pig to Hive", rowFromPig.toString(), l.get(numRowsRead));
        numRowsRead++;
    }
    assertEquals("Expected " + NUM_ROWS + " rows; got " + numRowsRead + " file=" + INPUT_FILE_NAME, NUM_ROWS, numRowsRead);
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) ArrayList(java.util.ArrayList) PigServer(org.apache.pig.PigServer) ArrayList(java.util.ArrayList) List(java.util.List) Tuple(org.apache.pig.data.Tuple) Test(org.junit.Test) HCatBaseTest(org.apache.hive.hcatalog.mapreduce.HCatBaseTest)

Example 79 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

the class TestMetastoreVersion method testMetastoreVersion.

/**
 * Test that with no verification, and record verification enabled, hive populates the schema
 * and version correctly
 * @throws Exception
 */
@Test
public void testMetastoreVersion() throws Exception {
    // let the schema and version be auto created
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION.toString(), "true");
    hiveConf = new HiveConf(this.getClass());
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    try {
        driver.run("show tables");
        assert false;
    } catch (CommandProcessorException e) {
    // this is expected
    }
    // correct version stored by Metastore during startup
    assertEquals(metastoreSchemaInfo.getHiveSchemaVersion(), getVersion(hiveConf));
    setVersion(hiveConf, "foo");
    assertEquals("foo", getVersion(hiveConf));
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Test(org.junit.Test)

Example 80 with CommandProcessorException

use of org.apache.hadoop.hive.ql.processors.CommandProcessorException in project hive by apache.

the class TestMetastoreVersion method testVersionMisMatch.

/**
 * Store garbage version in metastore and verify that hive fails when verification is on
 * @throws Exception
 */
@Test
public void testVersionMisMatch() throws Exception {
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
    hiveConf = new HiveConf(this.getClass());
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    driver.run("show tables");
    ObjectStore.setSchemaVerified(false);
    System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "true");
    hiveConf = new HiveConf(this.getClass());
    setVersion(hiveConf, "fooVersion");
    SessionState.start(new CliSessionState(hiveConf));
    driver = DriverFactory.newDriver(hiveConf);
    try {
        driver.run("show tables");
        assert false;
    } catch (CommandProcessorException e) {
    // this is expected
    }
}
Also used : CommandProcessorException(org.apache.hadoop.hive.ql.processors.CommandProcessorException) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) Test(org.junit.Test)

Aggregations

CommandProcessorException (org.apache.hadoop.hive.ql.processors.CommandProcessorException)85 Test (org.junit.Test)42 IOException (java.io.IOException)14 CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse)14 Driver (org.apache.hadoop.hive.ql.Driver)12 ArrayList (java.util.ArrayList)10 HiveConf (org.apache.hadoop.hive.conf.HiveConf)10 QTestProcessExecResult (org.apache.hadoop.hive.ql.QTestProcessExecResult)9 Path (org.apache.hadoop.fs.Path)8 FileSystem (org.apache.hadoop.fs.FileSystem)7 CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)6 File (java.io.File)5 IDriver (org.apache.hadoop.hive.ql.IDriver)5 FileNotFoundException (java.io.FileNotFoundException)4 LockException (org.apache.hadoop.hive.ql.lockmgr.LockException)4 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)4 UnsupportedEncodingException (java.io.UnsupportedEncodingException)3 Map (java.util.Map)3 Nullable (javax.annotation.Nullable)3 Database (org.apache.hadoop.hive.metastore.api.Database)3