Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
From the class CliDriver, method processLocalCmd:
int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
  boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF);
  int ret = 0;
  if (proc != null) {
    if (proc instanceof IDriver) {
      IDriver qp = (IDriver) proc;
      PrintStream out = ss.out;
      long start = System.currentTimeMillis();
      if (ss.getIsVerbose()) {
        out.println(cmd);
      }
      ret = qp.run(cmd).getResponseCode();
      if (ret != 0) {
        qp.close();
        return ret;
      }
      // query has run capture the time
      long end = System.currentTimeMillis();
      double timeTaken = (end - start) / 1000.0;
      ArrayList<String> res = new ArrayList<String>();
      printHeader(qp, out);
      // print the results
      int counter = 0;
      try {
        if (out instanceof FetchConverter) {
          ((FetchConverter) out).fetchStarted();
        }
        while (qp.getResults(res)) {
          for (String r : res) {
            if (escapeCRLF) {
              r = EscapeCRLFHelper.escapeCRLF(r);
            }
            out.println(r);
          }
          counter += res.size();
          res.clear();
          if (out.checkError()) {
            break;
          }
        }
      } catch (IOException e) {
        console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
            "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
        ret = 1;
      }
      qp.close();
      if (out instanceof FetchConverter) {
        ((FetchConverter) out).fetchFinished();
      }
      console.printInfo("Time taken: " + timeTaken + " seconds"
          + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
    } else {
      String firstToken = tokenizeCmd(cmd.trim())[0];
      String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
      if (ss.getIsVerbose()) {
        ss.out.println(firstToken + " " + cmd_1);
      }
      CommandProcessorResponse res = proc.run(cmd_1);
      if (res.getResponseCode() != 0) {
        ss.out.println("Query returned non-zero code: " + res.getResponseCode()
            + ", cause: " + res.getErrorMessage());
      }
      if (res.getConsoleMessages() != null) {
        for (String consoleMsg : res.getConsoleMessages()) {
          console.printInfo(consoleMsg);
        }
      }
      ret = res.getResponseCode();
    }
  }
  return ret;
}
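The loop above is the core contract between CliDriver and an IDriver: run the statement, check the response code, then call getResults repeatedly until it returns false, clearing the batch list between calls. A minimal sketch of the same pattern used directly follows; it assumes the caller already has an IDriver instance (for example from a createDriver()-style helper like the ones in the tests below) and relies only on run(), getResults() and close() as used in the snippet above.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.IDriver;

public class FetchLoopSketch {

  // Run a single statement and print every fetched row to stdout.
  // Returns the driver's response code (0 on success).
  static int runAndPrint(IDriver driver, String sql) throws IOException {
    int ret = driver.run(sql).getResponseCode();
    if (ret != 0) {
      driver.close();
      return ret;
    }
    List<String> batch = new ArrayList<String>();
    while (driver.getResults(batch)) { // false once all rows have been fetched
      for (String row : batch) {
        System.out.println(row);
      }
      batch.clear(); // getResults appends to the list, so clear it per batch
    }
    driver.close();
    return 0;
  }
}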
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
From the class TestCliDriverMethods, method headerPrintingTestDriver:
/**
 * Does the actual testing against a mocked CliDriver, based on the type of schema provided.
 *
 * @param mockSchema
 *          the schema to run the test against
 * @return the output that would have been sent to the user
 * @throws CommandNeedRetryException
 *           won't actually be thrown
 */
private PrintStream headerPrintingTestDriver(Schema mockSchema) {
  CliDriver cliDriver = new CliDriver();
  // We want the driver to try to print the header...
  Configuration conf = mock(Configuration.class);
  when(conf.getBoolean(eq(ConfVars.HIVE_CLI_PRINT_HEADER.varname), anyBoolean())).thenReturn(true);
  cliDriver.setConf(conf);
  IDriver proc = mock(IDriver.class);
  CommandProcessorResponse cpr = mock(CommandProcessorResponse.class);
  when(cpr.getResponseCode()).thenReturn(0);
  when(proc.run(anyString())).thenReturn(cpr);
  // and then see what happens based on the provided schema
  when(proc.getSchema()).thenReturn(mockSchema);
  CliSessionState mockSS = mock(CliSessionState.class);
  PrintStream mockOut = mock(PrintStream.class);
  mockSS.out = mockOut;
  cliDriver.processLocalCmd("use default;", proc, mockSS);
  return mockOut;
}
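A test that uses this helper passes in a mocked Schema and then asserts against the returned PrintStream. A hypothetical caller along those lines is sketched below; FieldSchema comes from the Hive metastore API, Arrays and the Mockito verify/atLeastOnce/anyString imports are assumed, and the column name plus the exact verification are illustrative (they assume printHeader() writes column names via print()).

@Test
public void headerPrintedForNonEmptySchema() {
  // Schema with a single column; FieldSchema(name, type, comment).
  Schema mockSchema = mock(Schema.class);
  when(mockSchema.getFieldSchemas())
      .thenReturn(Arrays.asList(new FieldSchema("key", "string", "")));

  PrintStream out = headerPrintingTestDriver(mockSchema);

  // Assumes the header is emitted via print() on the session's output stream.
  verify(out, atLeastOnce()).print(anyString());
}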
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
From the class TestHiveTestEnvSetup, method afterClass:
@AfterClass
public static void afterClass() throws Exception {
  IDriver driver = createDriver();
  dropTables(driver);
}
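createDriver() and dropTables() are private helpers of the test class and are not shown on this page. A rough sketch of what a createDriver()-style helper typically does in these driver-level tests is given below; it assumes a HiveConf can be constructed locally and uses DriverFactory to obtain the IDriver implementation, and the concrete settings are illustrative rather than taken from the real test.

private static IDriver createDriver() {
  // Illustrative configuration; the real test wires in the environment prepared by the class.
  HiveConf conf = new HiveConf();
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(conf);
  return DriverFactory.newDriver(conf);
}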
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
From the class TestCounterMapping, method testUsageOfRuntimeInfo:
@Test
public void testUsageOfRuntimeInfo() throws ParseException {
  IDriver driver = createDriver();
  String query = "select sum(u) from tu where u>1";

  PlanMapper pm1 = getMapperForQuery(driver, query);
  List<FilterOperator> filters1 = pm1.getAll(FilterOperator.class);
  filters1.sort(OPERATOR_ID_COMPARATOR.reversed());
  FilterOperator filter1 = filters1.get(0);

  driver = createDriver();
  ((ReExecDriver) driver).setRuntimeStatsSource(new SimpleRuntimeStatsSource(pm1));

  PlanMapper pm2 = getMapperForQuery(driver, query);
  List<FilterOperator> filters2 = pm2.getAll(FilterOperator.class);
  filters2.sort(OPERATOR_ID_COMPARATOR.reversed());
  FilterOperator filter2 = filters2.get(0);

  assertEquals("original check", 7, filter1.getStatistics().getNumRows());
  assertEquals("optimized check", 6, filter2.getStatistics().getNumRows());
}
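getMapperForQuery() is another helper of the test class: it runs the query and hands back the PlanMapper recorded while the query was planned. A plausible sketch follows, assuming the plan mapper is reachable through the driver's Context, which is how these plan-mapping tests typically retrieve it; the helper's real body may differ.

private static PlanMapper getMapperForQuery(IDriver driver, String query) {
  int ret = driver.run(query).getResponseCode();
  assertEquals("Checking command success", 0, ret);
  // The PlanMapper collected during planning is kept on the driver's Context.
  return driver.getContext().getPlanMapper();
}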
Use of org.apache.hadoop.hive.ql.IDriver in project hive by apache.
From the class TestCounterMapping, method beforeClass:
@BeforeClass
public static void beforeClass() throws Exception {
  IDriver driver = createDriver();
  dropTables(driver);
  String[] cmds = {
      // @formatter:off
      "create table s (x int)",
      "insert into s values (1),(2),(3),(4),(5),(6),(7),(8),(9),(10)",
      "create table tu(id_uv int,id_uw int,u int)",
      "create table tv(id_uv int,v int)",
      "create table tw(id_uw int,w int)",
      "from s\n" +
          "insert overwrite table tu\n" +
          " select x,x,x\n" +
          " where x<=6 or x=10\n" +
          "insert overwrite table tv\n" +
          " select x,x\n" +
          " where x<=3 or x=10\n" +
          "insert overwrite table tw\n" +
          " select x,x\n" +
          ""
      // @formatter:on
  };
  for (String cmd : cmds) {
    int ret = driver.run(cmd).getResponseCode();
    assertEquals("Checking command success", 0, ret);
  }
}
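dropTables() mirrors this setup: before the tables are recreated it removes whatever a previous run left behind. A sketch of that helper, using the table names created above (its real body may differ):

private static void dropTables(IDriver driver) throws Exception {
  // Tables created in beforeClass(); dropped so repeated runs start from a clean state.
  String[] tables = { "s", "tu", "tv", "tw" };
  for (String table : tables) {
    int ret = driver.run("drop table if exists " + table).getResponseCode();
    assertEquals("Checking command success", 0, ret);
  }
}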