Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
The class HCatCli, method main:
@SuppressWarnings("static-access")
public static void main(String[] args) {
  try {
    LogUtils.initHiveLog4j();
  } catch (LogInitializationException e) {
    // ignore: proceed even if Hive log4j initialization fails
  }
  LOG = LoggerFactory.getLogger(HCatCli.class);

  CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
  ss.in = System.in;
  try {
    ss.out = new SessionStream(System.out, true, "UTF-8");
    ss.err = new SessionStream(System.err, true, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    System.exit(1);
  }

  HiveConf conf = ss.getConf();
  HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());

  String engine = HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE);
  final String MR_ENGINE = "mr";
  if (!MR_ENGINE.equalsIgnoreCase(engine)) {
    HiveConf.setVar(conf, ConfVars.HIVE_EXECUTION_ENGINE, MR_ENGINE);
    LOG.info("Forcing " + ConfVars.HIVE_EXECUTION_ENGINE + " to " + MR_ENGINE);
  }

  Options options = new Options();
  // -e 'quoted-query-string'
  options.addOption(OptionBuilder.hasArg().withArgName("exec").withDescription("hcat command given from command line").create('e'));
  // -f <query-file>
  options.addOption(OptionBuilder.hasArg().withArgName("file").withDescription("hcat commands in file").create('f'));
  // -g <group>
  options.addOption(OptionBuilder.hasArg().withArgName("group").withDescription("group for the db/table specified in CREATE statement").create('g'));
  // -p <perms>
  options.addOption(OptionBuilder.hasArg().withArgName("perms").withDescription("permissions for the db/table specified in CREATE statement").create('p'));
  // -D <property=value>
  options.addOption(OptionBuilder.hasArgs(2).withArgName("property=value").withValueSeparator().withDescription("use hadoop value for given property").create('D'));
  // [-h|--help]
  options.addOption(new Option("h", "help", false, "Print help information"));

  Parser parser = new GnuParser();
  CommandLine cmdLine = null;
  try {
    cmdLine = parser.parse(options, args);
  } catch (ParseException e) {
    // Note: we print to System.err instead of ss.err because if we can't parse the
    // command line, we haven't even begun, and therefore cannot be expected to have
    // reasonably constructed or started the SessionState.
    printUsage(options, System.err);
    System.exit(1);
  }

  // -D: process these first, so that we can instantiate SessionState appropriately.
  setConfProperties(conf, cmdLine.getOptionProperties("D"));

  // -h
  if (cmdLine.hasOption('h')) {
    printUsage(options, ss.out);
    sysExit(ss, 0);
  }

  // -e
  String execString = (String) cmdLine.getOptionValue('e');
  // -f
  String fileName = (String) cmdLine.getOptionValue('f');
  if (execString != null && fileName != null) {
    ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
    printUsage(options, ss.err);
    sysExit(ss, 1);
  }

  // -p
  String perms = (String) cmdLine.getOptionValue('p');
  if (perms != null) {
    validatePermissions(ss, conf, perms);
  }

  // -g
  String grp = (String) cmdLine.getOptionValue('g');
  if (grp != null) {
    conf.set(HCatConstants.HCAT_GROUP, grp);
  }

  // Now that the properties are in, we can instantiate SessionState.
  SessionState.start(ss);

  // all done parsing, let's run stuff!
  if (execString != null) {
    // remove the leading and trailing quotes; hcatalog can miss them in some cases
    if (execString.length() > 1 && execString.startsWith("\"") && execString.endsWith("\"")) {
      execString = execString.substring(1, execString.length() - 1);
    }
    sysExit(ss, processLine(execString));
  }

  try {
    if (fileName != null) {
      sysExit(ss, processFile(fileName));
    }
  } catch (FileNotFoundException e) {
    ss.err.println("Input file not found. (" + e.getMessage() + ")");
    sysExit(ss, 1);
  } catch (IOException e) {
    ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
    sysExit(ss, 1);
  }

  // Neither -e nor -f was given: nothing to execute, so print usage and exit with an error.
  printUsage(options, ss.err);
  sysExit(ss, 1);
}
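The -D handling above relies on Commons CLI's value separator: OptionBuilder.hasArgs(2).withValueSeparator() splits each -Dkey=value argument so that cmdLine.getOptionProperties("D") returns the pairs as a java.util.Properties. The setConfProperties helper itself is not shown on this page; a minimal sketch of what it could look like, assuming it simply copies each parsed property into the HiveConf:

import java.util.Properties;
import org.apache.hadoop.hive.conf.HiveConf;

// Sketch only; the real setConfProperties lives in HCatCli and may do more
// (for example, also exporting the values as system properties).
private static void setConfProperties(HiveConf conf, Properties props) {
  for (String key : props.stringPropertyNames()) {
    conf.set(key, props.getProperty(key));
  }
}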
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
The class TestPigHBaseStorageHandler, method Initialize:
public void Initialize() throws Exception {
  hcatConf = new HiveConf(this.getClass());
  // hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
  //     HCatSemanticAnalyzer.class.getName());
  URI fsuri = getFileSystem().getUri();
  Path whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(), getTestDir());
  hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
  hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
  hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
  hcatConf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
  hcatConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");

  // Add hbase properties
  for (Map.Entry<String, String> el : getHbaseConf()) {
    if (el.getKey().startsWith("hbase.")) {
      hcatConf.set(el.getKey(), el.getValue());
    }
  }

  driver = DriverFactory.newDriver(hcatConf);
  SessionState.start(new CliSessionState(hcatConf));
}
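Once SessionState.start(...) has run, the driver created by DriverFactory.newDriver(hcatConf) can compile and execute HiveQL against the test metastore. A hypothetical follow-up (not part of the test above), assuming a recent Hive where IDriver.run throws on failure and getResults returns rows as strings:

import java.util.ArrayList;

// Hypothetical usage after Initialize(): run a statement and fetch its output.
ArrayList<String> results = new ArrayList<String>();
driver.run("SHOW TABLES");
driver.getResults(results);  // each element is one result row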
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
The class TestStreaming, method setup:
@Before
public void setup() throws Exception {
  SessionState.start(new CliSessionState(conf));
  driver = DriverFactory.newDriver(conf);
  // make sure Driver returns all results
  driver.setMaxRows(200002);

  // drop and recreate the necessary databases and tables
  dropDB(msClient, dbName);
  String[] colNames = new String[] { COL1, COL2 };
  String[] colTypes = new String[] { serdeConstants.INT_TYPE_NAME, serdeConstants.STRING_TYPE_NAME };
  String[] bucketCols = new String[] { COL1 };
  String loc1 = dbFolder.newFolder(dbName + ".db").toString();
  String[] partNames = new String[] { "Continent", "Country" };
  partLoc = createDbAndTable(driver, dbName, tblName, partitionVals, colNames, colTypes, bucketCols, partNames, loc1, 1);

  dropDB(msClient, dbName2);
  String loc2 = dbFolder.newFolder(dbName2 + ".db").toString();
  partLoc2 = createDbAndTable(driver, dbName2, tblName2, null, colNames, colTypes, bucketCols, null, loc2, 2);

  String loc3 = dbFolder.newFolder("testing5.db").toString();
  createStoreSales("testing5", loc3);

  runDDL(driver, "drop table testBucketing3.streamedtable");
  runDDL(driver, "drop table testBucketing3.finaltable");
  runDDL(driver, "drop table testBucketing3.nobucket");
}
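The runDDL helper invoked here is defined elsewhere in the test class and not shown on this page. A minimal sketch of what such a helper could look like, assuming it simply runs the statement through the driver and reports failures (useful for the DROP TABLE calls above, which may target tables that do not exist yet):

import org.apache.hadoop.hive.ql.IDriver;

// Sketch only; not the test's actual implementation.
private static boolean runDDL(IDriver driver, String sql) {
  try {
    // In recent Hive versions run(...) throws CommandProcessorException on failure;
    // older versions instead return a CommandProcessorResponse whose code must be checked.
    driver.run(sql);
    return true;
  } catch (Exception e) {
    return false;  // e.g. dropping a table that was never created
  }
}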
Use of org.apache.hadoop.hive.cli.CliSessionState in project hive by apache.
The class TestStreamingDynamicPartitioning, method setup:
@Before
public void setup() throws Exception {
  SessionState.start(new CliSessionState(conf));
  driver = DriverFactory.newDriver(conf);
  // make sure Driver returns all results
  driver.setMaxRows(200002);

  // drop and recreate the necessary databases and tables
  dropDB(msClient, dbName);
  createDbAndTable(driver, dbName, tblName, null, fieldNames, colTypes, bucketCols, partNames, loc1, 1);

  dropDB(msClient, dbName2);
  String loc2 = dbFolder.newFolder(dbName2 + ".db").toString();
  String loc3 = dbFolder.newFolder("testing5.db").toString();
  createStoreSales("testing5", loc3);

  runDDL(driver, "drop table testBucketing3.streamedtable");
  runDDL(driver, "drop table testBucketing3.finaltable");
  runDDL(driver, "drop table testBucketing3.nobucket");
}
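Both test setups also depend on a dropDB helper that is not shown here. A plausible sketch, assuming it delegates to the metastore client's dropDatabase overload with deleteData, ignoreUnknownDb, and cascade all enabled, so that a missing database is not an error and contained tables are removed:

import org.apache.hadoop.hive.metastore.IMetaStoreClient;

// Sketch only; the tests' real helper may differ.
private static void dropDB(IMetaStoreClient client, String databaseName) throws Exception {
  // (deleteData=true, ignoreUnknownDb=true, cascade=true)
  client.dropDatabase(databaseName, true, true, true);
}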