Use of org.apache.hadoop.hive.metastore.api.EnvironmentContext in project hive by apache.
The class TestHiveMetaStoreWithEnvironmentContext, method setUp:
  @Override
  protected void setUp() throws Exception {
    super.setUp();

    // Register DummyListener so metastore events (and the EnvironmentContext they carry) can be inspected.
    System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName());

    // Start a standalone metastore on a free port and point the client configuration at it.
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf);
    msc.dropDatabase(dbName, true, true);

    // Build the EnvironmentContext that will accompany metastore operations in the tests.
    Map<String, String> envProperties = new HashMap<String, String>();
    envProperties.put("hadoop.job.ugi", "test_user");
    envContext = new EnvironmentContext(envProperties);

    db.setName(dbName);

    // Table definition: one data column ("a") and one partition column ("b").
    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put("a", "string");
    List<FieldSchema> partitionKeys = new ArrayList<FieldSchema>();
    partitionKeys.add(new FieldSchema("b", "string", ""));

    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("a", "string", ""));
    cols.add(new FieldSchema("b", "string", ""));

    // Storage descriptor: LazySimpleSerDe with Hive input/output formats.
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(cols);
    sd.setCompressed(false);
    sd.setParameters(tableParams);
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tblName);
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
    sd.setInputFormat(HiveInputFormat.class.getName());
    sd.setOutputFormat(HiveOutputFormat.class.getName());

    table.setDbName(dbName);
    table.setTableName(tblName);
    table.setParameters(tableParams);
    table.setPartitionKeys(partitionKeys);
    table.setSd(sd);

    // A single partition (b=2011) that shares the table's storage descriptor.
    List<String> partValues = new ArrayList<String>();
    partValues.add("2011");
    partition.setDbName(dbName);
    partition.setTableName(tblName);
    partition.setValues(partValues);
    partition.setSd(table.getSd().deepCopy());
    partition.getSd().setSerdeInfo(table.getSd().getSerdeInfo().deepCopy());

    DummyListener.notifyList.clear();
  }
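The objects initialized above (hiveConf, msc, db, table, partition, envContext) are fields of the test class. A minimal sketch of how the EnvironmentContext built in setUp could then be exercised, assuming the HiveMetaStoreClient.createTable(Table, EnvironmentContext) overload and the ListenerEvent.getEnvironmentContext() accessor present in this Hive version; this is not the verbatim test body:

  // Sketch only: assumes this method sits in the same test class and that
  // DummyListener.notifyList records every ListenerEvent the metastore fires.
  public void testEnvironmentContext() throws Exception {
    List<ListenerEvent> notifyList = DummyListener.notifyList;

    msc.createDatabase(db);
    msc.createTable(table, envContext);  // envContext travels with the create-table call

    // The listener should have received a CreateTableEvent carrying the same EnvironmentContext.
    CreateTableEvent tblEvent = (CreateTableEvent) notifyList.get(notifyList.size() - 1);
    assertTrue(tblEvent.getStatus());
    assertEquals(envContext, tblEvent.getEnvironmentContext());

    // Clean up so later tests start from an empty metastore.
    msc.dropTable(dbName, tblName);
    msc.dropDatabase(dbName);
  }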