
Example 11 with EnvironmentContext

Use of org.apache.hadoop.hive.metastore.api.EnvironmentContext in project hive by apache.

From the class TestHiveMetaStoreWithEnvironmentContext, method setUp. EnvironmentContext is a Thrift struct carrying a map of string properties that the metastore forwards to its event listeners; setUp builds one such context and registers a DummyListener so the tests can verify that the context arrives with each metastore event.

@Override
protected void setUp() throws Exception {
    super.setUp();
    // Register DummyListener so metastore events (and the EnvironmentContext they carry) can be inspected.
    System.setProperty("hive.metastore.event.listeners", DummyListener.class.getName());
    // Start a standalone metastore on a free port; the HiveConf below points the client at it.
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    SessionState.start(new CliSessionState(hiveConf));
    msc = new HiveMetaStoreClient(hiveConf);
    // Drop any database left over from a previous run (deleteData = true, ignoreUnknownDb = true).
    msc.dropDatabase(dbName, true, true);
    // Build the EnvironmentContext that subsequent metastore calls will carry.
    Map<String, String> envProperties = new HashMap<String, String>();
    envProperties.put("hadoop.job.ugi", "test_user");
    envContext = new EnvironmentContext(envProperties);
    db.setName(dbName);
    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put("a", "string");
    // One partition key (b) and two columns (a, b) for the table schema.
    List<FieldSchema> partitionKeys = new ArrayList<FieldSchema>();
    partitionKeys.add(new FieldSchema("b", "string", ""));
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    cols.add(new FieldSchema("a", "string", ""));
    cols.add(new FieldSchema("b", "string", ""));
    // Storage descriptor shared by the table and, via deepCopy below, the partition.
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(cols);
    sd.setCompressed(false);
    sd.setParameters(tableParams);
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tblName);
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(LazySimpleSerDe.class.getName());
    sd.setInputFormat(HiveInputFormat.class.getName());
    sd.setOutputFormat(HiveOutputFormat.class.getName());
    // Assemble the Table object that the tests will create through the metastore client.
    table.setDbName(dbName);
    table.setTableName(tblName);
    table.setParameters(tableParams);
    table.setPartitionKeys(partitionKeys);
    table.setSd(sd);
    // A single partition with value "2011" for partition key b.
    List<String> partValues = new ArrayList<String>();
    partValues.add("2011");
    partition.setDbName(dbName);
    partition.setTableName(tblName);
    partition.setValues(partValues);
    partition.setSd(table.getSd().deepCopy());
    partition.getSd().setSerdeInfo(table.getSd().getSerdeInfo().deepCopy());
    // Discard events generated during setup so each test starts with an empty event list.
    DummyListener.notifyList.clear();
}
Also used:
HashMap (java.util.HashMap)
LazySimpleSerDe (org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe)
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)
SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo)
ArrayList (java.util.ArrayList)
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor)
HiveOutputFormat (org.apache.hadoop.hive.ql.io.HiveOutputFormat)
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState)
EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext)
HiveInputFormat (org.apache.hadoop.hive.ql.io.HiveInputFormat)
HiveConf (org.apache.hadoop.hive.conf.HiveConf)
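With this setup in place, the companion test method passes envContext on the metastore client calls and asserts that DummyListener received the same context with each event. The following is a minimal sketch of that flow rather than a verbatim copy of the test: it assumes the EnvironmentContext-accepting overloads createTable(Table, EnvironmentContext), add_partition(Partition, EnvironmentContext) and dropTable(String, String, boolean, boolean, EnvironmentContext) on HiveMetaStoreClient, plus the getEnvironmentContext() accessor on ListenerEvent (and the event classes from org.apache.hadoop.hive.metastore.events), as available in the Hive version this test targets.

public void testEnvironmentContext() throws Exception {
    msc.createDatabase(db);

    // Create the table with the context attached; the listener should observe the same instance.
    msc.createTable(table, envContext);
    List<ListenerEvent> events = DummyListener.notifyList;
    CreateTableEvent tblEvent = (CreateTableEvent) events.get(events.size() - 1);
    assertTrue(tblEvent.getStatus());
    assertEquals(envContext, tblEvent.getEnvironmentContext());

    // Partition operations follow the same pattern.
    table = msc.getTable(dbName, tblName);
    partition.getSd().setLocation(table.getSd().getLocation() + "/part1");
    msc.add_partition(partition, envContext);
    AddPartitionEvent partEvent = (AddPartitionEvent) events.get(events.size() - 1);
    assertEquals(envContext, partEvent.getEnvironmentContext());

    // Clean up, again carrying the context on the drop call.
    msc.dropTable(dbName, tblName, true, false, envContext);
    msc.dropDatabase(dbName);
}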

Aggregations

EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext): 11
ArrayList (java.util.ArrayList): 5
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 5
Partition (org.apache.hadoop.hive.ql.metadata.Partition): 5
Path (org.apache.hadoop.fs.Path): 4
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 4
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 3
Table (org.apache.hadoop.hive.ql.metadata.Table): 3
URISyntaxException (java.net.URISyntaxException): 2
List (java.util.List): 2
FileStatus (org.apache.hadoop.fs.FileStatus): 2
FileSystem (org.apache.hadoop.fs.FileSystem): 2
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 2
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2
SkewedInfo (org.apache.hadoop.hive.metastore.api.SkewedInfo): 2
StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor): 2
AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition): 2
ThreadFactoryBuilder (com.google.common.util.concurrent.ThreadFactoryBuilder): 1
FileNotFoundException (java.io.FileNotFoundException): 1
IOException (java.io.IOException): 1
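On the listener side, the EnvironmentContext travels with each event, so a MetaStoreEventListener in the style of the DummyListener registered in setUp can read the properties the client attached. A minimal sketch, using a hypothetical class name UgiLoggingListener and assuming the MetaStoreEventListener(Configuration) constructor and ListenerEvent.getEnvironmentContext() accessor:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;

public class UgiLoggingListener extends MetaStoreEventListener {

    public UgiLoggingListener(Configuration config) {
        super(config);
    }

    @Override
    public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
        // The EnvironmentContext sent by the client rides along on the event; it can be null
        // when the caller did not supply one.
        if (tableEvent.getEnvironmentContext() != null
                && tableEvent.getEnvironmentContext().getProperties() != null) {
            String ugi = tableEvent.getEnvironmentContext().getProperties().get("hadoop.job.ugi");
            System.out.println("createTable issued with hadoop.job.ugi=" + ugi);
        }
    }
}

Such a listener would be registered the same way as DummyListener above, through the hive.metastore.event.listeners property.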