Example 1 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestHCatMultiOutputFormat method initializeSetup.

private static void initializeSetup() throws Exception {
    hiveConf = new HiveConf(mrConf, TestHCatMultiOutputFormat.class);
    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
    hiveConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
    System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
    hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehousedir.toString());
    try {
        hmsc = new HiveMetaStoreClient(hiveConf);
        initalizeTables();
    } catch (Throwable e) {
        LOG.error("Exception encountered while setting up testcase", e);
        throw new Exception(e);
    } finally {
        // Guard the close: if the client constructor failed, hmsc is still null and an
        // unguarded close() would throw an NPE that masks the original failure.
        if (hmsc != null) {
            hmsc.close();
        }
    }
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) HCatSemanticAnalyzer(org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HCatException(org.apache.hive.hcatalog.common.HCatException) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
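
The configuration-heavy setup above reduces to a small core pattern: point a HiveConf at a metastore URI, construct a HiveMetaStoreClient, and close it when finished. A minimal, hedged sketch of that pattern (the thrift URI is a placeholder, not a value from the example):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;

public class MetaStoreClientSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Placeholder URI; in the test above this comes from the embedded metastore port.
        conf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:9083");
        IMetaStoreClient client = null;
        try {
            client = new HiveMetaStoreClient(conf);
            // Simple smoke test of the connection: list the databases.
            for (String db : client.getAllDatabases()) {
                System.out.println(db);
            }
        } finally {
            if (client != null) {
                client.close();
            }
        }
    }
}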

Example 2 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestReplicationScenarios method setUpBeforeClass.

// if verifySetup is set to true, all the test setup we do will perform additional
// verifications as well, which is useful to verify that our setup occurred
// correctly when developing and debugging tests. These verifications, however
// do not test any new functionality for replication, and thus, are not relevant
// for testing replication itself. For steady state, we want this to be false.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    hconf = new HiveConf(TestReplicationScenarios.class);
    String metastoreUri = System.getProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname);
    if (metastoreUri != null) {
        hconf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUri);
        useExternalMS = true;
        return;
    }
    // turn on db notification listener on metastore
    hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS, DBNOTIF_LISTENER_CLASSNAME);
    hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
    hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
    hconf.setVar(HiveConf.ConfVars.REPLCMDIR, TEST_PATH + "/cmroot/");
    msPort = MetaStoreUtils.startMetaStore(hconf);
    hconf.setVar(HiveConf.ConfVars.REPLDIR, TEST_PATH + "/hrepl/");
    hconf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
    hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
    System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
    Path testPath = new Path(TEST_PATH);
    FileSystem fs = FileSystem.get(testPath.toUri(), hconf);
    fs.mkdirs(testPath);
    driver = new Driver(hconf);
    SessionState.start(new CliSessionState(hconf));
    metaStoreClient = new HiveMetaStoreClient(hconf);
}
Also used : Path(org.apache.hadoop.fs.Path) HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) BeforeClass(org.junit.BeforeClass)
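
The first branch of setUpBeforeClass() lets the suite reuse an already-running metastore instead of starting an embedded one. A hedged illustration of how that branch could be triggered; the property name follows from the code above ("test." + hive.metastore.uris), while the URI is a placeholder:

// Hypothetical: supply an external metastore URI, programmatically or via a -D JVM option,
// before setUpBeforeClass() runs, so the embedded metastore is never started.
System.setProperty("test." + HiveConf.ConfVars.METASTOREURIS.varname, "thrift://external-metastore:9083");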

Example 3 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project storm by apache.

the class HiveSetupUtil method createDbAndTable.

public static void createDbAndTable(HiveConf conf, String databaseName, String tableName, List<String> partVals, String[] colNames, String[] colTypes, String[] partNames, String dbLocation) throws Exception {
    IMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
        Database db = new Database();
        db.setName(databaseName);
        db.setLocationUri(dbLocation);
        client.createDatabase(db);
        Table tbl = new Table();
        tbl.setDbName(databaseName);
        tbl.setTableName(tableName);
        tbl.setTableType(TableType.MANAGED_TABLE.toString());
        StorageDescriptor sd = new StorageDescriptor();
        sd.setCols(getTableColumns(colNames, colTypes));
        sd.setNumBuckets(1);
        sd.setLocation(dbLocation + Path.SEPARATOR + tableName);
        if (partNames != null && partNames.length != 0) {
            tbl.setPartitionKeys(getPartitionKeys(partNames));
        }
        tbl.setSd(sd);
        sd.setBucketCols(new ArrayList<String>(2));
        sd.setSerdeInfo(new SerDeInfo());
        sd.getSerdeInfo().setName(tbl.getTableName());
        sd.getSerdeInfo().setParameters(new HashMap<String, String>());
        sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
        sd.getSerdeInfo().setSerializationLib(OrcSerde.class.getName());
        sd.setInputFormat(OrcInputFormat.class.getName());
        sd.setOutputFormat(OrcOutputFormat.class.getName());
        Map<String, String> tableParams = new HashMap<String, String>();
        tbl.setParameters(tableParams);
        client.createTable(tbl);
        try {
            if (partVals != null && partVals.size() > 0) {
                addPartition(client, tbl, partVals);
            }
        } catch (AlreadyExistsException e) {
            // The partition already exists; that is acceptable for test setup, so ignore it.
        }
    } finally {
        client.close();
    }
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Table(org.apache.hadoop.hive.metastore.api.Table) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) HashMap(java.util.HashMap) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) OrcOutputFormat(org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat) OrcSerde(org.apache.hadoop.hive.ql.io.orc.OrcSerde) OrcInputFormat(org.apache.hadoop.hive.ql.io.orc.OrcInputFormat) Database(org.apache.hadoop.hive.metastore.api.Database)
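
Because createDbAndTable is a general helper, a call site is easy to sketch from its signature. The database name, columns, partition value, and warehouse path below are placeholders chosen for illustration, not values from the storm test suite:

import java.util.Arrays;
import org.apache.hadoop.hive.conf.HiveConf;

HiveConf conf = new HiveConf();
HiveSetupUtil.createDbAndTable(
    conf,
    "test_db",                              // database name
    "test_table",                           // table name
    Arrays.asList("2024"),                  // one partition value
    new String[] { "id", "name" },          // column names
    new String[] { "int", "string" },       // column types
    new String[] { "year" },                // partition column
    "file:///tmp/hive/warehouse/test_db");  // database location; the table goes under it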

Example 4 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class HiveMetastoreTest method startMetastore.

@BeforeClass
public static void startMetastore() throws Exception {
    HiveMetastoreTest.metastore = new TestHiveMetastore();
    metastore.start();
    HiveMetastoreTest.hiveConf = metastore.hiveConf();
    HiveMetastoreTest.metastoreClient = new HiveMetaStoreClient(hiveConf);
    String dbPath = metastore.getDatabasePath(DB_NAME);
    Database db = new Database(DB_NAME, "description", dbPath, new HashMap<>());
    metastoreClient.createDatabase(db);
    HiveMetastoreTest.catalog = (HiveCatalog) CatalogUtil.loadCatalog(
        HiveCatalog.class.getName(),
        CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE,
        ImmutableMap.of(CatalogProperties.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS, String.valueOf(EVICTION_INTERVAL)),
        hiveConf);
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) Database(org.apache.hadoop.hive.metastore.api.Database) BeforeClass(org.junit.BeforeClass)
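
A matching tear-down is implied but not shown. A hedged sketch of what it could look like, simply mirroring the resources acquired above (the actual clean-up in HiveMetastoreTest may differ):

@AfterClass
public static void stopMetastore() throws Exception {
    // Drop the test database, then release the client and the embedded metastore from startMetastore().
    if (metastoreClient != null) {
        metastoreClient.dropDatabase(DB_NAME, true /* deleteData */, true /* ignoreUnknownDb */, true /* cascade */);
        metastoreClient.close();
    }
    if (metastore != null) {
        metastore.stop();
    }
}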

Example 5 with HiveMetaStoreClient

use of org.apache.hadoop.hive.metastore.HiveMetaStoreClient in project hive by apache.

the class TestHiveCommitLocks method initializeSpies.

@BeforeClass
public static void initializeSpies() throws Exception {
    overriddenHiveConf.setLong("iceberg.hive.lock-timeout-ms", 6 * 1000);
    overriddenHiveConf.setLong("iceberg.hive.lock-check-min-wait-ms", 50);
    overriddenHiveConf.setLong("iceberg.hive.lock-check-max-wait-ms", 5 * 1000);
    // Set up the spy clients as static variables instead of before every test.
    // The spy clients are reused between methods and closed at the end of all tests in this class.
    spyClientPool = spy(new HiveClientPool(1, overriddenHiveConf));
    when(spyClientPool.newClient()).thenAnswer(invocation -> {
        // cannot spy on RetryingHiveMetastoreClient as it is a proxy
        IMetaStoreClient client = spy(new HiveMetaStoreClient(hiveConf));
        spyClientRef.set(client);
        return spyClientRef.get();
    });
    // To ensure new client is created.
    spyClientPool.run(IMetaStoreClient::isLocalMetaStore);
    spyCachedClientPool = spy(new CachedClientPool(hiveConf, Collections.emptyMap()));
    when(spyCachedClientPool.clientPool()).thenAnswer(invocation -> spyClientPool);
    Assert.assertNotNull(spyClientRef.get());
    spyClient = spyClientRef.get();
}
Also used : HiveMetaStoreClient(org.apache.hadoop.hive.metastore.HiveMetaStoreClient) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) BeforeClass(org.junit.BeforeClass)
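
The point of the spies is to let individual tests assert on the metastore calls made during a commit. A hypothetical assertion of that kind; the verified method and invocation count are illustrative, not taken from the test class:

// Hypothetical: assert that acquiring a commit lock went through the spied client at least once.
verify(spyClient, atLeastOnce()).lock(any(LockRequest.class));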

Aggregations

HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 141
IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient): 81
Test (org.junit.Test): 78
Table (org.apache.hadoop.hive.metastore.api.Table): 60
FileSystem (org.apache.hadoop.fs.FileSystem): 57
Path (org.apache.hadoop.fs.Path): 45
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 31
Before (org.junit.Before): 23
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 18
FileStatus (org.apache.hadoop.fs.FileStatus): 17
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 16
File (java.io.File): 12
IOException (java.io.IOException): 12
HiveStreamingConnection (org.apache.hive.streaming.HiveStreamingConnection): 12
ArrayList (java.util.ArrayList): 11
TxnStore (org.apache.hadoop.hive.metastore.txn.TxnStore): 10
StreamingConnection (org.apache.hive.streaming.StreamingConnection): 10
List (java.util.List): 9
HashMap (java.util.HashMap): 8
CompactionRequest (org.apache.hadoop.hive.metastore.api.CompactionRequest): 8