Search in sources:

Example 21 with Warehouse

Use of org.apache.hadoop.hive.metastore.Warehouse in the project hive by apache.

In the class SemanticAnalyzer, the method handleLineage.

/**
 * Records lineage mappings from output locations to the operator that produces them.
 *
 * <p>For a plain load, maps the load's source path to {@code output}. For
 * CREATE TABLE AS SELECT and CREATE MATERIALIZED VIEW commands, additionally
 * maps the default warehouse location of the table/view being created.
 *
 * @param ltd    descriptor of the load target, or {@code null} when there is none
 * @param output the operator producing the data at the mapped location
 * @throws SemanticException if the warehouse location cannot be resolved
 */
private void handleLineage(LoadTableDesc ltd, Operator output) throws SemanticException {
    if (ltd != null) {
        queryState.getLineageState().mapDirToOp(ltd.getSourcePath(), output);
    }
    // Constant-first equals: getCommandType() may be null, in which case calling
    // equals on it would throw NPE; a null command type simply matches neither branch.
    String commandType = queryState.getCommandType();
    if (HiveOperation.CREATETABLE_AS_SELECT.getOperationName().equals(commandType)) {
        Path tlocation;
        String tName = Utilities.getDbTableName(tableDesc.getDbTableName())[1];
        try {
            Warehouse wh = new Warehouse(conf);
            tlocation = wh.getDefaultTablePath(db.getDatabase(tableDesc.getDatabaseName()), tName,
                tableDesc.isExternal());
        } catch (MetaException | HiveException e) {
            throw new SemanticException(e);
        }
        queryState.getLineageState().mapDirToOp(tlocation, output);
    } else if (HiveOperation.CREATE_MATERIALIZED_VIEW.getOperationName().equals(commandType)) {
        Path tlocation;
        String[] dbTable = Utilities.getDbTableName(createVwDesc.getViewName());
        try {
            Warehouse wh = new Warehouse(conf);
            Map<String, String> tblProps = createVwDesc.getTblProps();
            // Third argument plays the isExternal role in the other call above; here a
            // non-transactional MV is treated like an external table for path purposes —
            // NOTE(review): assumption inferred from the overload, confirm against Warehouse.
            tlocation = wh.getDefaultTablePath(db.getDatabase(dbTable[0]), dbTable[1],
                tblProps == null || !AcidUtils.isTablePropertyTransactional(tblProps));
        } catch (MetaException | HiveException e) {
            throw new SemanticException(e);
        }
        queryState.getLineageState().mapDirToOp(tlocation, output);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) TreeMap(java.util.TreeMap) ImmutableMap(com.google.common.collect.ImmutableMap) SortedMap(java.util.SortedMap) HashMap(java.util.HashMap) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)

Example 22 with Warehouse

Use of org.apache.hadoop.hive.metastore.Warehouse in the project hive by apache.

In the class LoadTable, the method tableLocation.

/**
 * Resolves the filesystem location for a table being imported during replication.
 *
 * <p>External tables keep (or derive) their own location; managed tables fall back
 * to the warehouse default path.
 *
 * @param tblDesc      descriptor of the table being imported
 * @param parentDb     the already-existing parent database, or {@code null} if it
 *                     has not been created yet
 * @param tableContext replication context for the table
 * @param context      load context carrying the warehouse and configuration
 * @return the resolved location plus a flag indicating whether it was derived
 *         from the warehouse default
 * @throws MetaException    if the warehouse path cannot be computed
 * @throws SemanticException if the external-table location cannot be derived
 */
static TableLocationTuple tableLocation(ImportTableDesc tblDesc, Database parentDb, TableContext tableContext, Context context) throws MetaException, SemanticException {
    Warehouse wh = context.warehouse;
    Path defaultTablePath;
    if (parentDb == null) {
        // Database object not created yet: resolve by name via the String overload.
        defaultTablePath = wh.getDefaultTablePath(tblDesc.getDatabaseName(), tblDesc.getTableName(), tblDesc.isExternal());
    } else {
        defaultTablePath = wh.getDefaultTablePath(parentDb, tblDesc.getTableName(), tblDesc.isExternal());
    }
    // dont use TableType.EXTERNAL_TABLE.equals(tblDesc.tableType()) since this comes in as managed always for tables.
    if (tblDesc.isExternal()) {
        if (tblDesc.getLocation() == null) {
            // related rules to be applied to replicated tables across different versions of hive.
            return new TableLocationTuple(wh.getDnsPath(defaultTablePath).toString(), true);
        }
        String currentLocation = new Path(tblDesc.getLocation()).toUri().getPath();
        String newLocation = ReplExternalTables.externalTableLocation(context.hiveConf, currentLocation);
        LOG.debug("external table {} data location is: {}", tblDesc.getTableName(), newLocation);
        return new TableLocationTuple(newLocation, false);
    }
    // Reuse the path computed above instead of recomputing it: the previous code
    // recomputed via the Database overload unconditionally, which would NPE when
    // parentDb is null (that case was only handled by the String overload above).
    Path path = tableContext.waitOnPrecursor() ? wh.getDnsPath(defaultTablePath) : defaultTablePath;
    return new TableLocationTuple(path.toString(), false);
}
Also used : Path(org.apache.hadoop.fs.Path) Warehouse(org.apache.hadoop.hive.metastore.Warehouse)

Example 23 with Warehouse

Use of org.apache.hadoop.hive.metastore.Warehouse in the project hive by apache.

In the class TestSessionHiveMetastoreClientExchangePartitionsTempTable, the method setUp.

@Before
@Override
public void setUp() throws Exception {
    // Per-test fixture: fresh conf, warehouse and session, with any leftover
    // state from a previous run removed before the test tables are created.
    initHiveConf();
    wh = new Warehouse(conf);
    SessionState.start(conf);
    // Use the session's metastore client for all subsequent metastore calls.
    setClient(Hive.get(conf).getMSC());
    // Drop the test database if present (deleteData=true, ignoreUnknownDb=true,
    // cascade=true — NOTE(review): flag order assumed from dropDatabase's signature, confirm).
    getClient().dropDatabase(DB_NAME, true, true, true);
    getMetaStore().cleanWarehouseDirs();
    createTestTables();
}
Also used : Warehouse(org.apache.hadoop.hive.metastore.Warehouse) Before(org.junit.Before)

Example 24 with Warehouse

Use of org.apache.hadoop.hive.metastore.Warehouse in the project hive by apache.

In the class TestCachedStore, the method setUp.

@Before
public void setUp() throws Exception {
    // Fixture: build an ObjectStore with an in-test metastore configuration and
    // populate it with 2 databases, 4 tables (one partitioned + one unpartitioned
    // per database) and partitions for each partitioned table.
    Deadline.registerIfNot(10000000);
    Deadline.startTimer("");
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
    MetaStoreTestUtils.setConfForStandloneMode(conf);
    ObjectStore objectStore = new ObjectStore();
    objectStore.setConf(conf);
    // Create the 'hive' catalog
    HMSHandler.createDefaultCatalog(objectStore, new Warehouse(conf));
    // Create 2 database objects
    db1 = createDatabaseObject("cs_db1", "user1");
    objectStore.createDatabase(db1);
    db2 = createDatabaseObject("cs_db2", "user1");
    objectStore.createDatabase(db2);
    // For each database object, create one partitioned and one unpartitioned table
    db1Utbl1 = createUnpartitionedTableObject(db1);
    objectStore.createTable(db1Utbl1);
    db1Ptbl1 = createPartitionedTableObject(db1);
    objectStore.createTable(db1Ptbl1);
    db2Utbl1 = createUnpartitionedTableObject(db2);
    objectStore.createTable(db2Utbl1);
    db2Ptbl1 = createPartitionedTableObject(db2);
    objectStore.createTable(db2Ptbl1);
    // Create partitions for cs_db1's partitioned table
    // NOTE(review): createPartitionObjects is invoked twice per table (once for the
    // partitions, once for the names); if it is not deterministic the stored names
    // and partitions could diverge — consider calling it once and reusing the result.
    db1Ptbl1Ptns = createPartitionObjects(db1Ptbl1).getPartitions();
    db1Ptbl1PtnNames = createPartitionObjects(db1Ptbl1).getPartitionNames();
    objectStore.addPartitions(db1Ptbl1.getCatName(), db1Ptbl1.getDbName(), db1Ptbl1.getTableName(), db1Ptbl1Ptns);
    // Create partitions for cs_db2's partitioned table
    db2Ptbl1Ptns = createPartitionObjects(db2Ptbl1).getPartitions();
    db2Ptbl1PtnNames = createPartitionObjects(db2Ptbl1).getPartitionNames();
    objectStore.addPartitions(db2Ptbl1.getCatName(), db2Ptbl1.getDbName(), db2Ptbl1.getTableName(), db2Ptbl1Ptns);
    objectStore.shutdown();
}
Also used : ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) Configuration(org.apache.hadoop.conf.Configuration) Before(org.junit.Before)

Example 25 with Warehouse

Use of org.apache.hadoop.hive.metastore.Warehouse in the project hive by apache.

In the class TestCatalogCaching, the method createObjectStore.

/**
 * Prepares an {@code ObjectStore} backed by an in-test metastore configuration
 * and populates it with the default 'hive' catalog plus two extra catalogs.
 */
@Before
public void createObjectStore() throws MetaException, InvalidOperationException {
    conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
    MetaStoreTestUtils.setConfForStandloneMode(conf);
    objectStore = new ObjectStore();
    objectStore.setConf(conf);
    // The default 'hive' catalog must exist before additional catalogs are created.
    HMSHandler.createDefaultCatalog(objectStore, new Warehouse(conf));
    // Two extra catalogs rooted under /tmp.
    Catalog firstCatalog = new CatalogBuilder()
        .setName(CAT1_NAME)
        .setLocation("/tmp/cat1")
        .build();
    objectStore.createCatalog(firstCatalog);
    Catalog secondCatalog = new CatalogBuilder()
        .setName(CAT2_NAME)
        .setLocation("/tmp/cat2")
        .build();
    objectStore.createCatalog(secondCatalog);
}
Also used : ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) Warehouse(org.apache.hadoop.hive.metastore.Warehouse) CatalogBuilder(org.apache.hadoop.hive.metastore.client.builder.CatalogBuilder) Catalog(org.apache.hadoop.hive.metastore.api.Catalog) Before(org.junit.Before)

Aggregations

Warehouse (org.apache.hadoop.hive.metastore.Warehouse)31 Path (org.apache.hadoop.fs.Path)15 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)14 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)14 Table (org.apache.hadoop.hive.ql.metadata.Table)10 ArrayList (java.util.ArrayList)8 IOException (java.io.IOException)7 FileSystem (org.apache.hadoop.fs.FileSystem)6 Table (org.apache.hadoop.hive.metastore.api.Table)6 HashMap (java.util.HashMap)4 InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)4 Test (org.junit.Test)4 URI (java.net.URI)3 URISyntaxException (java.net.URISyntaxException)3 LinkedHashMap (java.util.LinkedHashMap)3 Configuration (org.apache.hadoop.conf.Configuration)3 ObjectStore (org.apache.hadoop.hive.metastore.ObjectStore)3 MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest)3 EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext)3 TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)3