Search in sources:

Example 71 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class ReplicationSemanticAnalyzer, method analyzeReplStatus:

/**
 * Resolves the last replication state id ({@code repl.last.id}) for the target
 * table (when {@code tblNameOrPattern} is set) or database, registers the
 * inspected entity as a read input for authorization/hooks, and wires the
 * result up as a single-row, single-column fetch task.
 *
 * @param ast the REPL STATUS AST node (currently unused here; the db/table
 *            names have already been extracted into the analyzer fields)
 * @throws SemanticException wrapping any metastore {@link HiveException}
 */
private void analyzeReplStatus(ASTNode ast) throws SemanticException {
    LOG.debug("ReplicationSemanticAnalyzer.analyzeReplStatus: {}.{}",
        String.valueOf(dbNameOrPattern), String.valueOf(tblNameOrPattern));
    String replLastId = null;
    try {
        // Use a fresh Hive client when the analyzer was configured to need
        // one; otherwise reuse the session's existing handle.
        Hive newDb;
        if (needNewdb) {
            newDb = Hive.get(conf, false);
        } else {
            newDb = db;
        }
        if (tblNameOrPattern != null) {
            // Checking for status of a table
            Table tbl = newDb.getTable(dbNameOrPattern, tblNameOrPattern);
            if (tbl != null) {
                inputs.add(new ReadEntity(tbl));
                replLastId = lastReplIdFrom(tbl.getParameters());
            }
        } else {
            // Checking for status of a db
            Database database = newDb.getDatabase(dbNameOrPattern);
            if (database != null) {
                inputs.add(new ReadEntity(database));
                replLastId = lastReplIdFrom(database.getParameters());
            }
        }
    } catch (HiveException e) {
        // TODO : simple wrap & rethrow for now, clean up with error codes
        throw new SemanticException(e);
    }
    prepareReturnValues(Collections.singletonList(replLastId), "last_repl_id#string");
    setFetchTask(createFetchTask("last_repl_id#string"));
    // Fixed: the original passed a trailing `conf` argument with no matching
    // {} placeholder, which SLF4J silently discards.
    LOG.debug("ReplicationSemanticAnalyzer.analyzeReplStatus: writing repl.last.id={} out to {}",
        String.valueOf(replLastId), ctx.getResFile());
}

/**
 * Returns the CURR_STATE_ID replication marker from an entity's parameter
 * map, or {@code null} when the map is absent or lacks the key.
 */
private static String lastReplIdFrom(Map<String, String> params) {
    String key = ReplicationSpec.KEY.CURR_STATE_ID.toString();
    return (params == null) ? null : params.get(key);
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Hive(org.apache.hadoop.hive.ql.metadata.Hive) Table(org.apache.hadoop.hive.ql.metadata.Table) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Database(org.apache.hadoop.hive.metastore.api.Database)

Example 72 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class TestDbTxnManager, method setUp:

@Before
public void setUp() throws Exception {
    // Prepare the transaction backing-store tables so each test starts from a
    // clean txn-manager state. Must run before the manager is created below.
    TxnDbUtil.prepDb(conf);
    txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
    // init lock manager
    txnMgr.getLockManager();
    // This suite is only meaningful against the DB-backed implementation.
    Assert.assertTrue(txnMgr instanceof DbTxnManager);
    nextInput = 1;
    readEntities = new HashSet<ReadEntity>();
    writeEntities = new HashSet<WriteEntity>();
    // Start the timed-out-txn reaper immediately and shorten the txn timeout
    // to 10s so timeout/heartbeat paths are exercised quickly in tests.
    conf.setTimeVar(HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, 0, TimeUnit.SECONDS);
    conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 10, TimeUnit.SECONDS);
    // Configure (but do not yet start) the ACID housekeeping service.
    houseKeeperService = new AcidHouseKeeperService();
    houseKeeperService.setConf(conf);
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) AcidHouseKeeperService(org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) Before(org.junit.Before)

Example 73 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class TestReadEntityDirect, method testSelectEntityInDirectJoinAlias:

/**
 * A self-join of the same view under two aliases must still record exactly two
 * read entities: the view (direct) and its underlying table (indirect).
 *
 * @throws ParseException
 */
@Test
public void testSelectEntityInDirectJoinAlias() throws ParseException {
    Driver driver = createDriver();
    int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)");
    assertEquals("Checking command success", 0, ret);
    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
    for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
        String name = entity.getName();
        if (name.equals("default@v1")) {
            // The view itself appears directly in the query text.
            assertTrue("direct", entity.isDirect());
        } else if (name.equals("default@t1")) {
            // The underlying table is only reached through the view.
            assertFalse("not direct", entity.isDirect());
        } else {
            fail("unexpected entity name " + name);
        }
    }
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Driver(org.apache.hadoop.hive.ql.Driver) Test(org.junit.Test)

Example 74 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class TestReadEntityDirect, method testSelectEntityInDirect:

/**
 * Selecting from a view must record the view as a direct read entity and its
 * underlying table as an indirect one.
 *
 * @throws ParseException
 */
@Test
public void testSelectEntityInDirect() throws ParseException {
    Driver driver = createDriver();
    int ret = driver.compile("select * from v1");
    assertEquals("Checking command success", 0, ret);
    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
    for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
        switch (entity.getName()) {
            case "default@t1":
                // Reached only through the view, hence indirect.
                assertFalse("not direct", entity.isDirect());
                break;
            case "default@v1":
                // Named directly in the query.
                assertTrue("direct", entity.isDirect());
                break;
            default:
                fail("unexpected entity name " + entity.getName());
        }
    }
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Driver(org.apache.hadoop.hive.ql.Driver) Test(org.junit.Test)

Example 75 with ReadEntity

use of org.apache.hadoop.hive.ql.hooks.ReadEntity in project hive by apache.

From the class TestReadEntityDirect, method testSelectEntityViewDirectUnion:

/**
 * When a union-all query references both the view and its underlying table,
 * the table must be marked direct too (it is accessed directly in one branch).
 *
 * @throws ParseException
 */
@Test
public void testSelectEntityViewDirectUnion() throws ParseException {
    Driver driver = createDriver();
    int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1");
    assertEquals("Checking command success", 0, ret);
    assertEquals(2, CheckInputReadEntityDirect.readEntities.size());
    for (ReadEntity entity : CheckInputReadEntityDirect.readEntities) {
        String name = entity.getName();
        // Both entities are expected to be direct here, so a single combined
        // check replaces the original two identical branches.
        if (name.equals("default@t1") || name.equals("default@v1")) {
            assertTrue("direct", entity.isDirect());
        } else {
            fail("unexpected entity name " + name);
        }
    }
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Driver(org.apache.hadoop.hive.ql.Driver) Test(org.junit.Test)

Aggregations

ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)75 Table (org.apache.hadoop.hive.ql.metadata.Table)35 WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity)34 DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)24 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)18 Partition (org.apache.hadoop.hive.ql.metadata.Partition)18 ArrayList (java.util.ArrayList)15 Referenceable (org.apache.atlas.typesystem.Referenceable)10 LinkedHashMap (java.util.LinkedHashMap)9 AlterTableExchangePartition (org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition)9 HashMap (java.util.HashMap)8 Test (org.testng.annotations.Test)8 Path (org.apache.hadoop.fs.Path)7 FileNotFoundException (java.io.FileNotFoundException)6 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)5 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)5 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)5 DefaultConstraint (org.apache.hadoop.hive.ql.metadata.DefaultConstraint)5 InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)5