Example 16 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class HCatClientHMSImpl, method listPartitionsByFilter.

@Override
public List<HCatPartition> listPartitionsByFilter(String dbName, String tblName, String filter) throws HCatException {
    List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
    try {
        HCatTable table = getTable(dbName, tblName);
        List<Partition> hivePtns = hmsClient.listPartitionsByFilter(table.getDbName(), table.getTableName(), filter, (short) -1);
        for (Partition ptn : hivePtns) {
            hcatPtns.add(new HCatPartition(table, ptn));
        }
    } catch (MetaException e) {
        throw new HCatException("MetaException while fetching partitions.", e);
    } catch (NoSuchObjectException e) {
        throw new ObjectNotFoundException("NoSuchObjectException while fetching partitions.", e);
    } catch (TException e) {
        throw new ConnectionFailureException("TException while fetching partitions.", e);
    }
    return hcatPtns;
}
Also used: TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) ArrayList(java.util.ArrayList) HCatException(org.apache.hive.hcatalog.common.HCatException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
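
A minimal caller sketch for the method above, assuming a configured HCatClient; the database, table, and filter values are made up for illustration, and the statements belong inside a method that declares throws HCatException:

// Hypothetical usage of listPartitionsByFilter; names and the filter are illustrative.
// The filter uses the metastore partition-filter syntax (string values must be quoted).
HCatClient client = HCatClient.create(new Configuration());
List<HCatPartition> parts = client.listPartitionsByFilter("default", "clicks", "dt=\"2017-01-01\"");
for (HCatPartition part : parts) {
    System.out.println(part.getLocation());
}
client.close();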

Example 17 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class HCatClientHMSImpl, method getPartitions.

@Override
public List<HCatPartition> getPartitions(String dbName, String tblName) throws HCatException {
    List<HCatPartition> hcatPtns = new ArrayList<HCatPartition>();
    try {
        HCatTable hcatTable = getTable(dbName, tblName);
        List<Partition> hivePtns = hmsClient.listPartitions(checkDB(dbName), tblName, (short) -1);
        for (Partition ptn : hivePtns) {
            hcatPtns.add(new HCatPartition(hcatTable, ptn));
        }
    } catch (NoSuchObjectException e) {
        throw new ObjectNotFoundException("NoSuchObjectException while retrieving partition.", e);
    } catch (MetaException e) {
        throw new HCatException("MetaException while retrieving partition.", e);
    } catch (TException e) {
        throw new ConnectionFailureException("TException while retrieving partition.", e);
    }
    return hcatPtns;
}
Also used: TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) ArrayList(java.util.ArrayList) HCatException(org.apache.hive.hcatalog.common.HCatException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
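
Reusing the client from the previous sketch, a missing table surfaces to the caller as HCatalog's ObjectNotFoundException rather than the raw Thrift NoSuchObjectException; the table name below is assumed not to exist:

// Hypothetical caller of getPartitions; "no_such_table" is assumed to be absent.
try {
    List<HCatPartition> parts = client.getPartitions("default", "no_such_table");
} catch (ObjectNotFoundException e) {
    // the metastore's NoSuchObjectException was caught and re-thrown as this type
    System.err.println("Table is missing: " + e.getMessage());
}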

Example 18 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project hive by apache.

The class TestReplicationScenarios, method testDrops.

@Test
public void testDrops() throws IOException {
    String testName = "drops";
    LOG.info("Testing " + testName);
    String dbName = testName + "_" + tid;
    run("CREATE DATABASE " + dbName);
    run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE");
    run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b string) STORED AS TEXTFILE");
    run("CREATE TABLE " + dbName + ".ptned2(a string) partitioned by (b string) STORED AS TEXTFILE");
    run("CREATE TABLE " + dbName + ".ptned3(a string) partitioned by (b int) STORED AS TEXTFILE");
    String[] unptn_data = new String[] { "eleven", "twelve" };
    String[] ptn_data_1 = new String[] { "thirteen", "fourteen", "fifteen" };
    String[] ptn_data_2 = new String[] { "fifteen", "sixteen", "seventeen" };
    String[] empty = new String[] {};
    String unptn_locn = new Path(TEST_PATH, testName + "_unptn").toUri().getPath();
    String ptn_locn_1 = new Path(TEST_PATH, testName + "_ptn1").toUri().getPath();
    String ptn_locn_2 = new Path(TEST_PATH, testName + "_ptn2").toUri().getPath();
    createTestDataFile(unptn_locn, unptn_data);
    createTestDataFile(ptn_locn_1, ptn_data_1);
    createTestDataFile(ptn_locn_2, ptn_data_2);
    run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".unptned");
    verifySetup("SELECT * from " + dbName + ".unptned", unptn_data);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='1')");
    verifySetup("SELECT a from " + dbName + ".ptned WHERE b='1'", ptn_data_1);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b='2')");
    verifySetup("SELECT a from " + dbName + ".ptned WHERE b='2'", ptn_data_2);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='1')");
    verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b='1'", ptn_data_1);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned2 PARTITION(b='2')");
    verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b='2'", ptn_data_2);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned3 PARTITION(b=1)");
    verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b=1", ptn_data_1);
    run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned3 PARTITION(b=2)");
    verifySetup("SELECT a from " + dbName + ".ptned2 WHERE b=2", ptn_data_2);
    // At this point, we've set up all the tables and ptns we're going to test drops across
    // Replicate it first, and then we'll drop it on the source.
    advanceDumpDir();
    run("REPL DUMP " + dbName);
    String replDumpLocn = getResult(0, 0);
    String replDumpId = getResult(0, 1, true);
    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
    printOutput();
    run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
    verifySetup("REPL STATUS " + dbName + "_dupe", new String[] { replDumpId });
    verifySetup("SELECT * from " + dbName + "_dupe.unptned", unptn_data);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned WHERE b='1'", ptn_data_1);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned WHERE b='2'", ptn_data_2);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='1'", ptn_data_1);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned2 WHERE b='2'", ptn_data_2);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=1", ptn_data_1);
    verifySetup("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=2", ptn_data_2);
    // All tables good on destination, drop on source.
    run("DROP TABLE " + dbName + ".unptned");
    run("ALTER TABLE " + dbName + ".ptned DROP PARTITION (b='2')");
    run("DROP TABLE " + dbName + ".ptned2");
    run("ALTER TABLE " + dbName + ".ptned3 DROP PARTITION (b=1)");
    verifySetup("SELECT a from " + dbName + ".ptned WHERE b='2'", empty);
    verifySetup("SELECT a from " + dbName + ".ptned", ptn_data_1);
    verifySetup("SELECT a from " + dbName + ".ptned3 WHERE b=1", empty);
    verifySetup("SELECT a from " + dbName + ".ptned3", ptn_data_2);
    // replicate the incremental drops
    advanceDumpDir();
    run("REPL DUMP " + dbName + " FROM " + replDumpId);
    String postDropReplDumpLocn = getResult(0, 0);
    String postDropReplDumpId = getResult(0, 1, true);
    LOG.info("Dumped to {} with id {}->{}", postDropReplDumpLocn, replDumpId, postDropReplDumpId);
    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
    printOutput();
    run("REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'");
    // Verify that the drops were replicated. Depending on what we're testing, a dropped
    // table or partition either no longer exists (so getTable throws NoSuchObjectException
    // or returns null), or a SELECT against it returns an empty result.
    Exception e = null;
    try {
        Table tbl = metaStoreClient.getTable(dbName + "_dupe", "unptned");
        assertNull(tbl);
    } catch (TException te) {
        e = te;
    }
    assertNotNull(e);
    assertEquals(NoSuchObjectException.class, e.getClass());
    verifyRun("SELECT a from " + dbName + "_dupe.ptned WHERE b='2'", empty);
    verifyRun("SELECT a from " + dbName + "_dupe.ptned", ptn_data_1);
    verifyRun("SELECT a from " + dbName + "_dupe.ptned3 WHERE b=1", empty);
    verifyRun("SELECT a from " + dbName + "_dupe.ptned3", ptn_data_2);
    Exception e2 = null;
    try {
        Table tbl = metaStoreClient.getTable(dbName + "_dupe", "ptned2");
        assertNull(tbl);
    } catch (TException te) {
        e2 = te;
    }
    assertNotNull(e2);
    assertEquals(NoSuchObjectException.class, e2.getClass());
}
Also used: Path(org.apache.hadoop.fs.Path) TException(org.apache.thrift.TException) Table(org.apache.hadoop.hive.metastore.api.Table) IOException(java.io.IOException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) Test(org.junit.Test)
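
The two try/catch blocks in this test repeat the same assertion shape. A small helper along these lines (a sketch, not part of the test; it assumes the test's metaStoreClient field and JUnit's fail/assertEquals are in scope) would capture the pattern:

// Sketch of a helper asserting a table is absent from the metastore.
private void assertTableMissing(String db, String table) {
    try {
        metaStoreClient.getTable(db, table);
        fail("Expected " + db + "." + table + " to be missing");
    } catch (TException te) {
        assertEquals(NoSuchObjectException.class, te.getClass());
    }
}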

Example 19 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project metacat by Netflix.

The class HiveConnectorTableService, method create.

/**
     * Create a table.
     *
     * @param requestContext The request context
     * @param tableInfo      The resource metadata
     */
@Override
public void create(@Nonnull @NonNull final ConnectorContext requestContext, @Nonnull @NonNull final TableInfo tableInfo) {
    final QualifiedName tableName = tableInfo.getName();
    try {
        final Table table = hiveMetacatConverters.fromTableInfo(tableInfo);
        updateTable(requestContext, table, tableInfo);
        metacatHiveClient.createTable(table);
    } catch (AlreadyExistsException exception) {
        throw new TableAlreadyExistsException(tableName, exception);
    } catch (MetaException exception) {
        throw new InvalidMetaException(tableName, exception);
    } catch (NoSuchObjectException | InvalidObjectException exception) {
        throw new DatabaseNotFoundException(QualifiedName.ofDatabase(tableName.getCatalogName(), tableName.getDatabaseName()), exception);
    } catch (TException exception) {
        throw new ConnectorException(String.format("Failed create hive table %s", tableName), exception);
    }
}
Also used: TException(org.apache.thrift.TException) TableAlreadyExistsException(com.netflix.metacat.common.server.connectors.exception.TableAlreadyExistsException) Table(org.apache.hadoop.hive.metastore.api.Table) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) QualifiedName(com.netflix.metacat.common.QualifiedName) DatabaseNotFoundException(com.netflix.metacat.common.server.connectors.exception.DatabaseNotFoundException) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) InvalidMetaException(com.netflix.metacat.common.server.connectors.exception.InvalidMetaException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
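
A hedged illustration of the translation this method performs: creating a table under a database that does not exist surfaces as Metacat's DatabaseNotFoundException. The tableService variable and the caller context are assumed for illustration:

// Hypothetical caller; tableService is an instance of HiveConnectorTableService.
try {
    tableService.create(requestContext, tableInfo);
} catch (DatabaseNotFoundException e) {
    // the metastore's NoSuchObjectException/InvalidObjectException arrives here, translated
    System.err.println("Parent database is missing: " + e.getMessage());
}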

Example 20 with NoSuchObjectException

Use of org.apache.hadoop.hive.metastore.api.NoSuchObjectException in project metacat by Netflix.

The class HiveConnectorPartitionService, method getPartitionKeys.

/**
 * {@inheritDoc}.
 */
@Override
public List<String> getPartitionKeys(final ConnectorRequestContext requestContext, final QualifiedName tableName, final PartitionListRequest partitionsRequest) {
    final String filterExpression = partitionsRequest.getFilter();
    final List<String> partitionIds = partitionsRequest.getPartitionNames();
    List<String> names = Lists.newArrayList();
    final Pageable pageable = partitionsRequest.getPageable();
    try {
        if (filterExpression != null || (partitionIds != null && !partitionIds.isEmpty())) {
            final Table table = metacatHiveClient.getTableByName(tableName.getDatabaseName(), tableName.getTableName());
            for (Partition partition : getPartitions(tableName, filterExpression, partitionIds, partitionsRequest.getSort(), pageable)) {
                names.add(getNameOfPartition(table, partition));
            }
        } else {
            names = metacatHiveClient.getPartitionNames(tableName.getDatabaseName(), tableName.getTableName());
            return ConnectorUtils.paginate(names, pageable);
        }
    } catch (NoSuchObjectException exception) {
        throw new TableNotFoundException(tableName, exception);
    } catch (MetaException | InvalidObjectException e) {
        throw new InvalidMetaException("Invalid metadata for " + tableName, e);
    } catch (TException e) {
        throw new ConnectorException(String.format("Failed get partitions keys for hive table %s", tableName), e);
    }
    return names;
}
Also used: TException(org.apache.thrift.TException) Partition(org.apache.hadoop.hive.metastore.api.Partition) Table(org.apache.hadoop.hive.metastore.api.Table) InvalidMetaException(com.netflix.metacat.common.server.connectors.exception.InvalidMetaException) TableNotFoundException(com.netflix.metacat.common.server.connectors.exception.TableNotFoundException) Pageable(com.netflix.metacat.common.dto.Pageable) ConnectorException(com.netflix.metacat.common.server.connectors.exception.ConnectorException) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
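
In the unfiltered branch the method fetches every partition name and lets ConnectorUtils.paginate trim the result. Conceptually that pagination is an offset/limit slice over the list, roughly like the sketch below (an illustration of the idea only, not Metacat's actual implementation, which takes a Pageable):

// Illustrative offset/limit slice; ConnectorUtils.paginate(names, pageable) is the real API.
static List<String> paginate(final List<String> names, final Integer offset, final Integer limit) {
    if (offset == null || limit == null) {
        // no paging requested: return the list unchanged
        return names;
    }
    final int from = Math.min(offset, names.size());
    final int to = Math.min(from + limit, names.size());
    return new ArrayList<>(names.subList(from, to));
}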

Aggregations

NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 144 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 74 usages
TException (org.apache.thrift.TException): 55 usages
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 47 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 45 usages
Partition (org.apache.hadoop.hive.metastore.api.Partition): 44 usages
ArrayList (java.util.ArrayList): 42 usages
IOException (java.io.IOException): 39 usages
InvalidOperationException (org.apache.hadoop.hive.metastore.api.InvalidOperationException): 36 usages
AlreadyExistsException (org.apache.hadoop.hive.metastore.api.AlreadyExistsException): 30 usages
Test (org.junit.Test): 24 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 22 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 21 usages
InvalidInputException (org.apache.hadoop.hive.metastore.api.InvalidInputException): 20 usages
UnknownDBException (org.apache.hadoop.hive.metastore.api.UnknownDBException): 20 usages
Path (org.apache.hadoop.fs.Path): 19 usages
Query (javax.jdo.Query): 17 usages
SQLException (java.sql.SQLException): 16 usages
ConnectorException (com.netflix.metacat.common.server.connectors.exception.ConnectorException): 13 usages
InvalidMetaException (com.netflix.metacat.common.server.connectors.exception.InvalidMetaException): 13 usages