Example 1 with PrimaryKeysRequest

Use of org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest in project hive by apache.

From the class TestReplicationScenarios, method testSkipTables.

@Test
public void testSkipTables() throws Exception {
    String testName = "skipTables";
    String dbName = createDB(testName, driver);
    // TODO: this is wrong; this test sets up a dummy txn manager and so it cannot create ACID tables.
    // If I change it to use a proper txn manager, the setup for some tests hangs.
    // This used to work by accident; now it works due to a test flag. The test needs to be fixed.
    // Create table
    run("CREATE TABLE " + dbName + ".acid_table (key int, value int) PARTITIONED BY (load_date date) " + "CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
    run("CREATE TABLE " + dbName + ".mm_table (key int, value int) PARTITIONED BY (load_date date) " + "CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true'," + " 'transactional_properties'='insert_only')", driver);
    verifyIfTableExist(dbName, "acid_table", metaStoreClient);
    verifyIfTableExist(dbName, "mm_table", metaStoreClient);
    // Bootstrap test
    advanceDumpDir();
    run("REPL DUMP " + dbName, driver);
    String replDumpLocn = getResult(0, 0, driver);
    String replDumpId = getResult(0, 1, true, driver);
    LOG.info("Bootstrap-Dump: Dumped to {} with id {}", replDumpLocn, replDumpId);
    run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", driverMirror);
    verifyIfTableNotExist(dbName + "_dupe", "acid_table", metaStoreClientMirror);
    verifyIfTableNotExist(dbName + "_dupe", "mm_table", metaStoreClientMirror);
    // Test alter table
    run("ALTER TABLE " + dbName + ".acid_table RENAME TO " + dbName + ".acid_table_rename", driver);
    verifyIfTableExist(dbName, "acid_table_rename", metaStoreClient);
    // Perform REPL-DUMP/LOAD
    advanceDumpDir();
    run("REPL DUMP " + dbName + " FROM " + replDumpId, driver);
    String incrementalDumpLocn = getResult(0, 0, driver);
    String incrementalDumpId = getResult(0, 1, true, driver);
    LOG.info("Incremental-dump: Dumped to {} with id {}", incrementalDumpLocn, incrementalDumpId);
    run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", driverMirror);
    verifyIfTableNotExist(dbName + "_dupe", "acid_table_rename", metaStoreClientMirror);
    // Create another table for incremental repl verification
    run("CREATE TABLE " + dbName + ".acid_table_incremental (key int, value int) PARTITIONED BY (load_date date) " + "CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true')", driver);
    run("CREATE TABLE " + dbName + ".mm_table_incremental (key int, value int) PARTITIONED BY (load_date date) " + "CLUSTERED BY(key) INTO 2 BUCKETS STORED AS ORC TBLPROPERTIES ('transactional'='true'," + " 'transactional_properties'='insert_only')", driver);
    verifyIfTableExist(dbName, "acid_table_incremental", metaStoreClient);
    verifyIfTableExist(dbName, "mm_table_incremental", metaStoreClient);
    // Perform REPL-DUMP/LOAD
    advanceDumpDir();
    run("REPL DUMP " + dbName + " FROM " + incrementalDumpId, driver);
    incrementalDumpLocn = getResult(0, 0, driver);
    incrementalDumpId = getResult(0, 1, true, driver);
    LOG.info("Incremental-dump: Dumped to {} with id {}", incrementalDumpLocn, incrementalDumpId);
    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", driverMirror);
    printOutput(driverMirror);
    run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", driverMirror);
    verifyIfTableNotExist(dbName + "_dupe", "acid_table_incremental", metaStoreClientMirror);
    verifyIfTableNotExist(dbName + "_dupe", "mm_table_incremental", metaStoreClientMirror);
    // Test adding a constraint
    run("ALTER TABLE " + dbName + ".acid_table_incremental ADD CONSTRAINT key_pk PRIMARY KEY (key) DISABLE NOVALIDATE", driver);
    try {
        List<SQLPrimaryKey> pks = metaStoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, "acid_table_incremental"));
        assertEquals(pks.size(), 1);
    } catch (TException te) {
        assertNull(te);
    }
    // Perform REPL-DUMP/LOAD
    advanceDumpDir();
    run("REPL DUMP " + dbName + " FROM " + incrementalDumpId, driver);
    incrementalDumpLocn = getResult(0, 0, driver);
    incrementalDumpId = getResult(0, 1, true, driver);
    LOG.info("Incremental-dump: Dumped to {} with id {}", incrementalDumpLocn, incrementalDumpId);
    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", driverMirror);
    printOutput(driverMirror);
    run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", driverMirror);
    verifyIfTableNotExist(dbName + "_dupe", "acid_table_incremental", metaStoreClientMirror);
}
Also used : TException(org.apache.thrift.TException) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) Test(org.junit.Test)
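
For reference, here is a minimal sketch of the constraint check that testSkipTables performs after the ADD CONSTRAINT statement, isolated from the replication setup. The class and helper names, and the assumption of an already connected IMetaStoreClient, are illustrative only and not part of the test above.

import java.util.List;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.thrift.TException;

public class PrimaryKeyCheckSketch {

    // Returns true if the metastore reports exactly one primary-key column for the table.
    // PrimaryKeysRequest is keyed by database name and table name only.
    static boolean hasSingleColumnPrimaryKey(IMetaStoreClient client, String dbName, String tableName)
            throws TException {
        List<SQLPrimaryKey> pks = client.getPrimaryKeys(new PrimaryKeysRequest(dbName, tableName));
        return pks.size() == 1;
    }
}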

Example 2 with PrimaryKeysRequest

Use of org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest in project hive by apache.

From the class TestReplicationScenarios, method testConstraints.

@Test
public void testConstraints() throws IOException {
    String testName = "constraints";
    String dbName = createDB(testName, driver);
    String replDbName = dbName + "_dupe";
    run("CREATE TABLE " + dbName + ".tbl1(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl2(a string, b string, foreign key (a, b) references " + dbName + ".tbl1(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl3(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl7(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl8(a string, b int DEFAULT 0)", driver);
    Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
    String replDumpId = bootstrapDump.lastReplId;
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl1"));
        assertEquals(pks.size(), 2);
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(uks.size(), 1);
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl2"));
        assertEquals(fks.size(), 2);
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(nns.size(), 1);
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl7"));
        assertEquals(cks.size(), 2);
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl8"));
        assertEquals(dks.size(), 1);
    } catch (TException te) {
        assertNull(te);
    }
    run("CREATE TABLE " + dbName + ".tbl4(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl5(a string, b string, foreign key (a, b) references " + dbName + ".tbl4(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl6(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl9(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl10(a string, b int DEFAULT 0)", driver);
    Tuple incrementalDump = incrementalLoadAndVerify(dbName, replDbName);
    replDumpId = incrementalDump.lastReplId;
    String pkName = null;
    String ukName = null;
    String fkName = null;
    String nnName = null;
    String dkName1 = null;
    String ckName1 = null;
    String ckName2 = null;
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertEquals(pks.size(), 2);
        pkName = pks.get(0).getPk_name();
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(uks.size(), 1);
        ukName = uks.get(0).getUk_name();
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertEquals(fks.size(), 2);
        fkName = fks.get(0).getFk_name();
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(nns.size(), 1);
        nnName = nns.get(0).getNn_name();
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertEquals(cks.size(), 2);
        ckName1 = cks.get(0).getDc_name();
        ckName2 = cks.get(1).getDc_name();
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 1);
        dkName1 = dks.get(0).getDc_name();
    } catch (TException te) {
        assertNull(te);
    }
    String dkName2 = "custom_dk_name";
    String ckName3 = "customer_ck_name";
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN a a string CONSTRAINT " + ckName3 + " CHECK (a like 'a%')", driver);
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN b b int CONSTRAINT " + dkName2 + " DEFAULT 1 ENABLE", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 2);
        assertEquals(dks.get(1).getDefault_value(), "1");
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(cks.size(), 1);
        assertEquals(cks.get(0).getDc_name(), ckName3);
    } catch (TException te) {
        assertNull(te);
    }
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + pkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + ukName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl5 DROP CONSTRAINT `" + fkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl6 DROP CONSTRAINT `" + nnName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName2 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + ckName3 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName2 + "`", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertTrue(pks.isEmpty());
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl4"));
        assertTrue(uks.isEmpty());
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertTrue(fks.isEmpty());
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertTrue(nns.isEmpty());
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(dks.isEmpty());
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertTrue(cks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(cks.isEmpty());
        dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(dks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(cks.isEmpty());
    } catch (TException te) {
        assertNull(te);
    }
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) UniqueConstraintsRequest(org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest) TException(org.apache.thrift.TException) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraintsRequest(org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) DefaultConstraintsRequest(org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest) CheckConstraintsRequest(org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest) Test(org.junit.Test)
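
testConstraints exercises the whole family of constraint request objects; the newer ones (unique, not-null, check, default) are keyed by catalog as well as database and table, while PrimaryKeysRequest takes only database and table. Below is a minimal sketch of the lookup pattern, assuming an already connected IMetaStoreClient and a catalog name passed in by the caller (the test uses DEFAULT_CATALOG_NAME); the class and method names are illustrative.

import java.util.List;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
import org.apache.thrift.TException;

public class ConstraintLookupSketch {

    // Prints how many primary-key, unique and not-null constraints a table carries.
    // The unique/not-null requests take (catalog, database, table); the
    // primary-key request takes (database, table) only.
    static void summarizeConstraints(IMetaStoreClient client, String catName,
                                     String dbName, String tableName) throws TException {
        List<SQLPrimaryKey> pks =
            client.getPrimaryKeys(new PrimaryKeysRequest(dbName, tableName));
        List<SQLUniqueConstraint> uks =
            client.getUniqueConstraints(new UniqueConstraintsRequest(catName, dbName, tableName));
        List<SQLNotNullConstraint> nns =
            client.getNotNullConstraints(new NotNullConstraintsRequest(catName, dbName, tableName));
        System.out.println(tableName + ": " + pks.size() + " pk cols, "
            + uks.size() + " unique, " + nns.size() + " not-null");
    }
}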

Example 3 with PrimaryKeysRequest

Use of org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest in project hive by apache.

From the class GetColumnsOperation, method runInternal.

@Override
public void runInternal() throws HiveSQLException {
    setState(OperationState.RUNNING);
    LOG.info("Fetching column metadata");
    try {
        IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();
        String schemaPattern = convertSchemaPattern(schemaName);
        String tablePattern = convertIdentifierPattern(tableName, true);
        Pattern columnPattern = null;
        if (columnName != null) {
            columnPattern = Pattern.compile(convertIdentifierPattern(columnName, false));
        }
        List<String> dbNames = metastoreClient.getDatabases(schemaPattern);
        Collections.sort(dbNames);
        Map<String, List<String>> db2Tabs = new HashMap<>();
        for (String dbName : dbNames) {
            List<String> tableNames = metastoreClient.getTables(dbName, tablePattern);
            Collections.sort(tableNames);
            db2Tabs.put(dbName, tableNames);
        }
        if (isAuthV2Enabled()) {
            List<HivePrivilegeObject> privObjs = getPrivObjs(db2Tabs);
            String cmdStr = "catalog : " + catalogName + ", schemaPattern : " + schemaName + ", tablePattern : " + tableName;
            authorizeMetaGets(HiveOperationType.GET_COLUMNS, privObjs, cmdStr);
        }
        int maxBatchSize = SessionState.get().getConf().getIntVar(ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
        for (Entry<String, List<String>> dbTabs : db2Tabs.entrySet()) {
            String dbName = dbTabs.getKey();
            List<String> tableNames = dbTabs.getValue();
            for (Table table : new TableIterable(metastoreClient, dbName, tableNames, maxBatchSize)) {
                TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
                List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
                Set<String> pkColNames = new HashSet<>();
                for (SQLPrimaryKey key : primaryKeys) {
                    pkColNames.add(key.getColumn_name().toLowerCase());
                }
                for (ColumnDescriptor column : schema.getColumnDescriptors()) {
                    if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
                        continue;
                    }
                    Object[] rowData = new Object[] {
                        null,                                                // TABLE_CAT
                        table.getDbName(),                                   // TABLE_SCHEM
                        table.getTableName(),                                // TABLE_NAME
                        column.getName(),                                    // COLUMN_NAME
                        column.getType().toJavaSQLType(),                    // DATA_TYPE
                        column.getTypeName(),                                // TYPE_NAME
                        column.getTypeDescriptor().getColumnSize(),          // COLUMN_SIZE
                        null,                                                // BUFFER_LENGTH, unused
                        column.getTypeDescriptor().getDecimalDigits(),       // DECIMAL_DIGITS
                        column.getType().getNumPrecRadix(),                  // NUM_PREC_RADIX
                        pkColNames.contains(column.getName().toLowerCase())
                            ? DatabaseMetaData.columnNoNulls
                            : DatabaseMetaData.columnNullable,               // NULLABLE
                        column.getComment(),                                 // REMARKS
                        null,                                                // COLUMN_DEF
                        null,                                                // SQL_DATA_TYPE
                        null,                                                // SQL_DATETIME_SUB
                        null,                                                // CHAR_OCTET_LENGTH
                        column.getOrdinalPosition(),                         // ORDINAL_POSITION
                        pkColNames.contains(column.getName().toLowerCase()) ? "NO" : "YES", // IS_NULLABLE
                        null,                                                // SCOPE_CATALOG
                        null,                                                // SCOPE_SCHEMA
                        null,                                                // SCOPE_TABLE
                        null,                                                // SOURCE_DATA_TYPE
                        "NO"                                                 // IS_AUTO_INCREMENT
                    };
                    rowSet.addRow(rowData);
                    if (LOG.isDebugEnabled()) {
                        String debugMessage = getDebugMessage("column", RESULT_SET_SCHEMA);
                        LOG.debug(debugMessage, rowData);
                    }
                }
            }
        }
        if (LOG.isDebugEnabled() && rowSet.numRows() == 0) {
            LOG.debug("No column metadata has been returned.");
        }
        setState(OperationState.FINISHED);
        LOG.info("Fetching column metadata has been successfully finished");
    } catch (Exception e) {
        setState(OperationState.ERROR);
        throw new HiveSQLException(e);
    }
}
Also used : Pattern(java.util.regex.Pattern) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) TableSchema(org.apache.hive.service.cli.TableSchema) HashMap(java.util.HashMap) ColumnDescriptor(org.apache.hive.service.cli.ColumnDescriptor) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) TableIterable(org.apache.hadoop.hive.metastore.TableIterable) IMetaStoreClient(org.apache.hadoop.hive.metastore.IMetaStoreClient) HiveSQLException(org.apache.hive.service.cli.HiveSQLException) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ArrayList(java.util.ArrayList) List(java.util.List) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) HashSet(java.util.HashSet)
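
The operation above uses getPrimaryKeys only to build a set of lower-cased primary-key column names, which then drives the NULLABLE and IS_NULLABLE values emitted per column. Below is a minimal sketch of that step on its own; the class and method names are illustrative, and an already connected IMetaStoreClient is assumed.

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.thrift.TException;

public class PkColumnNamesSketch {

    // Collects the lower-cased names of a table's primary-key columns so that a
    // later per-column loop can test membership with a simple contains() call.
    static Set<String> primaryKeyColumnNames(IMetaStoreClient client, String dbName, String tableName)
            throws TException {
        Set<String> pkColNames = new HashSet<>();
        for (SQLPrimaryKey key : client.getPrimaryKeys(new PrimaryKeysRequest(dbName, tableName))) {
            pkColNames.add(key.getColumn_name().toLowerCase());
        }
        return pkColNames;
    }
}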

Example 4 with PrimaryKeysRequest

Use of org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest in project hive by apache.

From the class NonCatCallsWithCatalog, method createTableWithConstraints.

@Test
public void createTableWithConstraints() throws TException {
    Table parentTable = testTables[2];
    Table table = new TableBuilder().setTableName("table_in_other_catalog_with_constraints").addCol("col1", "int").addCol("col2", "varchar(32)").addCol("col3", "int").addCol("col4", "int").addCol("col5", "int").addCol("col6", "int").build(conf);
    table.unsetCatName();
    List<SQLPrimaryKey> parentPk = new SQLPrimaryKeyBuilder().onTable(parentTable).addColumn("test_col1").build(conf);
    for (SQLPrimaryKey pkcol : parentPk) {
        pkcol.unsetCatName();
    }
    client.addPrimaryKey(parentPk);
    List<SQLPrimaryKey> pk = new SQLPrimaryKeyBuilder().onTable(table).addColumn("col2").build(conf);
    for (SQLPrimaryKey pkcol : pk) {
        pkcol.unsetCatName();
    }
    List<SQLForeignKey> fk = new SQLForeignKeyBuilder().fromPrimaryKey(parentPk).onTable(table).addColumn("col1").build(conf);
    for (SQLForeignKey fkcol : fk) {
        fkcol.unsetCatName();
    }
    List<SQLDefaultConstraint> dv = new SQLDefaultConstraintBuilder().onTable(table).addColumn("col3").setDefaultVal(0).build(conf);
    for (SQLDefaultConstraint dccol : dv) {
        dccol.unsetCatName();
    }
    List<SQLNotNullConstraint> nn = new SQLNotNullConstraintBuilder().onTable(table).addColumn("col4").build(conf);
    for (SQLNotNullConstraint nncol : nn) {
        nncol.unsetCatName();
    }
    List<SQLUniqueConstraint> uc = new SQLUniqueConstraintBuilder().onTable(table).addColumn("col5").build(conf);
    for (SQLUniqueConstraint uccol : uc) {
        uccol.unsetCatName();
    }
    List<SQLCheckConstraint> cc = new SQLCheckConstraintBuilder().onTable(table).addColumn("col6").setCheckExpression("> 0").build(conf);
    for (SQLCheckConstraint cccol : cc) {
        cccol.unsetCatName();
    }
    client.createTableWithConstraints(table, pk, fk, uc, nn, dv, cc);
    PrimaryKeysRequest pkRqst = new PrimaryKeysRequest(parentTable.getDbName(), parentTable.getTableName());
    pkRqst.setCatName(parentTable.getCatName());
    List<SQLPrimaryKey> pkFetched = client.getPrimaryKeys(pkRqst);
    Assert.assertEquals(1, pkFetched.size());
    Assert.assertEquals(expectedCatalog(), pkFetched.get(0).getCatName());
    Assert.assertEquals(parentTable.getDbName(), pkFetched.get(0).getTable_db());
    Assert.assertEquals(parentTable.getTableName(), pkFetched.get(0).getTable_name());
    Assert.assertEquals("test_col1", pkFetched.get(0).getColumn_name());
    Assert.assertEquals(1, pkFetched.get(0).getKey_seq());
    Assert.assertTrue(pkFetched.get(0).isEnable_cstr());
    Assert.assertFalse(pkFetched.get(0).isValidate_cstr());
    Assert.assertFalse(pkFetched.get(0).isRely_cstr());
    Assert.assertEquals(parentTable.getCatName(), pkFetched.get(0).getCatName());
    ForeignKeysRequest fkRqst = new ForeignKeysRequest(parentTable.getDbName(), parentTable.getTableName(), table.getDbName(), table.getTableName());
    fkRqst.setCatName(table.getCatName());
    List<SQLForeignKey> fkFetched = client.getForeignKeys(fkRqst);
    Assert.assertEquals(1, fkFetched.size());
    Assert.assertEquals(expectedCatalog(), fkFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), fkFetched.get(0).getFktable_db());
    Assert.assertEquals(table.getTableName(), fkFetched.get(0).getFktable_name());
    Assert.assertEquals("col1", fkFetched.get(0).getFkcolumn_name());
    Assert.assertEquals(parentTable.getDbName(), fkFetched.get(0).getPktable_db());
    Assert.assertEquals(parentTable.getTableName(), fkFetched.get(0).getPktable_name());
    Assert.assertEquals(1, fkFetched.get(0).getKey_seq());
    Assert.assertEquals(parentTable.getTableName() + "_primary_key", fkFetched.get(0).getPk_name());
    Assert.assertTrue(fkFetched.get(0).isEnable_cstr());
    Assert.assertFalse(fkFetched.get(0).isValidate_cstr());
    Assert.assertFalse(fkFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), fkFetched.get(0).getCatName());
    NotNullConstraintsRequest nnRqst = new NotNullConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLNotNullConstraint> nnFetched = client.getNotNullConstraints(nnRqst);
    Assert.assertEquals(1, nnFetched.size());
    Assert.assertEquals(table.getDbName(), nnFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), nnFetched.get(0).getTable_name());
    Assert.assertEquals("col4", nnFetched.get(0).getColumn_name());
    Assert.assertEquals(table.getTableName() + "_not_null_constraint", nnFetched.get(0).getNn_name());
    Assert.assertTrue(nnFetched.get(0).isEnable_cstr());
    Assert.assertFalse(nnFetched.get(0).isValidate_cstr());
    Assert.assertFalse(nnFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), nnFetched.get(0).getCatName());
    UniqueConstraintsRequest ucRqst = new UniqueConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLUniqueConstraint> ucFetched = client.getUniqueConstraints(ucRqst);
    Assert.assertEquals(1, ucFetched.size());
    Assert.assertEquals(table.getDbName(), ucFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), ucFetched.get(0).getTable_name());
    Assert.assertEquals("col5", ucFetched.get(0).getColumn_name());
    Assert.assertEquals(1, ucFetched.get(0).getKey_seq());
    Assert.assertEquals(table.getTableName() + "_unique_constraint", ucFetched.get(0).getUk_name());
    Assert.assertTrue(ucFetched.get(0).isEnable_cstr());
    Assert.assertFalse(ucFetched.get(0).isValidate_cstr());
    Assert.assertFalse(ucFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), ucFetched.get(0).getCatName());
    DefaultConstraintsRequest dcRqst = new DefaultConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLDefaultConstraint> dcFetched = client.getDefaultConstraints(dcRqst);
    Assert.assertEquals(1, dcFetched.size());
    Assert.assertEquals(expectedCatalog(), dcFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), dcFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), dcFetched.get(0).getTable_name());
    Assert.assertEquals("col3", dcFetched.get(0).getColumn_name());
    Assert.assertEquals("0", dcFetched.get(0).getDefault_value());
    Assert.assertEquals(table.getTableName() + "_default_value", dcFetched.get(0).getDc_name());
    Assert.assertTrue(dcFetched.get(0).isEnable_cstr());
    Assert.assertFalse(dcFetched.get(0).isValidate_cstr());
    Assert.assertFalse(dcFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), dcFetched.get(0).getCatName());
    CheckConstraintsRequest ccRqst = new CheckConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLCheckConstraint> ccFetched = client.getCheckConstraints(ccRqst);
    Assert.assertEquals(1, ccFetched.size());
    Assert.assertEquals(expectedCatalog(), ccFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), ccFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), ccFetched.get(0).getTable_name());
    Assert.assertEquals("col6", ccFetched.get(0).getColumn_name());
    Assert.assertEquals("> 0", ccFetched.get(0).getCheck_expression());
    Assert.assertEquals(table.getTableName() + "_check_constraint", ccFetched.get(0).getDc_name());
    Assert.assertTrue(ccFetched.get(0).isEnable_cstr());
    Assert.assertFalse(ccFetched.get(0).isValidate_cstr());
    Assert.assertFalse(ccFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), ccFetched.get(0).getCatName());
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraintsRequest(org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder) SQLUniqueConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder) SQLPrimaryKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) UniqueConstraintsRequest(org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest) SQLForeignKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLCheckConstraintBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) DefaultConstraintsRequest(org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest) SQLNotNullConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLNotNullConstraintBuilder) CheckConstraintsRequest(org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest) Test(org.junit.Test)
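
In the catalog-aware test above, the catalog is not part of the PrimaryKeysRequest constructor; it is attached afterwards with setCatName before the request is sent. A minimal sketch of that pattern follows, with illustrative names and an assumed, already connected IMetaStoreClient.

import java.util.List;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.thrift.TException;

public class CatalogAwarePkLookupSketch {

    // Builds a PrimaryKeysRequest for (database, table) and then scopes it to a
    // specific catalog before asking the metastore for the key columns.
    static List<SQLPrimaryKey> primaryKeysInCatalog(IMetaStoreClient client, String catName,
                                                    String dbName, String tableName) throws TException {
        PrimaryKeysRequest rqst = new PrimaryKeysRequest(dbName, tableName);
        rqst.setCatName(catName);
        return client.getPrimaryKeys(rqst);
    }
}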

Example 5 with PrimaryKeysRequest

Use of org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest in project hive by apache.

From the class TestPrimaryKey, method getNoSuchTable.

@Test
public void getNoSuchTable() throws TException {
    PrimaryKeysRequest rqst = new PrimaryKeysRequest(DEFAULT_DATABASE_NAME, "nosuch");
    List<SQLPrimaryKey> pk = client.getPrimaryKeys(rqst);
    Assert.assertTrue(pk.isEmpty());
}
Also used : SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
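
As getNoSuchTable shows, asking for the primary keys of a table that does not exist returns an empty list rather than throwing, so callers can treat "no primary key" and "no such table" uniformly with isEmpty(). A minimal sketch under the same assumption of a connected IMetaStoreClient (the helper name is illustrative):

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.thrift.TException;

public class PkExistenceSketch {

    // Relies on getPrimaryKeys returning an empty list (not an error) for
    // unknown tables, as demonstrated by the test above.
    static boolean hasPrimaryKey(IMetaStoreClient client, String dbName, String tableName)
            throws TException {
        return !client.getPrimaryKeys(new PrimaryKeysRequest(dbName, tableName)).isEmpty();
    }
}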

Aggregations

PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest)16 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)15 Test (org.junit.Test)13 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)9 Table (org.apache.hadoop.hive.metastore.api.Table)8 SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder)8 ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest)3 SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey)3 TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)3 IMetaStoreClient (org.apache.hadoop.hive.metastore.IMetaStoreClient)2 CheckConstraintsRequest (org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest)2 DefaultConstraintsRequest (org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest)2 NotNullConstraintsRequest (org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest)2 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)2 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)2 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)2 SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)2 UniqueConstraintsRequest (org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest)2 SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder)2 HiveSQLException (org.apache.hive.service.cli.HiveSQLException)2