Example 6 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class HBaseUtils, method deserializeForeignKeys.

static List<SQLForeignKey> deserializeForeignKeys(String dbName, String tableName, byte[] value) throws InvalidProtocolBufferException {
    List<SQLForeignKey> result = new ArrayList<>();
    HbaseMetastoreProto.ForeignKeys protoConstraints = HbaseMetastoreProto.ForeignKeys.parseFrom(value);
    for (HbaseMetastoreProto.ForeignKeys.ForeignKey protoFk : protoConstraints.getFksList()) {
        for (HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn protoFkCol : protoFk.getColsList()) {
            // One SQLForeignKey row per column: the rows of a multi-column constraint
            // share the fk name and are distinguished by their key sequence.
            result.add(new SQLForeignKey(
                protoFk.getReferencedDbName(), protoFk.getReferencedTableName(), protoFkCol.getReferencedColumnName(),
                dbName, tableName, protoFkCol.getColumnName(),
                protoFkCol.getKeySeq(), protoFk.getUpdateRule(), protoFk.getDeleteRule(),
                protoFk.getFkName(), protoFk.getReferencedPkName(),
                protoFk.getEnableConstraint(), protoFk.getValidateConstraint(), protoFk.getRelyConstraint()));
        }
    }
    return result;
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList)
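
The 14-argument SQLForeignKey constructor used here orders its fields as: referenced (parent) database, table, and column; referencing (child) database, table, and column; key sequence; update and delete rule codes; foreign key and primary key names; and the enable, validate, and rely flags. A minimal sketch of building and inspecting one such Thrift bean directly, assuming only the metastore API jar on the classpath and using purely illustrative values:

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

public class ForeignKeyBeanSketch {
    public static void main(String[] args) {
        // Argument order mirrors the call in deserializeForeignKeys above:
        // parent (referenced) side, child (referencing) side, key sequence,
        // update rule, delete rule, constraint names, then the three flags.
        SQLForeignKey fk = new SQLForeignKey(
            "default", "pktable", "pcol0",   // referenced db, table, column (illustrative)
            "default", "fktable", "col0",    // referencing db, table, column (illustrative)
            0,                               // key sequence within the constraint
            1, 2,                            // update rule, delete rule (numeric codes)
            "test_fk", "test_pk",            // fk name, referenced pk name
            true, false, false);             // enable, validate, rely
        System.out.println(fk.getFktable_name() + "." + fk.getFkcolumn_name()
            + " references " + fk.getPktable_name() + "." + fk.getPkcolumn_name());
    }
}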

Example 7 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class TestHBaseStore, method addMultiColForeignKey.

// Test that we can create a foreign key with multiple columns
@Test
public void addMultiColForeignKey() throws Exception {
    String tableName = "mcfktable";
    String pkTable = "pktable";
    String pkName = "test_pk";
    String fkName = "test_fk";
    String[] fkColNames = { "col0", "col1", "col2" };
    String[] pkColNames = { "pcol0", "pcol1" };
    Table table = createMultiColumnTable(tableName, "int", "double", "timestamp");
    List<SQLForeignKey> fk = Arrays.asList(
        new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, fkName, pkName, true, false, false),
        new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, fkColNames[2], 1, 1, 2, fkName, pkName, true, false, false));
    store.createTable(table);
    store.addForeignKeys(fk);
    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(2, fk.size());
    SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]);
    Arrays.sort(sorted, new Comparator<SQLForeignKey>() {

        @Override
        public int compare(SQLForeignKey o1, SQLForeignKey o2) {
            if (o1.getFk_name().equals(o2.getFk_name())) {
                return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name());
            } else {
                return o1.getFk_name().compareTo(o2.getFk_name());
            }
        }
    });
    for (int i = 0; i < 2; i++) {
        Assert.assertEquals(DB, sorted[i].getPktable_db());
        Assert.assertEquals(pkTable, sorted[i].getPktable_name());
        Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name());
        Assert.assertEquals(DB, sorted[i].getFktable_db());
        Assert.assertEquals(tableName, sorted[i].getFktable_name());
        Assert.assertEquals(fkColNames[i + 1], sorted[i].getFkcolumn_name());
        Assert.assertEquals(i, sorted[i].getKey_seq());
        Assert.assertEquals(1, sorted[i].getUpdate_rule());
        Assert.assertEquals(2, sorted[i].getDelete_rule());
        Assert.assertEquals(fkName, sorted[i].getFk_name());
        Assert.assertEquals(pkName, sorted[i].getPk_name());
        Assert.assertTrue(sorted[i].isEnable_cstr());
        Assert.assertFalse(sorted[i].isValidate_cstr());
        Assert.assertFalse(sorted[i].isRely_cstr());
    }
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Test(org.junit.Test)
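
The anonymous Comparator in this test orders the per-column rows by constraint name and then by referencing column name. On Java 8 and later the same ordering can be expressed with Comparator.comparing; a stand-alone equivalent, not part of the Hive test itself:

import java.util.Comparator;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

final class ForeignKeyOrdering {
    // Same ordering the anonymous Comparator above establishes: rows belonging
    // to the same named constraint are grouped together, and within a constraint
    // the rows are ordered by the referencing column name.
    static final Comparator<SQLForeignKey> BY_NAME_THEN_COLUMN =
        Comparator.comparing(SQLForeignKey::getFk_name)
                  .thenComparing(SQLForeignKey::getFkcolumn_name);

    static void sortInPlace(List<SQLForeignKey> keys) {
        keys.sort(BY_NAME_THEN_COLUMN);
    }
}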

Example 8 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class TestHBaseStore, method addMultiForeignKeys.

// Test that we can add 2 foreign keys at once
@Test
public void addMultiForeignKeys() throws Exception {
    String tableName = "mcfktable";
    String pkTable = "pktable";
    String pkTable2 = "pktable2";
    String pkName = "test_pk";
    String pkName2 = "test_pk2";
    String fkName = "test_fk";
    String fkName2 = "test_fk2";
    String[] fkColNames = { "col0", "col1", "col2" };
    String[] pkColNames = { "pcol0", "pcol1" };
    String[] pkColNames2 = { "p2col0" };
    Table table = createMultiColumnTable(tableName, "int", "double", "timestamp");
    List<SQLForeignKey> fk = Arrays.asList(
        new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, fkName, pkName, true, false, true),
        new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, fkColNames[2], 1, 1, 2, fkName, pkName, true, false, true),
        new SQLForeignKey(DB, pkTable2, pkColNames2[0], DB, tableName, fkColNames[0], 0, 1, 2, fkName2, pkName2, true, false, true));
    store.createTable(table);
    store.addForeignKeys(fk);
    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(2, fk.size());
    SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]);
    Arrays.sort(sorted, new Comparator<SQLForeignKey>() {

        @Override
        public int compare(SQLForeignKey o1, SQLForeignKey o2) {
            if (o1.getFk_name().equals(o2.getFk_name())) {
                return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name());
            } else {
                return o1.getFk_name().compareTo(o2.getFk_name());
            }
        }
    });
    for (int i = 0; i < 2; i++) {
        Assert.assertEquals(DB, sorted[i].getPktable_db());
        Assert.assertEquals(pkTable, sorted[i].getPktable_name());
        Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name());
        Assert.assertEquals(DB, sorted[i].getFktable_db());
        Assert.assertEquals(tableName, sorted[i].getFktable_name());
        Assert.assertEquals(fkColNames[i + 1], sorted[i].getFkcolumn_name());
        Assert.assertEquals(i, sorted[i].getKey_seq());
        Assert.assertEquals(1, sorted[i].getUpdate_rule());
        Assert.assertEquals(2, sorted[i].getDelete_rule());
        Assert.assertEquals(fkName, sorted[i].getFk_name());
        Assert.assertEquals(pkName, sorted[i].getPk_name());
        Assert.assertTrue(sorted[i].isEnable_cstr());
        Assert.assertFalse(sorted[i].isValidate_cstr());
        Assert.assertTrue(sorted[i].isRely_cstr());
    }
    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(1, fk.size());
    Assert.assertEquals(DB, fk.get(0).getPktable_db());
    Assert.assertEquals(pkTable2, fk.get(0).getPktable_name());
    Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name());
    Assert.assertEquals(DB, fk.get(0).getFktable_db());
    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
    Assert.assertEquals(0, fk.get(0).getKey_seq());
    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
    Assert.assertEquals(2, fk.get(0).getDelete_rule());
    Assert.assertEquals(fkName2, fk.get(0).getFk_name());
    Assert.assertEquals(pkName2, fk.get(0).getPk_name());
    Assert.assertTrue(fk.get(0).isEnable_cstr());
    Assert.assertFalse(fk.get(0).isValidate_cstr());
    Assert.assertTrue(fk.get(0).isRely_cstr());
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Test(org.junit.Test)
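
Both tests rely on the fact that getForeignKeys returns one SQLForeignKey row per column, so a multi-column constraint comes back as several entries sharing the same fk_name. A small sketch, not taken from the Hive code base, of regrouping that flat list into logical constraints:

import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;

final class ForeignKeyGrouping {
    // Rebuild the logical constraints from the per-column rows: each map entry
    // is one named constraint, with its column rows kept in key_seq order.
    static Map<String, List<SQLForeignKey>> byConstraintName(List<SQLForeignKey> rows) {
        return rows.stream()
            .sorted(Comparator.comparingInt(SQLForeignKey::getKey_seq))
            .collect(Collectors.groupingBy(
                SQLForeignKey::getFk_name,
                LinkedHashMap::new,
                Collectors.toList()));
    }
}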

Example 9 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class DDLTask, method createTable.

/**
 * Create a new table.
 *
 * @param db
 *          The database in question.
 * @param crtTbl
 *          This is the table we're creating.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
    // create the table
    Table tbl = crtTbl.toTable(conf);
    List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys();
    List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
    List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
    List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
    List<SQLDefaultConstraint> defaultConstraints = crtTbl.getDefaultConstraints();
    List<SQLCheckConstraint> checkConstraints = crtTbl.getCheckConstraints();
    LOG.debug("creating table {} on {}", tbl.getFullyQualifiedName(), tbl.getDataLocation());
    if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())) {
        // if this is a replication spec, then replace-mode semantics might apply.
        // if we're already asking for a table replacement, then we can skip this check.
        // however, otherwise, if in replication scope, and we've not been explicitly asked
        // to replace, we should check if the object we're looking at exists, and if so,
        // trigger replace-mode semantics.
        Table existingTable = db.getTable(tbl.getDbName(), tbl.getTableName(), false);
        if (existingTable != null) {
            if (crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable.getParameters())) {
                // we replace existing table.
                crtTbl.setReplaceMode(true);
            } else {
                LOG.debug("DDLTask: Create Table is skipped as table {} is newer than update", crtTbl.getTableName());
                // no replacement, the existing table state is newer than our update.
                return 0;
            }
        }
    }
    // create the table
    if (crtTbl.getReplaceMode()) {
        // replace-mode creates are really alters using CreateTableDesc.
        db.alterTable(tbl, null);
    } else {
        if ((foreignKeys != null && foreignKeys.size() > 0)
                || (primaryKeys != null && primaryKeys.size() > 0)
                || (uniqueConstraints != null && uniqueConstraints.size() > 0)
                || (notNullConstraints != null && notNullConstraints.size() > 0)
                || (checkConstraints != null && checkConstraints.size() > 0)
                || (defaultConstraints != null && defaultConstraints.size() > 0)) {
            db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
        } else {
            db.createTable(tbl, crtTbl.getIfNotExists());
        }
        Long mmWriteId = crtTbl.getInitialMmWriteId();
        if (crtTbl.isCTAS() || mmWriteId != null) {
            Table createdTable = db.getTable(tbl.getDbName(), tbl.getTableName());
            if (crtTbl.isCTAS()) {
                DataContainer dc = new DataContainer(createdTable.getTTable());
                queryState.getLineageState().setLineage(createdTable.getPath(), dc, createdTable.getCols());
            }
        }
    }
    addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    return 0;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) TextMetaDataTable(org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) DataContainer(org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
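
The branch that picks the constraint-aware createTable overload only needs to know whether any of the six lists is non-empty. As a readability sketch of my own (not a refactoring present in DDLTask), the long condition could be folded into a helper:

import java.util.List;

final class DdlTaskSupport {
    // Hypothetical helper: true if any of the given constraint lists carries
    // at least one element. The call site above would then read
    // if (anyConstraints(primaryKeys, foreignKeys, uniqueConstraints,
    //                    notNullConstraints, defaultConstraints, checkConstraints)) { ... }
    static boolean anyConstraints(List<?>... constraintLists) {
        for (List<?> list : constraintLists) {
            if (list != null && !list.isEmpty()) {
                return true;
            }
        }
        return false;
    }
}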

Example 10 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class DDLSemanticAnalyzer, method analyzeAlterTableAddConstraint.

private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException {
    ASTNode parent = (ASTNode) ast.getParent();
    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
    ASTNode child = (ASTNode) ast.getChild(0);
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    switch(child.getToken().getType()) {
        case HiveParser.TOK_UNIQUE:
            BaseSemanticAnalyzer.processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], child, uniqueConstraints);
            break;
        case HiveParser.TOK_PRIMARY_KEY:
            BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], child, primaryKeys);
            break;
        case HiveParser.TOK_FOREIGN_KEY:
            BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], child, foreignKeys);
            break;
        default:
            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(child.getToken().getText()));
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys, uniqueConstraints, null);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)
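
After the switch runs, exactly one of the three lists is populated, and that is what AlterTableDesc carries into the DDLWork. A hypothetical helper, not part of DDLSemanticAnalyzer, that reports which constraint type an ALTER TABLE ... ADD CONSTRAINT statement produced:

import java.util.List;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;

final class AddConstraintKind {
    // Illustrative only: summarizes which of the three lists the analyzer filled.
    static String describe(List<SQLPrimaryKey> primaryKeys,
                           List<SQLForeignKey> foreignKeys,
                           List<SQLUniqueConstraint> uniqueConstraints) {
        if (!primaryKeys.isEmpty()) {
            return "PRIMARY KEY over " + primaryKeys.size() + " column(s)";
        }
        if (!foreignKeys.isEmpty()) {
            return "FOREIGN KEY with " + foreignKeys.size() + " column mapping(s)";
        }
        if (!uniqueConstraints.isEmpty()) {
            return "UNIQUE constraint over " + uniqueConstraints.size() + " column(s)";
        }
        return "no constraint parsed";
    }
}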

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 46
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 28
Test (org.junit.Test): 20
Table (org.apache.hadoop.hive.metastore.api.Table): 19
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 15
ArrayList (java.util.ArrayList): 13
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 13
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13
SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder): 13
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder): 13
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 12
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 11
ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest): 9
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 6
IOException (java.io.IOException): 5
HashMap (java.util.HashMap): 5
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 5
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 5
TApplicationException (org.apache.thrift.TApplicationException): 5
Tree (org.antlr.runtime.tree.Tree): 3