Search in sources :

Example 1 with SQLUniqueConstraint

use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.

From class DDLTask, method createTable:

/**
 * Create a new table.
 *
 * @param db
 *          The database in question.
 * @param crtTbl
 *          This is the table we're creating.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int createTable(Hive db, CreateTableDesc crtTbl) throws HiveException {
    // Materialize the descriptor into a Table object and pull out any declared constraints.
    Table tbl = crtTbl.toTable(conf);
    List<SQLPrimaryKey> primaryKeys = crtTbl.getPrimaryKeys();
    List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
    List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
    List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
    List<SQLDefaultConstraint> defaultConstraints = crtTbl.getDefaultConstraints();
    List<SQLCheckConstraint> checkConstraints = crtTbl.getCheckConstraints();
    LOG.debug("creating table {} on {}", tbl.getFullyQualifiedName(), tbl.getDataLocation());
    if (crtTbl.getReplicationSpec().isInReplicationScope() && !crtTbl.getReplaceMode()) {
        // Replication-scope semantics: unless a replacement was explicitly requested,
        // check whether the object already exists. If it does and our event is allowed
        // to replace it, switch to replace mode; otherwise the existing state is newer
        // than this update and the create is a no-op.
        Table existingTable = db.getTable(tbl.getDbName(), tbl.getTableName(), false);
        if (existingTable != null) {
            if (!crtTbl.getReplicationSpec().allowEventReplacementInto(existingTable.getParameters())) {
                LOG.debug("DDLTask: Create Table is skipped as table {} is newer than update", crtTbl.getTableName());
                return 0;
            }
            crtTbl.setReplaceMode(true);
        }
    }
    // Whether any kind of table-level constraint was declared on the new table.
    boolean hasConstraints = (primaryKeys != null && !primaryKeys.isEmpty())
        || (foreignKeys != null && !foreignKeys.isEmpty())
        || (uniqueConstraints != null && !uniqueConstraints.isEmpty())
        || (notNullConstraints != null && !notNullConstraints.isEmpty())
        || (defaultConstraints != null && !defaultConstraints.isEmpty())
        || (checkConstraints != null && !checkConstraints.isEmpty());
    if (crtTbl.getReplaceMode()) {
        // Replace-mode creates are really alters using CreateTableDesc.
        db.alterTable(tbl, null);
    } else {
        if (hasConstraints) {
            db.createTable(tbl, crtTbl.getIfNotExists(), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
        } else {
            db.createTable(tbl, crtTbl.getIfNotExists());
        }
        Long mmWriteId = crtTbl.getInitialMmWriteId();
        if (crtTbl.isCTAS() || mmWriteId != null) {
            // Re-read the table as the metastore created it (location, columns, etc.).
            Table createdTable = db.getTable(tbl.getDbName(), tbl.getTableName());
            if (crtTbl.isCTAS()) {
                // Record lineage from the query result to the newly created table.
                DataContainer dc = new DataContainer(createdTable.getTTable());
                queryState.getLineageState().setLineage(createdTable.getPath(), dc, createdTable.getCols());
            }
        }
    }
    addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
    return 0;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) TextMetaDataTable(org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataTable) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) DataContainer(org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)

Example 2 with SQLUniqueConstraint

use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.

From class DDLSemanticAnalyzer, method analyzeAlterTableAddConstraint:

/**
 * Translates an ALTER TABLE ... ADD CONSTRAINT AST into an AlterTableDesc task.
 * Only UNIQUE, PRIMARY KEY and FOREIGN KEY constraints are recognized here;
 * any other constraint token is rejected.
 *
 * @param ast the ADD CONSTRAINT subtree
 * @param tableName the (possibly qualified) name of the table being altered
 * @throws SemanticException if the constraint type is not recognized
 */
private void analyzeAlterTableAddConstraint(ASTNode ast, String tableName) throws SemanticException {
    ASTNode parent = (ASTNode) ast.getParent();
    String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
    ASTNode constraintNode = (ASTNode) ast.getChild(0);
    // Only the list matching the constraint type ends up non-empty.
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    int tokenType = constraintNode.getToken().getType();
    if (tokenType == HiveParser.TOK_UNIQUE) {
        BaseSemanticAnalyzer.processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], constraintNode, uniqueConstraints);
    } else if (tokenType == HiveParser.TOK_PRIMARY_KEY) {
        BaseSemanticAnalyzer.processPrimaryKeys(qualifiedTabName[0], qualifiedTabName[1], constraintNode, primaryKeys);
    } else if (tokenType == HiveParser.TOK_FOREIGN_KEY) {
        BaseSemanticAnalyzer.processForeignKeys(qualifiedTabName[0], qualifiedTabName[1], constraintNode, foreignKeys);
    } else {
        throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText()));
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, primaryKeys, foreignKeys, uniqueConstraints, null);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AlterTableDesc(org.apache.hadoop.hive.ql.plan.AlterTableDesc) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) DDLWork(org.apache.hadoop.hive.ql.plan.DDLWork)

Example 3 with SQLUniqueConstraint

use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.

From class DbNotificationListener, method onAddUniqueConstraint:

/**
 * Records an ADD UNIQUE CONSTRAINT metastore event in the notification log.
 * The event's catalog/db/table names are taken from the first constraint
 * column; when no catalog is set, the default catalog is used.
 *
 * @param addUniqueConstraintEvent add unique constraint event
 * @throws MetaException
 */
@Override
public void onAddUniqueConstraint(AddUniqueConstraintEvent addUniqueConstraintEvent) throws MetaException {
    List<SQLUniqueConstraint> cols = addUniqueConstraintEvent.getUniqueConstraintCols();
    if (!cols.isEmpty()) {
        // Reuse the already-fetched constraint list rather than calling the
        // event accessor a second time.
        AddUniqueConstraintMessage msg = MessageBuilder.getInstance().buildAddUniqueConstraintMessage(cols);
        NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_UNIQUECONSTRAINT.toString(), msgEncoder.getSerializer().serialize(msg));
        // All entries belong to the same constraint, so the first column is
        // representative for catalog/db/table naming.
        event.setCatName(cols.get(0).isSetCatName() ? cols.get(0).getCatName() : DEFAULT_CATALOG_NAME);
        event.setDbName(cols.get(0).getTable_db());
        event.setTableName(cols.get(0).getTable_name());
        process(event, addUniqueConstraintEvent);
    }
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) AddUniqueConstraintMessage(org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage) NotificationEvent(org.apache.hadoop.hive.metastore.api.NotificationEvent)

Example 4 with SQLUniqueConstraint

use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.

From class AlterTableAddConstraintAnalyzer, method analyzeCommand:

/**
 * Builds the DDL task for an ALTER TABLE ... ADD CONSTRAINT command.
 * Recognizes PRIMARY KEY, FOREIGN KEY, UNIQUE and CHECK constraints; for
 * transactional tables the resulting descriptor is also registered as an
 * ACID DDL operation.
 *
 * @throws SemanticException if the constraint type is not recognized
 */
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
    // the user specified a catalog
    // Only the list matching the parsed constraint type is populated.
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    ASTNode node = (ASTNode) command.getChild(0);
    switch(node.getToken().getType()) {
        case HiveParser.TOK_PRIMARY_KEY:
            ConstraintsUtils.processPrimaryKeys(tableName, node, primaryKeys);
            break;
        case HiveParser.TOK_FOREIGN_KEY:
            ConstraintsUtils.processForeignKeys(tableName, node, foreignKeys);
            break;
        case HiveParser.TOK_UNIQUE:
            ConstraintsUtils.processUniqueConstraints(tableName, node, uniqueConstraints);
            break;
        case HiveParser.TOK_CHECK_CONSTRAINT:
            ConstraintsUtils.processCheckConstraints(tableName, node, null, checkConstraints, command, ctx.getTokenRewriteStream());
            break;
        default:
            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(node.getToken().getText()));
    }
    Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, checkConstraints);
    AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints);
    Table table = getTable(tableName);
    if (AcidUtils.isTransactionalTable(table)) {
        // Constraint changes on transactional tables must be tracked as ACID DDL.
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 5 with SQLUniqueConstraint

use of org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint in project hive by apache.

From class TestReplicationScenarios, method testConstraints:

@Test
public void testConstraints() throws IOException {
    // End-to-end replication test for all six constraint kinds (PK, FK, UNIQUE,
    // NOT NULL, CHECK, DEFAULT) across bootstrap dump, incremental dump, and
    // constraint-drop incremental dump.
    String testName = "constraints";
    String dbName = createDB(testName, driver);
    String replDbName = dbName + "_dupe";
    // Phase 1: create tables with constraints BEFORE the bootstrap dump.
    run("CREATE TABLE " + dbName + ".tbl1(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl2(a string, b string, foreign key (a, b) references " + dbName + ".tbl1(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl3(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl7(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl8(a string, b int DEFAULT 0)", driver);
    Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
    String replDumpId = bootstrapDump.lastReplId;
    // Verify the bootstrap load replicated every constraint with the expected
    // cardinality (2-column PK/FK, single UNIQUE/NOT NULL/DEFAULT, 2 CHECKs).
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl1"));
        assertEquals(pks.size(), 2);
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(uks.size(), 1);
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl2"));
        assertEquals(fks.size(), 2);
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(nns.size(), 1);
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl7"));
        assertEquals(cks.size(), 2);
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl8"));
        assertEquals(dks.size(), 1);
    } catch (TException te) {
        // A metastore failure is a test failure: assertNull surfaces it.
        assertNull(te);
    }
    // Phase 2: create an equivalent set of tables AFTER bootstrap so the
    // constraints travel through an incremental dump instead.
    run("CREATE TABLE " + dbName + ".tbl4(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl5(a string, b string, foreign key (a, b) references " + dbName + ".tbl4(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl6(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl9(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl10(a string, b int DEFAULT 0)", driver);
    Tuple incrementalDump = incrementalLoadAndVerify(dbName, replDbName);
    replDumpId = incrementalDump.lastReplId;
    // Capture the replicated (auto-generated) constraint names so they can be
    // dropped by name later in the test.
    String pkName = null;
    String ukName = null;
    String fkName = null;
    String nnName = null;
    String dkName1 = null;
    String ckName1 = null;
    String ckName2 = null;
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertEquals(pks.size(), 2);
        pkName = pks.get(0).getPk_name();
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(uks.size(), 1);
        ukName = uks.get(0).getUk_name();
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertEquals(fks.size(), 2);
        fkName = fks.get(0).getFk_name();
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(nns.size(), 1);
        nnName = nns.get(0).getNn_name();
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertEquals(cks.size(), 2);
        ckName1 = cks.get(0).getDc_name();
        ckName2 = cks.get(1).getDc_name();
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 1);
        dkName1 = dks.get(0).getDc_name();
    } catch (TException te) {
        assertNull(te);
    }
    // Phase 3: add user-named CHECK and DEFAULT constraints via ALTER TABLE
    // CHANGE COLUMN and verify they replicate. NOTE(review): the CHANGE COLUMN
    // on column `a` appears to replace its earlier CHECK, hence cks.size() == 1.
    String dkName2 = "custom_dk_name";
    String ckName3 = "customer_ck_name";
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN a a string CONSTRAINT " + ckName3 + " CHECK (a like 'a%')", driver);
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN b b int CONSTRAINT " + dkName2 + " DEFAULT 1 ENABLE", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 2);
        assertEquals(dks.get(1).getDefault_value(), "1");
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(cks.size(), 1);
        assertEquals(cks.get(0).getDc_name(), ckName3);
    } catch (TException te) {
        assertNull(te);
    }
    // Phase 4: drop every constraint by name on the source and verify the drops
    // replicate, leaving no constraints on the mirror.
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + pkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + ukName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl5 DROP CONSTRAINT `" + fkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl6 DROP CONSTRAINT `" + nnName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName2 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + ckName3 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName2 + "`", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertTrue(pks.isEmpty());
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl4"));
        assertTrue(uks.isEmpty());
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertTrue(fks.isEmpty());
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertTrue(nns.isEmpty());
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(dks.isEmpty());
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertTrue(cks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(cks.isEmpty());
        // NOTE(review): tbl12 is never created in this test — these two lookups
        // presumably verify that querying a nonexistent table yields an empty
        // constraint list rather than an error; confirm intent.
        dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(dks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(cks.isEmpty());
    } catch (TException te) {
        assertNull(te);
    }
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) UniqueConstraintsRequest(org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest) TException(org.apache.thrift.TException) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraintsRequest(org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) DefaultConstraintsRequest(org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest) CheckConstraintsRequest(org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest) Test(org.junit.Test)

Aggregations

SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint)32 SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)16 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)15 SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)15 Test (org.junit.Test)14 SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey)13 ArrayList (java.util.ArrayList)11 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)11 SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey)11 UniqueConstraintsRequest (org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest)11 SQLUniqueConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder)10 Table (org.apache.hadoop.hive.metastore.api.Table)7 MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint)5 InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException)4 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)3 SQLDefaultConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder)3 SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder)3 TableBuilder (org.apache.hadoop.hive.metastore.client.builder.TableBuilder)3 AddUniqueConstraintMessage (org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage)3 Table (org.apache.hadoop.hive.ql.metadata.Table)3