Example 11 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class TestReplicationScenarios, method testConstraints:

@Test
public void testConstraints() throws IOException {
    String testName = "constraints";
    String dbName = createDB(testName, driver);
    String replDbName = dbName + "_dupe";
    run("CREATE TABLE " + dbName + ".tbl1(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl2(a string, b string, foreign key (a, b) references " + dbName + ".tbl1(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl3(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl7(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl8(a string, b int DEFAULT 0)", driver);
    Tuple bootstrapDump = bootstrapLoadAndVerify(dbName, replDbName);
    String replDumpId = bootstrapDump.lastReplId;
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl1"));
        assertEquals(pks.size(), 2);
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(uks.size(), 1);
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl2"));
        assertEquals(fks.size(), 2);
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl3"));
        assertEquals(nns.size(), 1);
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl7"));
        assertEquals(cks.size(), 2);
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl8"));
        assertEquals(dks.size(), 1);
    } catch (TException te) {
        assertNull(te);
    }
    run("CREATE TABLE " + dbName + ".tbl4(a string, b string, primary key (a, b) disable novalidate rely)", driver);
    run("CREATE TABLE " + dbName + ".tbl5(a string, b string, foreign key (a, b) references " + dbName + ".tbl4(a, b) disable novalidate)", driver);
    run("CREATE TABLE " + dbName + ".tbl6(a string, b string not null disable, unique (a) disable)", driver);
    run("CREATE TABLE " + dbName + ".tbl9(a string CHECK (a like 'a%'), price double CHECK (price > 0 AND price <= 1000))", driver);
    run("CREATE TABLE " + dbName + ".tbl10(a string, b int DEFAULT 0)", driver);
    Tuple incrementalDump = incrementalLoadAndVerify(dbName, replDbName);
    replDumpId = incrementalDump.lastReplId;
    String pkName = null;
    String ukName = null;
    String fkName = null;
    String nnName = null;
    String dkName1 = null;
    String ckName1 = null;
    String ckName2 = null;
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertEquals(pks.size(), 2);
        pkName = pks.get(0).getPk_name();
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(uks.size(), 1);
        ukName = uks.get(0).getUk_name();
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertEquals(fks.size(), 2);
        fkName = fks.get(0).getFk_name();
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertEquals(nns.size(), 1);
        nnName = nns.get(0).getNn_name();
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertEquals(cks.size(), 2);
        ckName1 = cks.get(0).getDc_name();
        ckName2 = cks.get(1).getDc_name();
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 1);
        dkName1 = dks.get(0).getDc_name();
    } catch (TException te) {
        assertNull(te);
    }
    String dkName2 = "custom_dk_name";
    String ckName3 = "customer_ck_name";
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN a a string CONSTRAINT " + ckName3 + " CHECK (a like 'a%')", driver);
    run("ALTER TABLE " + dbName + ".tbl10 CHANGE COLUMN b b int CONSTRAINT " + dkName2 + " DEFAULT 1 ENABLE", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(dks.size(), 2);
        assertEquals(dks.get(1).getDefault_value(), "1");
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertEquals(cks.size(), 1);
        assertEquals(cks.get(0).getDc_name(), ckName3);
    } catch (TException te) {
        assertNull(te);
    }
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + pkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl4 DROP CONSTRAINT `" + ukName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl5 DROP CONSTRAINT `" + fkName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl6 DROP CONSTRAINT `" + nnName + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl9 DROP CONSTRAINT `" + ckName2 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + ckName3 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName1 + "`", driver);
    run("ALTER TABLE " + dbName + ".tbl10 DROP CONSTRAINT `" + dkName2 + "`", driver);
    incrementalLoadAndVerify(dbName, replDbName);
    try {
        List<SQLPrimaryKey> pks = metaStoreClientMirror.getPrimaryKeys(new PrimaryKeysRequest(replDbName, "tbl4"));
        assertTrue(pks.isEmpty());
        List<SQLUniqueConstraint> uks = metaStoreClientMirror.getUniqueConstraints(new UniqueConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl4"));
        assertTrue(uks.isEmpty());
        List<SQLForeignKey> fks = metaStoreClientMirror.getForeignKeys(new ForeignKeysRequest(null, null, replDbName, "tbl5"));
        assertTrue(fks.isEmpty());
        List<SQLNotNullConstraint> nns = metaStoreClientMirror.getNotNullConstraints(new NotNullConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl6"));
        assertTrue(nns.isEmpty());
        List<SQLDefaultConstraint> dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(dks.isEmpty());
        List<SQLCheckConstraint> cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl9"));
        assertTrue(cks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl10"));
        assertTrue(cks.isEmpty());
        dks = metaStoreClientMirror.getDefaultConstraints(new DefaultConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(dks.isEmpty());
        cks = metaStoreClientMirror.getCheckConstraints(new CheckConstraintsRequest(DEFAULT_CATALOG_NAME, replDbName, "tbl12"));
        assertTrue(cks.isEmpty());
    } catch (TException te) {
        assertNull(te);
    }
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) UniqueConstraintsRequest(org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest) TException(org.apache.thrift.TException) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraintsRequest(org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) DefaultConstraintsRequest(org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest) CheckConstraintsRequest(org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest) Test(org.junit.Test)
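
The lookup pattern this test exercises is uniform across constraint types: each type has a matching request object and client call. Below is a minimal sketch, not taken from the Hive sources, assuming a connected IMetaStoreClient; the helper class and parameter names are illustrative, while the getForeignKeys/ForeignKeysRequest signatures match the calls in the test above.

import java.util.List;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.thrift.TException;

public class ForeignKeyLookup {
    // Passing null for the parent db/table returns every foreign key defined
    // on the child table, regardless of which table it references; this is how
    // the test above queries tbl2 and tbl5.
    static List<SQLForeignKey> foreignKeysOf(IMetaStoreClient client, String dbName,
            String tableName) throws TException {
        return client.getForeignKeys(new ForeignKeysRequest(null, null, dbName, tableName));
    }
}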

Example 12 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class AlterTableAddConstraintAnalyzer, method analyzeCommand:

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    // TODO CAT - for now always use the default catalog.  Eventually will want to see if
    // the user specified a catalog
    List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
    List<SQLForeignKey> foreignKeys = new ArrayList<>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    ASTNode constraintNode = (ASTNode) command.getChild(0);
    switch(constraintNode.getToken().getType()) {
        case HiveParser.TOK_UNIQUE:
            ConstraintsUtils.processUniqueConstraints(tableName, constraintNode, uniqueConstraints);
            break;
        case HiveParser.TOK_PRIMARY_KEY:
            ConstraintsUtils.processPrimaryKeys(tableName, constraintNode, primaryKeys);
            break;
        case HiveParser.TOK_FOREIGN_KEY:
            ConstraintsUtils.processForeignKeys(tableName, constraintNode, foreignKeys);
            break;
        case HiveParser.TOK_CHECK_CONSTRAINT:
            ConstraintsUtils.processCheckConstraints(tableName, constraintNode, null, checkConstraints, command, ctx.getTokenRewriteStream());
            break;
        default:
            throw new SemanticException(ErrorMsg.NOT_RECOGNIZED_CONSTRAINT.getMsg(constraintNode.getToken().getText()));
    }
    Constraints constraints = new Constraints(primaryKeys, foreignKeys, null, uniqueConstraints, null, checkConstraints);
    AlterTableAddConstraintDesc desc = new AlterTableAddConstraintDesc(tableName, null, constraints);
    Table table = getTable(tableName);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) Table(org.apache.hadoop.hive.ql.metadata.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) Constraints(org.apache.hadoop.hive.ql.ddl.table.constraint.Constraints) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
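
For orientation, these are the DDL shapes that reach each case label above, written in the run(...) style of Example 11. This is a hedged sketch: the statements follow Hive's ALTER TABLE ... ADD CONSTRAINT grammar, and the table, column, and constraint names are made up.

// TOK_PRIMARY_KEY
run("ALTER TABLE t ADD CONSTRAINT pk1 PRIMARY KEY (a) DISABLE NOVALIDATE RELY", driver);
// TOK_UNIQUE
run("ALTER TABLE t ADD CONSTRAINT uq1 UNIQUE (b) DISABLE NOVALIDATE", driver);
// TOK_FOREIGN_KEY
run("ALTER TABLE t ADD CONSTRAINT fk1 FOREIGN KEY (a) REFERENCES p(a) DISABLE NOVALIDATE", driver);
// TOK_CHECK_CONSTRAINT
run("ALTER TABLE t ADD CONSTRAINT ck1 CHECK (b > 0)", driver);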

Example 13 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class SemanticAnalyzer, method analyzeCreateTable:

/**
 * Analyzes the create table command. If it is a regular create-table or
 * create-table-like statement, we create a DDLWork and return null. If it is
 * a create-table-as-select, we get the necessary info, such as the SerDe and
 * storage format, put it in the QB, and return the select statement,
 * indicating that the rest of the semantic analyzer needs to deal with the
 * select statement with respect to the SerDe and storage format.
 */
ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx) throws SemanticException {
    TableName qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
    final String dbDotTab = qualifiedTabName.getNotEmptyDbTable();
    String likeTableName = null;
    List<FieldSchema> cols = new ArrayList<FieldSchema>();
    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
    List<String> partColNames = new ArrayList<>();
    List<String> bucketCols = new ArrayList<String>();
    List<SQLPrimaryKey> primaryKeys = new ArrayList<SQLPrimaryKey>();
    List<SQLForeignKey> foreignKeys = new ArrayList<SQLForeignKey>();
    List<SQLUniqueConstraint> uniqueConstraints = new ArrayList<>();
    List<SQLNotNullConstraint> notNullConstraints = new ArrayList<>();
    List<SQLDefaultConstraint> defaultConstraints = new ArrayList<>();
    List<SQLCheckConstraint> checkConstraints = new ArrayList<>();
    List<Order> sortCols = new ArrayList<Order>();
    int numBuckets = -1;
    String comment = null;
    String location = null;
    Map<String, String> tblProps = null;
    boolean ifNotExists = false;
    boolean isExt = false;
    boolean isTemporary = false;
    boolean isManaged = false;
    boolean isMaterialization = false;
    boolean isTransactional = false;
    ASTNode selectStmt = null;
    // regular CREATE TABLE
    final int CREATE_TABLE = 0;
    // CREATE TABLE LIKE ... (CTLT)
    final int CTLT = 1;
    // CREATE TABLE AS SELECT ... (CTAS)
    final int CTAS = 2;
    // CREATE TRANSACTIONAL TABLE
    final int ctt = 3;
    int command_type = CREATE_TABLE;
    List<String> skewedColNames = new ArrayList<String>();
    List<List<String>> skewedValues = new ArrayList<List<String>>();
    Map<List<String>, String> listBucketColValuesMapping = new HashMap<List<String>, String>();
    boolean storedAsDirs = false;
    boolean isUserStorageFormat = false;
    boolean partitionTransformSpecExists = false;
    RowFormatParams rowFormatParams = new RowFormatParams();
    StorageFormat storageFormat = new StorageFormat(conf);
    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // set storage handler if default handler is provided in config
    String defaultStorageHandler = HiveConf.getVar(conf, HIVE_DEFAULT_STORAGE_HANDLER);
    if (defaultStorageHandler != null && !defaultStorageHandler.isEmpty()) {
        LOG.info("Default storage handler class detected in config. Using storage handler class if exists: '{}'", defaultStorageHandler);
        storageFormat.setStorageHandler(defaultStorageHandler);
        isUserStorageFormat = true;
    }
    /*
     * Check the 1st-level children and do simple semantic checks: 1) CTLT and
     * CTAS should not coexists. 2) CTLT or CTAS should not coexists with column
     * list (target table schema). 3) CTAS does not support partitioning (for
     * now).
     */
    for (int num = 1; num < numCh; num++) {
        ASTNode child = (ASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            isUserStorageFormat = true;
            continue;
        }
        switch(child.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.KW_EXTERNAL:
                isExt = true;
                break;
            case HiveParser.KW_MANAGED:
                isManaged = true;
                isTransactional = true;
                break;
            case HiveParser.KW_TEMPORARY:
                isTemporary = true;
                isMaterialization = MATERIALIZATION_MARKER.equals(child.getText());
                break;
            case HiveParser.KW_TRANSACTIONAL:
                isTransactional = true;
                command_type = ctt;
                break;
            case HiveParser.TOK_LIKETABLE:
                if (child.getChildCount() > 0) {
                    likeTableName = getUnescapedName((ASTNode) child.getChild(0));
                    if (likeTableName != null) {
                        if (command_type == CTAS) {
                            throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                        }
                        if (cols.size() != 0) {
                            throw new SemanticException(ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
                        }
                    }
                    command_type = CTLT;
                }
                break;
            case HiveParser.TOK_QUERY: // CTAS
                if (command_type == CTLT) {
                    throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                }
                if (cols.size() != 0) {
                    throw new SemanticException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
                }
                if (partCols.size() != 0 || bucketCols.size() != 0) {
                    boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING);
                    if (dynPart == false) {
                        throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                    } else {
                        // TODO: support dynamic partition for CTAS
                        throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                    }
                }
                if (!conf.getBoolVar(ConfVars.HIVE_CTAS_EXTERNAL_TABLES) && isExt) {
                    throw new SemanticException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
                }
                command_type = CTAS;
                if (plannerCtx != null) {
                    plannerCtx.setCTASToken(child);
                }
                selectStmt = child;
                break;
            case HiveParser.TOK_TABCOLLIST:
                cols = getColumns(child, true, ctx.getTokenRewriteStream(), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints, conf);
                break;
            case HiveParser.TOK_TABLECOMMENT:
                comment = unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveParser.TOK_TABLEPARTCOLS:
                partCols = getColumns(child, false, ctx.getTokenRewriteStream(), primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints, conf);
                if (hasConstraints(partCols, defaultConstraints, notNullConstraints, checkConstraints)) {
                    // TODO: these constraints should be supported for partition columns
                    throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("NOT NULL,DEFAULT and CHECK Constraints are not allowed with " + "partition columns. "));
                }
                break;
            case HiveParser.TOK_TABLEPARTCOLSBYSPEC:
                List<PartitionTransformSpec> partitionTransformSpec = PartitionTransform.getPartitionTransformSpec(child);
                if (!SessionStateUtil.addResource(conf, hive_metastoreConstants.PARTITION_TRANSFORM_SPEC, partitionTransformSpec)) {
                    throw new SemanticException("Query state attached to Session state must be not null. " + "Partition transform metadata cannot be saved.");
                }
                partitionTransformSpecExists = true;
                break;
            case HiveParser.TOK_TABLEPARTCOLNAMES:
                partColNames = getColumnNames(child);
                break;
            case HiveParser.TOK_ALTERTABLE_BUCKETS:
                bucketCols = getColumnNames((ASTNode) child.getChild(0));
                if (child.getChildCount() == 2) {
                    numBuckets = Integer.parseInt(child.getChild(1).getText());
                } else {
                    sortCols = getColumnNamesOrder((ASTNode) child.getChild(1));
                    numBuckets = Integer.parseInt(child.getChild(2).getText());
                }
                break;
            case HiveParser.TOK_TABLEROWFORMAT:
                rowFormatParams.analyzeRowFormat(child);
                break;
            case HiveParser.TOK_TABLELOCATION:
                location = unescapeSQLString(child.getChild(0).getText());
                location = EximUtil.relativeToAbsolutePath(conf, location);
                inputs.add(toReadEntity(location));
                break;
            case HiveParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((ASTNode) child.getChild(0));
                addPropertyReadEntry(tblProps, inputs);
                break;
            case HiveParser.TOK_TABLESERIALIZER:
                child = (ASTNode) child.getChild(0);
                storageFormat.setSerde(unescapeSQLString(child.getChild(0).getText()));
                if (child.getChildCount() == 2) {
                    readProps((ASTNode) (child.getChild(1).getChild(0)), storageFormat.getSerdeProps());
                }
                break;
            case HiveParser.TOK_TABLESKEWED:
                /**
                 * Throw an error if the user tries to use the DDL with
                 * hive.internal.ddl.list.bucketing.enable set to false.
                 */
                HiveConf hiveConf = SessionState.get().getConf();
                // skewed column names
                skewedColNames = SkewedTableUtils.analyzeSkewedTableDDLColNames(child);
                // skewed value
                skewedValues = SkewedTableUtils.analyzeDDLSkewedValues(child);
                // stored as directories
                storedAsDirs = analyzeStoredAdDirs(child);
                break;
            default:
                throw new AssertionError("Unknown token: " + child.getToken());
        }
    }
    HiveStorageHandler handler;
    try {
        handler = HiveUtils.getStorageHandler(conf, storageFormat.getStorageHandler());
    } catch (HiveException e) {
        throw new SemanticException("Failed to load storage handler:  " + e.getMessage());
    }
    if (handler != null) {
        if (partitionTransformSpecExists && !handler.supportsPartitionTransform()) {
            throw new SemanticException("Partition transform is not supported for " + handler.getClass().getName());
        }
        String fileFormatPropertyKey = handler.getFileFormatPropertyKey();
        if (fileFormatPropertyKey != null) {
            if (tblProps != null && tblProps.containsKey(fileFormatPropertyKey) && storageFormat.getSerdeProps() != null && storageFormat.getSerdeProps().containsKey(fileFormatPropertyKey)) {
                String fileFormat = tblProps.get(fileFormatPropertyKey);
                throw new SemanticException("Provide only one of the following: STORED BY " + fileFormat + " or WITH SERDEPROPERTIES('" + fileFormatPropertyKey + "'='" + fileFormat + "') or" + " TBLPROPERTIES('" + fileFormatPropertyKey + "'='" + fileFormat + "')");
            }
        }
    }
    if (command_type == CREATE_TABLE || command_type == CTLT || command_type == ctt) {
        queryState.setCommandType(HiveOperation.CREATETABLE);
    } else if (command_type == CTAS) {
        queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    } else {
        throw new SemanticException("Unrecognized command.");
    }
    if (isExt && ConstraintsUtils.hasEnabledOrValidatedConstraints(notNullConstraints, defaultConstraints, checkConstraints)) {
        throw new SemanticException(ErrorMsg.INVALID_CSTR_SYNTAX.getMsg("Constraints are disallowed with External tables. " + "Only RELY is allowed."));
    }
    if (checkConstraints != null && !checkConstraints.isEmpty()) {
        ConstraintsUtils.validateCheckConstraint(cols, checkConstraints, ctx.getConf());
    }
    storageFormat.fillDefaultStorageFormat(isExt, false);
    // check for existence of table
    if (ifNotExists) {
        try {
            Table table = getTable(qualifiedTabName, false);
            if (table != null) {
                // table exists
                return null;
            }
        } catch (HiveException e) {
            // should not occur since the second parameter to getTable is false
            throw new IllegalStateException("Unexpected Exception thrown: " + e.getMessage(), e);
        }
    }
    if (isTemporary) {
        if (location == null) {
            // No location given: generate one under the session's temp space; it has the same life cycle as the tmp table
            try {
                // Generate a unique ID for temp table path.
                // This path will be fixed for the life of the temp table.
                location = SessionState.generateTempTableLocation(conf);
            } catch (MetaException err) {
                throw new SemanticException("Error while generating temp table path:", err);
            }
        }
    }
    switch(command_type) {
        case CREATE_TABLE: // REGULAR CREATE TABLE DDL
            if (!CollectionUtils.isEmpty(partColNames)) {
                throw new SemanticException("Partition columns can only declared using their name and types in regular CREATE TABLE statements");
            }
            tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional, isManaged);
            addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
            CreateTableDesc crtTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists, skewedColNames, skewedValues, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
            crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
            crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
            crtTblDesc.validate(conf);
            // outputs is empty, which means this create table happens in the current
            // database.
            rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc)));
            break;
        case ctt: // CREATE TRANSACTIONAL TABLE
            if (isExt) {
                throw new SemanticException(qualifiedTabName.getTable() + " cannot be declared transactional because it's an external table");
            }
            tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional, isManaged);
            addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, TableType.MANAGED_TABLE, false, tblProps, storageFormat);
            CreateTableDesc crtTranTblDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists, skewedColNames, skewedValues, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
            crtTranTblDesc.setStoredAsSubDirectories(storedAsDirs);
            crtTranTblDesc.setNullFormat(rowFormatParams.nullFormat);
            crtTranTblDesc.validate(conf);
            // outputs is empty, which means this create table happens in the current
            // database.
            rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTranTblDesc)));
            break;
        case CTLT: // create table like <tbl_name>
            tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional, isManaged);
            addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
            Table likeTable = getTable(likeTableName, false);
            if (likeTable != null) {
                if (isTemporary || isExt) {
                    updateDefaultTblProps(likeTable.getParameters(), tblProps, new ArrayList<>(Arrays.asList(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES)));
                } else {
                    updateDefaultTblProps(likeTable.getParameters(), tblProps, null);
                }
            }
            CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists, likeTableName, isUserStorageFormat);
            rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc)));
            break;
        case CTAS: // create table as select
            if (isTemporary) {
                if (!ctx.isExplainSkipExecution() && !isMaterialization) {
                    SessionState ss = SessionState.get();
                    if (ss == null) {
                        throw new SemanticException("No current SessionState, cannot create temporary table " + qualifiedTabName.getNotEmptyDbTable());
                    }
                    Map<String, Table> tables = SessionHiveMetaStoreClient.getTempTablesForDatabase(qualifiedTabName.getDb(), qualifiedTabName.getTable());
                    if (tables != null && tables.containsKey(qualifiedTabName.getTable())) {
                        throw new SemanticException("Temporary table " + qualifiedTabName.getNotEmptyDbTable() + " already exists");
                    }
                }
            } else {
                // dumpTable is only used to check the conflict for non-temporary tables
                try {
                    Table dumpTable = db.newTable(dbDotTab);
                    if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false) && !ctx.isExplainSkipExecution()) {
                        throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab));
                    }
                } catch (HiveException e) {
                    throw new SemanticException(e);
                }
            }
            if (location != null && location.length() != 0) {
                Path locPath = new Path(location);
                FileSystem curFs = null;
                FileStatus locStats = null;
                try {
                    curFs = locPath.getFileSystem(conf);
                    if (curFs != null) {
                        locStats = curFs.getFileStatus(locPath);
                    }
                    if (locStats != null && locStats.isDir()) {
                        FileStatus[] lStats = curFs.listStatus(locPath);
                        if (lStats != null && lStats.length != 0) {
                            // Don't throw an exception if the target location only contains the staging-dirs
                            for (FileStatus lStat : lStats) {
                                if (!lStat.getPath().getName().startsWith(HiveConf.getVar(conf, HiveConf.ConfVars.STAGINGDIR))) {
                                    throw new SemanticException(ErrorMsg.CTAS_LOCATION_NONEMPTY.getMsg(location));
                                }
                            }
                        }
                    }
                } catch (FileNotFoundException nfe) {
                // we will create the folder if it does not exist.
                } catch (IOException ioE) {
                    LOG.debug("Exception when validate folder", ioE);
                }
            }
            if (!CollectionUtils.isEmpty(partCols)) {
                throw new SemanticException("Partition columns can only declared using their names in CTAS statements");
            }
            tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary, isTransactional, isManaged);
            tblProps.put(TABLE_IS_CTAS, "true");
            addDbAndTabToOutputs(new String[] { qualifiedTabName.getDb(), qualifiedTabName.getTable() }, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
            tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim, rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists, skewedColNames, skewedValues, true, primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints);
            tableDesc.setMaterialization(isMaterialization);
            tableDesc.setStoredAsSubDirectories(storedAsDirs);
            tableDesc.setNullFormat(rowFormatParams.nullFormat);
            qb.setTableDesc(tableDesc);
            return selectStmt;
        default:
            throw new SemanticException("Unrecognized command.");
    }
    return null;
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) FileStatus(org.apache.hadoop.fs.FileStatus) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) FileNotFoundException(java.io.FileNotFoundException) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) FileSystem(org.apache.hadoop.fs.FileSystem) LinkedList(java.util.LinkedList) ArrayList(java.util.ArrayList) ValidTxnWriteIdList(org.apache.hadoop.hive.common.ValidTxnWriteIdList) ValidTxnList(org.apache.hadoop.hive.common.ValidTxnList) List(java.util.List) HiveConf(org.apache.hadoop.hive.conf.HiveConf) MetaException(org.apache.hadoop.hive.metastore.api.MetaException) Order(org.apache.hadoop.hive.metastore.api.Order) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) HiveStorageHandler(org.apache.hadoop.hive.ql.metadata.HiveStorageHandler) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) TableName(org.apache.hadoop.hive.common.TableName) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) CreateTableLikeDesc(org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) Path(org.apache.hadoop.fs.Path) SourceTable(org.apache.hadoop.hive.metastore.api.SourceTable) Table(org.apache.hadoop.hive.ql.metadata.Table) IOException(java.io.IOException) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) DefaultConstraint(org.apache.hadoop.hive.ql.metadata.DefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) CreateTableDesc(org.apache.hadoop.hive.ql.ddl.table.create.CreateTableDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork)
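
All six constraint lists collected above are filled from inline constraints in the column list of the CREATE TABLE statement (the TOK_TABCOLLIST branch). As a hedged illustration, here is a made-up statement that would populate every list at once, using the same constraint syntax as the tables created in Example 11:

String ddl =
    "CREATE TABLE child ("
    + " id INT,"
    + " pid INT NOT NULL DISABLE,"
    + " qty INT DEFAULT 0,"
    + " tag STRING CHECK (tag LIKE 'a%'),"
    + " PRIMARY KEY (id) DISABLE NOVALIDATE RELY,"
    + " FOREIGN KEY (pid) REFERENCES parent(id) DISABLE NOVALIDATE,"
    + " UNIQUE (tag) DISABLE)";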

Example 14 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class NonCatCallsWithCatalog, method createTableWithConstraints:

@Test
public void createTableWithConstraints() throws TException {
    Table parentTable = testTables[2];
    Table table = new TableBuilder().setTableName("table_in_other_catalog_with_constraints").addCol("col1", "int").addCol("col2", "varchar(32)").addCol("col3", "int").addCol("col4", "int").addCol("col5", "int").addCol("col6", "int").build(conf);
    table.unsetCatName();
    List<SQLPrimaryKey> parentPk = new SQLPrimaryKeyBuilder().onTable(parentTable).addColumn("test_col1").build(conf);
    for (SQLPrimaryKey pkcol : parentPk) {
        pkcol.unsetCatName();
    }
    client.addPrimaryKey(parentPk);
    List<SQLPrimaryKey> pk = new SQLPrimaryKeyBuilder().onTable(table).addColumn("col2").build(conf);
    for (SQLPrimaryKey pkcol : pk) {
        pkcol.unsetCatName();
    }
    List<SQLForeignKey> fk = new SQLForeignKeyBuilder().fromPrimaryKey(parentPk).onTable(table).addColumn("col1").build(conf);
    for (SQLForeignKey fkcol : fk) {
        fkcol.unsetCatName();
    }
    List<SQLDefaultConstraint> dv = new SQLDefaultConstraintBuilder().onTable(table).addColumn("col3").setDefaultVal(0).build(conf);
    for (SQLDefaultConstraint dccol : dv) {
        dccol.unsetCatName();
    }
    List<SQLNotNullConstraint> nn = new SQLNotNullConstraintBuilder().onTable(table).addColumn("col4").build(conf);
    for (SQLNotNullConstraint nncol : nn) {
        nncol.unsetCatName();
    }
    List<SQLUniqueConstraint> uc = new SQLUniqueConstraintBuilder().onTable(table).addColumn("col5").build(conf);
    for (SQLUniqueConstraint uccol : uc) {
        uccol.unsetCatName();
    }
    List<SQLCheckConstraint> cc = new SQLCheckConstraintBuilder().onTable(table).addColumn("col6").setCheckExpression("> 0").build(conf);
    for (SQLCheckConstraint cccol : cc) {
        cccol.unsetCatName();
    }
    client.createTableWithConstraints(table, pk, fk, uc, nn, dv, cc);
    PrimaryKeysRequest pkRqst = new PrimaryKeysRequest(parentTable.getDbName(), parentTable.getTableName());
    pkRqst.setCatName(parentTable.getCatName());
    List<SQLPrimaryKey> pkFetched = client.getPrimaryKeys(pkRqst);
    Assert.assertEquals(1, pkFetched.size());
    Assert.assertEquals(expectedCatalog(), pkFetched.get(0).getCatName());
    Assert.assertEquals(parentTable.getDbName(), pkFetched.get(0).getTable_db());
    Assert.assertEquals(parentTable.getTableName(), pkFetched.get(0).getTable_name());
    Assert.assertEquals("test_col1", pkFetched.get(0).getColumn_name());
    Assert.assertEquals(1, pkFetched.get(0).getKey_seq());
    Assert.assertTrue(pkFetched.get(0).isEnable_cstr());
    Assert.assertFalse(pkFetched.get(0).isValidate_cstr());
    Assert.assertFalse(pkFetched.get(0).isRely_cstr());
    Assert.assertEquals(parentTable.getCatName(), pkFetched.get(0).getCatName());
    ForeignKeysRequest fkRqst = new ForeignKeysRequest(parentTable.getDbName(), parentTable.getTableName(), table.getDbName(), table.getTableName());
    fkRqst.setCatName(table.getCatName());
    List<SQLForeignKey> fkFetched = client.getForeignKeys(fkRqst);
    Assert.assertEquals(1, fkFetched.size());
    Assert.assertEquals(expectedCatalog(), fkFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), fkFetched.get(0).getFktable_db());
    Assert.assertEquals(table.getTableName(), fkFetched.get(0).getFktable_name());
    Assert.assertEquals("col1", fkFetched.get(0).getFkcolumn_name());
    Assert.assertEquals(parentTable.getDbName(), fkFetched.get(0).getPktable_db());
    Assert.assertEquals(parentTable.getTableName(), fkFetched.get(0).getPktable_name());
    Assert.assertEquals(1, fkFetched.get(0).getKey_seq());
    Assert.assertEquals(parentTable.getTableName() + "_primary_key", fkFetched.get(0).getPk_name());
    Assert.assertTrue(fkFetched.get(0).isEnable_cstr());
    Assert.assertFalse(fkFetched.get(0).isValidate_cstr());
    Assert.assertFalse(fkFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), fkFetched.get(0).getCatName());
    NotNullConstraintsRequest nnRqst = new NotNullConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLNotNullConstraint> nnFetched = client.getNotNullConstraints(nnRqst);
    Assert.assertEquals(1, nnFetched.size());
    Assert.assertEquals(table.getDbName(), nnFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), nnFetched.get(0).getTable_name());
    Assert.assertEquals("col4", nnFetched.get(0).getColumn_name());
    Assert.assertEquals(table.getTableName() + "_not_null_constraint", nnFetched.get(0).getNn_name());
    Assert.assertTrue(nnFetched.get(0).isEnable_cstr());
    Assert.assertFalse(nnFetched.get(0).isValidate_cstr());
    Assert.assertFalse(nnFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), nnFetched.get(0).getCatName());
    UniqueConstraintsRequest ucRqst = new UniqueConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLUniqueConstraint> ucFetched = client.getUniqueConstraints(ucRqst);
    Assert.assertEquals(1, ucFetched.size());
    Assert.assertEquals(table.getDbName(), ucFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), ucFetched.get(0).getTable_name());
    Assert.assertEquals("col5", ucFetched.get(0).getColumn_name());
    Assert.assertEquals(1, ucFetched.get(0).getKey_seq());
    Assert.assertEquals(table.getTableName() + "_unique_constraint", ucFetched.get(0).getUk_name());
    Assert.assertTrue(ucFetched.get(0).isEnable_cstr());
    Assert.assertFalse(ucFetched.get(0).isValidate_cstr());
    Assert.assertFalse(ucFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), ucFetched.get(0).getCatName());
    DefaultConstraintsRequest dcRqst = new DefaultConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLDefaultConstraint> dcFetched = client.getDefaultConstraints(dcRqst);
    Assert.assertEquals(1, dcFetched.size());
    Assert.assertEquals(expectedCatalog(), dcFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), dcFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), dcFetched.get(0).getTable_name());
    Assert.assertEquals("col3", dcFetched.get(0).getColumn_name());
    Assert.assertEquals("0", dcFetched.get(0).getDefault_value());
    Assert.assertEquals(table.getTableName() + "_default_value", dcFetched.get(0).getDc_name());
    Assert.assertTrue(dcFetched.get(0).isEnable_cstr());
    Assert.assertFalse(dcFetched.get(0).isValidate_cstr());
    Assert.assertFalse(dcFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), dcFetched.get(0).getCatName());
    CheckConstraintsRequest ccRqst = new CheckConstraintsRequest(table.getCatName(), table.getDbName(), table.getTableName());
    List<SQLCheckConstraint> ccFetched = client.getCheckConstraints(ccRqst);
    Assert.assertEquals(1, ccFetched.size());
    Assert.assertEquals(expectedCatalog(), ccFetched.get(0).getCatName());
    Assert.assertEquals(table.getDbName(), ccFetched.get(0).getTable_db());
    Assert.assertEquals(table.getTableName(), ccFetched.get(0).getTable_name());
    Assert.assertEquals("col6", ccFetched.get(0).getColumn_name());
    Assert.assertEquals("> 0", ccFetched.get(0).getCheck_expression());
    Assert.assertEquals(table.getTableName() + "_check_constraint", ccFetched.get(0).getDc_name());
    Assert.assertTrue(ccFetched.get(0).isEnable_cstr());
    Assert.assertFalse(ccFetched.get(0).isValidate_cstr());
    Assert.assertFalse(ccFetched.get(0).isRely_cstr());
    Assert.assertEquals(table.getCatName(), ccFetched.get(0).getCatName());
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) NotNullConstraintsRequest(org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest) TableBuilder(org.apache.hadoop.hive.metastore.client.builder.TableBuilder) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder) SQLUniqueConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder) SQLPrimaryKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) UniqueConstraintsRequest(org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest) SQLForeignKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLCheckConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLCheckConstraintBuilder) Table(org.apache.hadoop.hive.metastore.api.Table) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) PrimaryKeysRequest(org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest) ForeignKeysRequest(org.apache.hadoop.hive.metastore.api.ForeignKeysRequest) DefaultConstraintsRequest(org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest) SQLNotNullConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLNotNullConstraintBuilder) CheckConstraintsRequest(org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest) Test(org.junit.Test)
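
A note on the builders used above: build(conf) emits one constraint object per added column, so a composite key yields a list with increasing key_seq values, which is why Example 11 sees two SQLForeignKey entries for a two-column key. The following minimal sketch reuses only the builder calls that appear in this test; the table variables and column names are made up.

List<SQLPrimaryKey> parentPk = new SQLPrimaryKeyBuilder()
        .onTable(parentTable)     // the referenced (parent) table
        .addColumn("id")
        .build(conf);
List<SQLForeignKey> fk = new SQLForeignKeyBuilder()
        .fromPrimaryKey(parentPk) // ties the foreign key to the parent's primary key
        .onTable(childTable)      // the referencing (child) table
        .addColumn("pid")
        .build(conf);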

Example 15 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

From the class ObjectStore, method addForeignKeys:

// TODO: clean up this method
private List<SQLForeignKey> addForeignKeys(List<SQLForeignKey> foreignKeys, boolean retrieveCD, List<SQLPrimaryKey> primaryKeys, List<SQLUniqueConstraint> uniqueConstraints) throws InvalidObjectException, MetaException {
    if (CollectionUtils.isNotEmpty(foreignKeys)) {
        List<MConstraint> mpkfks = new ArrayList<>();
        String currentConstraintName = null;
        String catName = null;
        // checks / adds information about each column.
        for (int i = 0; i < foreignKeys.size(); i++) {
            if (catName == null) {
                catName = normalizeIdentifier(foreignKeys.get(i).isSetCatName() ? foreignKeys.get(i).getCatName() : getDefaultCatalog(conf));
            } else {
                String tmpCatName = normalizeIdentifier(foreignKeys.get(i).isSetCatName() ? foreignKeys.get(i).getCatName() : getDefaultCatalog(conf));
                if (!catName.equals(tmpCatName)) {
                    throw new InvalidObjectException("Foreign keys cannot span catalogs");
                }
            }
            final String fkTableDB = normalizeIdentifier(foreignKeys.get(i).getFktable_db());
            final String fkTableName = normalizeIdentifier(foreignKeys.get(i).getFktable_name());
            // If retrieveCD is false, we do not need to do a deep retrieval of the Table Column Descriptor.
            // For instance, this is the case when we are creating the table.
            final AttachedMTableInfo nChildTable = getMTable(catName, fkTableDB, fkTableName, retrieveCD);
            final MTable childTable = nChildTable.mtbl;
            if (childTable == null) {
                throw new InvalidObjectException("Child table not found: " + fkTableName);
            }
            MColumnDescriptor childCD = retrieveCD ? nChildTable.mcd : childTable.getSd().getCD();
            final List<MFieldSchema> childCols = childCD == null || childCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(childCD.getCols());
            if (childTable.getPartitionKeys() != null) {
                childCols.addAll(childTable.getPartitionKeys());
            }
            final String pkTableDB = normalizeIdentifier(foreignKeys.get(i).getPktable_db());
            final String pkTableName = normalizeIdentifier(foreignKeys.get(i).getPktable_name());
            // For primary keys, we retrieve the column descriptors if retrieveCD is true (which means
            // it is an alter table statement) or if it is a create table statement but we are
            // referencing another table instead of self for the primary key.
            final AttachedMTableInfo nParentTable;
            final MTable parentTable;
            MColumnDescriptor parentCD;
            final List<MFieldSchema> parentCols;
            final List<SQLPrimaryKey> existingTablePrimaryKeys;
            final List<SQLUniqueConstraint> existingTableUniqueConstraints;
            final boolean sameTable = fkTableDB.equals(pkTableDB) && fkTableName.equals(pkTableName);
            if (sameTable) {
                nParentTable = nChildTable;
                parentTable = childTable;
                parentCD = childCD;
                parentCols = childCols;
                existingTablePrimaryKeys = primaryKeys;
                existingTableUniqueConstraints = uniqueConstraints;
            } else {
                nParentTable = getMTable(catName, pkTableDB, pkTableName, true);
                parentTable = nParentTable.mtbl;
                if (parentTable == null) {
                    throw new InvalidObjectException("Parent table not found: " + pkTableName);
                }
                parentCD = nParentTable.mcd;
                parentCols = parentCD == null || parentCD.getCols() == null ? new ArrayList<>() : new ArrayList<>(parentCD.getCols());
                if (parentTable.getPartitionKeys() != null) {
                    parentCols.addAll(parentTable.getPartitionKeys());
                }
                existingTablePrimaryKeys = getPrimaryKeys(catName, pkTableDB, pkTableName);
                existingTableUniqueConstraints = getUniqueConstraints(catName, pkTableDB, pkTableName);
            }
            // Verify that the referenced table actually has a primary key or unique key and that the columns correspond.
            if (existingTablePrimaryKeys.isEmpty() && existingTableUniqueConstraints.isEmpty()) {
                throw new MetaException("Trying to define foreign key but there are no primary keys or unique keys for referenced table");
            }
            final Set<String> validPKsOrUnique = generateValidPKsOrUniqueSignatures(parentCols, existingTablePrimaryKeys, existingTableUniqueConstraints);
            StringBuilder fkSignature = new StringBuilder();
            StringBuilder referencedKSignature = new StringBuilder();
            for (; i < foreignKeys.size(); i++) {
                SQLForeignKey foreignKey = foreignKeys.get(i);
                final String fkColumnName = normalizeIdentifier(foreignKey.getFkcolumn_name());
                int childIntegerIndex = getColumnIndexFromTableColumns(childCD.getCols(), fkColumnName);
                if (childIntegerIndex == -1) {
                    if (childTable.getPartitionKeys() != null) {
                        childCD = null;
                        childIntegerIndex = getColumnIndexFromTableColumns(childTable.getPartitionKeys(), fkColumnName);
                    }
                    if (childIntegerIndex == -1) {
                        throw new InvalidObjectException("Child column not found: " + fkColumnName);
                    }
                }
                final String pkColumnName = normalizeIdentifier(foreignKey.getPkcolumn_name());
                int parentIntegerIndex = getColumnIndexFromTableColumns(parentCD.getCols(), pkColumnName);
                if (parentIntegerIndex == -1) {
                    if (parentTable.getPartitionKeys() != null) {
                        parentCD = null;
                        parentIntegerIndex = getColumnIndexFromTableColumns(parentTable.getPartitionKeys(), pkColumnName);
                    }
                    if (parentIntegerIndex == -1) {
                        throw new InvalidObjectException("Parent column not found: " + pkColumnName);
                    }
                }
                if (foreignKey.getFk_name() == null) {
                    // No explicit name: generate one at the first column of the key (key_seq == 1);
                    // later columns of the same composite key reuse it, preserving
                    // the uniqueness of the generated constraint name.
                    if (foreignKey.getKey_seq() == 1) {
                        currentConstraintName = generateConstraintName(parentTable, fkTableDB, fkTableName, pkTableDB, pkTableName, pkColumnName, fkColumnName, "fk");
                    }
                } else {
                    currentConstraintName = normalizeIdentifier(foreignKey.getFk_name());
                    if (constraintNameAlreadyExists(parentTable, currentConstraintName)) {
                        String fqConstraintName = String.format("%s.%s.%s", parentTable.getDatabase().getName(), parentTable.getTableName(), currentConstraintName);
                        throw new InvalidObjectException("Constraint name already exists: " + fqConstraintName);
                    }
                }
                // Update Column, keys, table, database, catalog name
                foreignKey.setFk_name(currentConstraintName);
                foreignKey.setCatName(catName);
                foreignKey.setFktable_db(fkTableDB);
                foreignKey.setFktable_name(fkTableName);
                foreignKey.setPktable_db(pkTableDB);
                foreignKey.setPktable_name(pkTableName);
                foreignKey.setFkcolumn_name(fkColumnName);
                foreignKey.setPkcolumn_name(pkColumnName);
                Integer updateRule = foreignKey.getUpdate_rule();
                Integer deleteRule = foreignKey.getDelete_rule();
                int enableValidateRely = (foreignKey.isEnable_cstr() ? 4 : 0) + (foreignKey.isValidate_cstr() ? 2 : 0) + (foreignKey.isRely_cstr() ? 1 : 0);
                MConstraint mpkfk = new MConstraint(currentConstraintName, foreignKey.getKey_seq(), MConstraint.FOREIGN_KEY_CONSTRAINT, deleteRule, updateRule, enableValidateRely, parentTable, childTable, parentCD, childCD, childIntegerIndex, parentIntegerIndex);
                mpkfks.add(mpkfk);
                final String fkColType = getColumnFromTableColumns(childCols, fkColumnName).getType();
                fkSignature.append(generateColNameTypeSignature(fkColumnName, fkColType));
                referencedKSignature.append(generateColNameTypeSignature(pkColumnName, fkColType));
                if (i + 1 < foreignKeys.size() && foreignKeys.get(i + 1).getKey_seq() == 1) {
                    // Next one is a new key, we bail out from the inner loop
                    break;
                }
            }
            String referenced = referencedKSignature.toString();
            if (!validPKsOrUnique.contains(referenced)) {
                throw new MetaException("Foreign key references " + referenced + " but no corresponding " + "primary key or unique key exists. Possible keys: " + validPKsOrUnique);
            }
            if (sameTable && fkSignature.toString().equals(referenced)) {
                throw new MetaException("Cannot be both foreign key and primary/unique key on same table: " + referenced);
            }
            fkSignature = new StringBuilder();
            referencedKSignature = new StringBuilder();
        }
        pm.makePersistentAll(mpkfks);
    }
    return foreignKeys;
}
Also used : SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) MFieldSchema(org.apache.hadoop.hive.metastore.model.MFieldSchema) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) ArrayList(java.util.ArrayList) MColumnDescriptor(org.apache.hadoop.hive.metastore.model.MColumnDescriptor) MConstraint(org.apache.hadoop.hive.metastore.model.MConstraint) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLCheckConstraint(org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) MTable(org.apache.hadoop.hive.metastore.model.MTable) InvalidObjectException(org.apache.hadoop.hive.metastore.api.InvalidObjectException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
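
The enableValidateRely value computed above packs the three constraint flags into a single int stored on MConstraint: ENABLE in bit 2 (value 4), VALIDATE in bit 1 (value 2), RELY in bit 0 (value 1). A small illustrative round-trip follows; the helper names are made up, and only the bit layout comes from the code above.

// Mirrors the packing expression in addForeignKeys.
static int packConstraintState(boolean enable, boolean validate, boolean rely) {
    return (enable ? 4 : 0) + (validate ? 2 : 0) + (rely ? 1 : 0);
}

// Decoding reads the same bit positions back.
static boolean isEnable(int packed)   { return (packed & 4) != 0; }
static boolean isValidate(int packed) { return (packed & 2) != 0; }
static boolean isRely(int packed)     { return (packed & 1) != 0; }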

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 46
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 28
Test (org.junit.Test): 20
Table (org.apache.hadoop.hive.metastore.api.Table): 19
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 15
ArrayList (java.util.ArrayList): 13
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 13
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13
SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder): 13
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder): 13
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 12
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 11
ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest): 9
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 6
IOException (java.io.IOException): 5
HashMap (java.util.HashMap): 5
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 5
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 5
TApplicationException (org.apache.thrift.TApplicationException): 5
Tree (org.antlr.runtime.tree.Tree): 3