Search in sources :

Example 1 with SQLAllTableConstraints

use of org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints in project hive by apache.

the class ReplDumpTask method dumpConstraintMetadata.

void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot, Hive hiveDb, long tableId) throws Exception {
    try {
        Path constraintsRoot = new Path(dbRoot, ReplUtils.CONSTRAINTS_ROOT_DIR_NAME);
        Path commonConstraintsFile = new Path(constraintsRoot, ConstraintFileType.COMMON.getPrefix() + tblName);
        Path fkConstraintsFile = new Path(constraintsRoot, ConstraintFileType.FOREIGNKEY.getPrefix() + tblName);
        SQLAllTableConstraints tableConstraints = hiveDb.getTableConstraints(dbName, tblName, tableId);
        if (CollectionUtils.isNotEmpty(tableConstraints.getPrimaryKeys())
                || CollectionUtils.isNotEmpty(tableConstraints.getUniqueConstraints())
                || CollectionUtils.isNotEmpty(tableConstraints.getNotNullConstraints())
                || CollectionUtils.isNotEmpty(tableConstraints.getCheckConstraints())
                || CollectionUtils.isNotEmpty(tableConstraints.getDefaultConstraints())) {
            try (JsonWriter jsonWriter = new JsonWriter(commonConstraintsFile.getFileSystem(conf), commonConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(tableConstraints.getPrimaryKeys(), null,
                        tableConstraints.getUniqueConstraints(), tableConstraints.getNotNullConstraints(),
                        tableConstraints.getDefaultConstraints(), tableConstraints.getCheckConstraints(), conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
        if (CollectionUtils.isNotEmpty(tableConstraints.getForeignKeys())) {
            try (JsonWriter jsonWriter = new JsonWriter(fkConstraintsFile.getFileSystem(conf), fkConstraintsFile)) {
                ConstraintsSerializer serializer = new ConstraintsSerializer(null, tableConstraints.getForeignKeys(), null, null, null, null, conf);
                serializer.writeTo(jsonWriter, null);
            }
        }
    } catch (NoSuchObjectException e) {
        // Bootstrap constraint dump shouldn't fail if the table is dropped/renamed while dumping it.
        // Just log a debug message and skip it.
        LOG.debug(e.getMessage());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints) ConstraintsSerializer(org.apache.hadoop.hive.ql.parse.repl.dump.io.ConstraintsSerializer) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException) JsonWriter(org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter)
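The five-way isNotEmpty chain above is a natural candidate for a tiny helper. A minimal sketch, assuming commons-collections' CollectionUtils (whichever package the surrounding class already imports) and the thrift-generated getters shown above; the helper name is hypothetical, not part of Hive:

// Hypothetical helper: true if the table has any constraint that belongs in the
// "common" constraints file, i.e. everything except foreign keys.
private static boolean hasCommonConstraints(SQLAllTableConstraints c) {
    return CollectionUtils.isNotEmpty(c.getPrimaryKeys())
            || CollectionUtils.isNotEmpty(c.getUniqueConstraints())
            || CollectionUtils.isNotEmpty(c.getNotNullConstraints())
            || CollectionUtils.isNotEmpty(c.getCheckConstraints())
            || CollectionUtils.isNotEmpty(c.getDefaultConstraints());
}

Foreign keys go to their own file, presumably because they reference a second (parent) table and cannot be applied on the target until both tables exist.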

Example 2 with SQLAllTableConstraints

use of org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints in project hive by apache.

the class ObjectStore method getAllTableConstraints.

/**
 * API to fetch all table constraints at once.
 * @param request request object identifying the catalog, database and table
 * @return all table constraints
 * @throws MetaException
 * @throws NoSuchObjectException
 */
@Override
public SQLAllTableConstraints getAllTableConstraints(AllTableConstraintsRequest request) throws MetaException, NoSuchObjectException {
    String catName = request.getCatName();
    String dbName = request.getDbName();
    String tblName = request.getTblName();
    debugLog("Get all table constraints for the table - " + catName + "." + dbName + "." + tblName + " in class ObjectStore.java");
    SQLAllTableConstraints sqlAllTableConstraints = new SQLAllTableConstraints();
    sqlAllTableConstraints.setPrimaryKeys(getPrimaryKeys(catName, dbName, tblName));
    // parent db/table arguments are null, so foreign keys are not filtered by the referenced (parent) table
    sqlAllTableConstraints.setForeignKeys(getForeignKeys(catName, null, null, dbName, tblName));
    sqlAllTableConstraints.setUniqueConstraints(getUniqueConstraints(catName, dbName, tblName));
    sqlAllTableConstraints.setDefaultConstraints(getDefaultConstraints(catName, dbName, tblName));
    sqlAllTableConstraints.setCheckConstraints(getCheckConstraints(catName, dbName, tblName));
    sqlAllTableConstraints.setNotNullConstraints(getNotNullConstraints(catName, dbName, tblName));
    return sqlAllTableConstraints;
}
Also used : SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints)
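On the client side the same request/response pair is exposed through the metastore client, as Example 5 below shows. A minimal usage sketch, assuming an already-connected IMetaStoreClient named client and placeholder catalog/database/table names:

// Fetch every constraint type for one table in a single metastore round trip,
// instead of issuing six separate get*Constraints calls.
AllTableConstraintsRequest request =
        new AllTableConstraintsRequest("default", "my_table", "hive"); // dbName, tblName, catName
SQLAllTableConstraints constraints = client.getAllTableConstraints(request);
System.out.println("primary keys: " + constraints.getPrimaryKeys());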

Example 3 with SQLAllTableConstraints

use of org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints in project hive by apache.

the class SharedCache method listCachedAllTableConstraints.

public SQLAllTableConstraints listCachedAllTableConstraints(String catName, String dbName, String tblName) {
    SQLAllTableConstraints constraints = new SQLAllTableConstraints();
    try {
        cacheLock.readLock().lock();
        TableWrapper tblWrapper = tableCache.getIfPresent(CacheUtils.buildTableKey(catName, dbName, tblName));
        if ((tblWrapper != null) && tblWrapper.isConstraintsValid()) {
            constraints = tblWrapper.getAllTableConstraints();
        }
    } finally {
        cacheLock.readLock().unlock();
    }
    return constraints;
}
Also used : SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints)
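An empty result from listCachedAllTableConstraints can mean either "no constraints" or "not cached / constraints invalid", so callers need a fallback. A hedged sketch of the read-through pattern, assuming a sharedCache and a rawStore (the ObjectStore from Example 2) are in scope; treating unset thrift lists as a cache miss is an illustrative assumption:

// Consult the shared cache first; on a miss, fall back to the underlying
// ObjectStore, which reads the constraints from the backing RDBMS.
SQLAllTableConstraints constraints =
        sharedCache.listCachedAllTableConstraints(catName, dbName, tblName);
if (constraints.getPrimaryKeys() == null) { // assumption: unset lists signal a miss
    constraints = rawStore.getAllTableConstraints(
            new AllTableConstraintsRequest(dbName, tblName, catName));
}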

Example 4 with SQLAllTableConstraints

use of org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints in project hive by apache.

the class SharedCache method populateTableInCache.

public boolean populateTableInCache(Table table, TableCacheObjects cacheObjects) {
    String catName = StringUtils.normalizeIdentifier(table.getCatName());
    String dbName = StringUtils.normalizeIdentifier(table.getDbName());
    String tableName = StringUtils.normalizeIdentifier(table.getTableName());
    SQLAllTableConstraints constraints = cacheObjects.getTableConstraints();
    // 1. Don't add tables that were deleted while we were preparing list for prewarm
    if (tablesDeletedDuringPrewarm.contains(CacheUtils.buildTableKey(catName, dbName, tableName))) {
        return false;
    }
    TableWrapper tblWrapper = createTableWrapper(catName, dbName, tableName, table);
    if (!table.isSetPartitionKeys() && (cacheObjects.getTableColStats() != null)) {
        // Unpartitioned table: cache its table-level column statistics and bail
        // out if that fails (updateTableColStats is the assumed TableWrapper API;
        // cf. updatePartitionColStats below).
        if (!tblWrapper.updateTableColStats(cacheObjects.getTableColStats().getStatsObj())) {
            return false;
        }
    } else {
        if (cacheObjects.getPartitions() != null) {
            // If the partitions were not added due to memory limit, return false
            if (!tblWrapper.cachePartitions(cacheObjects.getPartitions(), this, true)) {
                return false;
            }
        }
        if (cacheObjects.getPartitionColStats() != null) {
            for (ColumnStatistics cs : cacheObjects.getPartitionColStats()) {
                List<String> partVal;
                try {
                    partVal = Warehouse.makeValsFromName(cs.getStatsDesc().getPartName(), null);
                    List<ColumnStatisticsObj> colStats = cs.getStatsObj();
                    if (!tblWrapper.updatePartitionColStats(partVal, colStats)) {
                        return false;
                    }
                } catch (MetaException e) {
                    LOG.debug("Unable to cache partition column stats for table: " + tableName, e);
                }
            }
        }
        tblWrapper.cacheAggrPartitionColStats(cacheObjects.getAggrStatsAllPartitions(), cacheObjects.getAggrStatsAllButDefaultPartition());
    }
    tblWrapper.setMemberCacheUpdated(MemberName.PARTITION_CACHE, false);
    tblWrapper.setMemberCacheUpdated(MemberName.TABLE_COL_STATS_CACHE, false);
    tblWrapper.setMemberCacheUpdated(MemberName.PARTITION_COL_STATS_CACHE, false);
    tblWrapper.setMemberCacheUpdated(MemberName.AGGR_COL_STATS_CACHE, false);
    if (tblWrapper.cacheConstraints(constraints, true)) {
        tblWrapper.setMemberCacheUpdated(MemberName.PRIMARY_KEY_CACHE, false);
        tblWrapper.setMemberCacheUpdated(MemberName.FOREIGN_KEY_CACHE, false);
        tblWrapper.setMemberCacheUpdated(MemberName.NOTNULL_CONSTRAINT_CACHE, false);
        tblWrapper.setMemberCacheUpdated(MemberName.UNIQUE_CONSTRAINT_CACHE, false);
        tblWrapper.setMemberCacheUpdated(MemberName.DEFAULT_CONSTRAINT_CACHE, false);
        tblWrapper.setMemberCacheUpdated(MemberName.CHECK_CONSTRAINT_CACHE, false);
    } else {
        return false;
    }
    try {
        cacheLock.writeLock().lock();
        // 2. Skip overwriting existing table object
        // (which is present because it was added after prewarm started)
        tableCache.put(CacheUtils.buildTableKey(catName, dbName, tableName), tblWrapper);
        return true;
    } finally {
        cacheLock.writeLock().unlock();
    }
}
Also used : ColumnStatistics(org.apache.hadoop.hive.metastore.api.ColumnStatistics) ColumnStatisticsObj(org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj) SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
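A rough sketch of how a prewarm loop might feed this method. The loop itself and the setTableConstraints setter are illustrative assumptions (the getTableConstraints getter is used above, so a matching setter on TableCacheObjects is assumed):

// Gather the table's satellite objects, then try to seed the cache; a false
// return means "skip this table", not "retry".
TableCacheObjects cacheObjects = new TableCacheObjects();
cacheObjects.setTableConstraints( // assumed setter, mirroring getTableConstraints()
        rawStore.getAllTableConstraints(
                new AllTableConstraintsRequest(dbName, tblName, catName)));
if (!sharedCache.populateTableInCache(table, cacheObjects)) {
    LOG.info("Table " + tblName + " was not added to the cache");
}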

Example 5 with SQLAllTableConstraints

use of org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints in project hive by apache.

the class TestGetAllTableConstraints method fewPresentWithMultipleConstraints.

/**
 * Test where only some constraint types are present, and one type (not-null) has multiple constraints
 * @throws TException
 */
@Test
public void fewPresentWithMultipleConstraints() throws TException {
    Table table = testTables[0];
    SQLAllTableConstraints expected = new SQLAllTableConstraints();
    // Set col1 as primary key constraint in default catalog and database
    List<SQLPrimaryKey> pk = new SQLPrimaryKeyBuilder().onTable(table).addColumn("col1").setConstraintName("col1_pk").build(metaStore.getConf());
    client.addPrimaryKey(pk);
    expected.setPrimaryKeys(pk);
    // Set col2 with unique constraint in default catalog and database
    List<SQLUniqueConstraint> uc = new SQLUniqueConstraintBuilder().onTable(table).addColumn("col2").setConstraintName("col2_unique").build(metaStore.getConf());
    client.addUniqueConstraint(uc);
    expected.setUniqueConstraints(uc);
    // Set col3 with default constraint in default catalog and database
    List<SQLDefaultConstraint> dv = new SQLDefaultConstraintBuilder().onTable(table).addColumn("col3").setConstraintName("col3_default").setDefaultVal(false).build(metaStore.getConf());
    client.addDefaultConstraint(dv);
    expected.setDefaultConstraints(dv);
    // Set col2 and col3 with not null constraints in default catalog and database
    SQLNotNullConstraint nnCol2 = new SQLNotNullConstraint(table.getCatName(), table.getDbName(), table.getTableName(), "col2", "col2_not_null", true, true, true);
    SQLNotNullConstraint nnCol3 = new SQLNotNullConstraint(table.getCatName(), table.getDbName(), table.getTableName(), "col3", "col3_not_null", true, true, true);
    List<SQLNotNullConstraint> nn = new ArrayList<>();
    nn.add(nnCol2);
    nn.add(nnCol3);
    client.addNotNullConstraint(nn);
    expected.setNotNullConstraints(nn);
    expected.setForeignKeys(new ArrayList<>());
    expected.setCheckConstraints(new ArrayList<>());
    // Fetch all constraints for the table in default catalog and database
    AllTableConstraintsRequest request = new AllTableConstraintsRequest(table.getDbName(), table.getTableName(), table.getCatName());
    SQLAllTableConstraints fetched = client.getAllTableConstraints(request);
    Assert.assertEquals(expected, fetched);
}
Also used : SQLPrimaryKeyBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) SQLUniqueConstraint(org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) ArrayList(java.util.ArrayList) SQLDefaultConstraint(org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) SQLNotNullConstraint(org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) SQLDefaultConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder) SQLAllTableConstraints(org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints) SQLUniqueConstraintBuilder(org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder) AllTableConstraintsRequest(org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest) Test(org.junit.Test) MetastoreCheckinTest(org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)
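Note the two setters for constraint types the test never creates: expected.setForeignKeys(new ArrayList<>()) and expected.setCheckConstraints(new ArrayList<>()). Thrift-generated classes such as SQLAllTableConstraints distinguish an unset (null) list from an empty one in equals(), so the expected object has to mirror the empty lists the server returns rather than leave those fields unset.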

Aggregations

SQLAllTableConstraints (org.apache.hadoop.hive.metastore.api.SQLAllTableConstraints) 8
AllTableConstraintsRequest (org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest) 4
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest) 3
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint) 3
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint) 3
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) 3
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint) 3
Table (org.apache.hadoop.hive.metastore.api.Table) 3
Test (org.junit.Test) 3
MetaException (org.apache.hadoop.hive.metastore.api.MetaException) 2
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException) 2
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint) 2
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey) 2
SQLDefaultConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLDefaultConstraintBuilder) 2
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder) 2
SQLUniqueConstraintBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLUniqueConstraintBuilder) 2
FileNotFoundException (java.io.FileNotFoundException) 1
IOException (java.io.IOException) 1
UnknownHostException (java.net.UnknownHostException) 1
ArrayList (java.util.ArrayList) 1