
Example 31 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.

Class MetaStoreDirectSql, method getPrimaryKeys:

public List<SQLPrimaryKey> getPrimaryKeys(String db_name, String tbl_name) throws MetaException {
    List<SQLPrimaryKey> ret = new ArrayList<SQLPrimaryKey>();
    String queryText =
        "SELECT " + DBS + ".\"NAME\", " + TBLS + ".\"TBL_NAME\", "
        + "CASE WHEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" IS NOT NULL THEN " + COLUMNS_V2 + ".\"COLUMN_NAME\" "
        + "ELSE " + PARTITION_KEYS + ".\"PKEY_NAME\" END, "
        + KEY_CONSTRAINTS + ".\"POSITION\", "
        + KEY_CONSTRAINTS + ".\"CONSTRAINT_NAME\", "
        + KEY_CONSTRAINTS + ".\"ENABLE_VALIDATE_RELY\" "
        + " from " + TBLS + " "
        + " INNER JOIN " + KEY_CONSTRAINTS + " ON " + TBLS + ".\"TBL_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_TBL_ID\" "
        + " INNER JOIN " + DBS + " ON " + TBLS + ".\"DB_ID\" = " + DBS + ".\"DB_ID\" "
        + " LEFT OUTER JOIN " + COLUMNS_V2 + " ON " + COLUMNS_V2 + ".\"CD_ID\" = " + KEY_CONSTRAINTS + ".\"PARENT_CD_ID\" AND "
        + " " + COLUMNS_V2 + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" "
        + " LEFT OUTER JOIN " + PARTITION_KEYS + " ON " + TBLS + ".\"TBL_ID\" = " + PARTITION_KEYS + ".\"TBL_ID\" AND "
        + " " + PARTITION_KEYS + ".\"INTEGER_IDX\" = " + KEY_CONSTRAINTS + ".\"PARENT_INTEGER_IDX\" "
        + " WHERE " + KEY_CONSTRAINTS + ".\"CONSTRAINT_TYPE\" = " + MConstraint.PRIMARY_KEY_CONSTRAINT + " AND"
        + (db_name == null ? "" : " " + DBS + ".\"NAME\" = ? AND")
        + (tbl_name == null ? "" : " " + TBLS + ".\"TBL_NAME\" = ? ");
    queryText = queryText.trim();
    // If neither the db nor the table filter was appended, drop the dangling trailing "AND".
    if (queryText.endsWith("AND")) {
        queryText = queryText.substring(0, queryText.length() - 3);
    }
    List<String> pms = new ArrayList<String>();
    if (db_name != null) {
        pms.add(db_name);
    }
    if (tbl_name != null) {
        pms.add(tbl_name);
    }
    Query queryParams = pm.newQuery("javax.jdo.query.SQL", queryText);
    List<Object[]> sqlResult = ensureList(executeWithArray(queryParams, pms.toArray(), queryText));
    if (!sqlResult.isEmpty()) {
        for (Object[] line : sqlResult) {
            // ENABLE_VALIDATE_RELY packs three flags: bit 2 = enable, bit 1 = validate, bit 0 = rely.
            int enableValidateRely = extractSqlInt(line[5]);
            boolean enable = (enableValidateRely & 4) != 0;
            boolean validate = (enableValidateRely & 2) != 0;
            boolean rely = (enableValidateRely & 1) != 0;
            SQLPrimaryKey currKey = new SQLPrimaryKey(extractSqlString(line[0]), extractSqlString(line[1]), extractSqlString(line[2]), extractSqlInt(line[3]), extractSqlString(line[4]), enable, validate, rely);
            ret.add(currKey);
        }
    }
    return ret;
}
Also used: SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Query (javax.jdo.Query), ArrayList (java.util.ArrayList), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint)
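Both the direct-SQL path above and the JDO path in Example 33 decode the single ENABLE_VALIDATE_RELY integer into the three constraint flags. Below is a minimal standalone sketch of that bit packing; the class and method names are illustrative only and not part of the metastore API.

// Sketch of the ENABLE_VALIDATE_RELY bit packing used above (illustrative names).
// Bit 2 (value 4) = ENABLE, bit 1 (value 2) = VALIDATE, bit 0 (value 1) = RELY.
public class EnableValidateRelyCodec {

    static int encode(boolean enable, boolean validate, boolean rely) {
        return (enable ? 4 : 0) | (validate ? 2 : 0) | (rely ? 1 : 0);
    }

    static boolean[] decode(int enableValidateRely) {
        return new boolean[] {
            (enableValidateRely & 4) != 0,  // enable
            (enableValidateRely & 2) != 0,  // validate
            (enableValidateRely & 1) != 0   // rely
        };
    }

    public static void main(String[] args) {
        int packed = encode(true, false, true);
        boolean[] flags = decode(packed);
        System.out.println("packed=" + packed + " enable=" + flags[0]
                + " validate=" + flags[1] + " rely=" + flags[2]);
    }
}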

Example 32 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.

Class TestObjectStore, method testTableOps:

/**
 * Tests basic table operations: create, alter, listing, primary/foreign key constraints, and drop.
 */
@Test
public void testTableOps() throws MetaException, InvalidObjectException, NoSuchObjectException, InvalidInputException {
    Database db1 = new Database(DB1, "description", "locationurl", null);
    objectStore.createDatabase(db1);
    StorageDescriptor sd1 = new StorageDescriptor(ImmutableList.of(new FieldSchema("pk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
    HashMap<String, String> params = new HashMap<>();
    params.put("EXTERNAL", "false");
    Table tbl1 = new Table(TABLE1, DB1, "owner", 1, 2, 3, sd1, null, params, null, null, "MANAGED_TABLE");
    objectStore.createTable(tbl1);
    List<String> tables = objectStore.getAllTables(DB1);
    Assert.assertEquals(1, tables.size());
    Assert.assertEquals(TABLE1, tables.get(0));
    StorageDescriptor sd2 = new StorageDescriptor(ImmutableList.of(new FieldSchema("fk_col", "double", null)), "location", null, null, false, 0, new SerDeInfo("SerDeName", "serializationLib", null), null, null, null);
    Table newTbl1 = new Table("new" + TABLE1, DB1, "owner", 1, 2, 3, sd2, null, params, null, null, "MANAGED_TABLE");
    objectStore.alterTable(DB1, TABLE1, newTbl1);
    tables = objectStore.getTables(DB1, "new*");
    Assert.assertEquals(1, tables.size());
    Assert.assertEquals("new" + TABLE1, tables.get(0));
    objectStore.createTable(tbl1);
    tables = objectStore.getAllTables(DB1);
    Assert.assertEquals(2, tables.size());
    List<SQLForeignKey> foreignKeys = objectStore.getForeignKeys(DB1, TABLE1, null, null);
    Assert.assertEquals(0, foreignKeys.size());
    SQLPrimaryKey pk = new SQLPrimaryKey(DB1, TABLE1, "pk_col", 1, "pk_const_1", false, false, false);
    objectStore.addPrimaryKeys(ImmutableList.of(pk));
    SQLForeignKey fk = new SQLForeignKey(DB1, TABLE1, "pk_col", DB1, "new" + TABLE1, "fk_col", 1, 0, 0, "fk_const_1", "pk_const_1", false, false, false);
    objectStore.addForeignKeys(ImmutableList.of(fk));
    // Retrieve from PK side
    foreignKeys = objectStore.getForeignKeys(null, null, DB1, "new" + TABLE1);
    Assert.assertEquals(1, foreignKeys.size());
    List<SQLForeignKey> fks = objectStore.getForeignKeys(null, null, DB1, "new" + TABLE1);
    if (fks != null) {
        for (SQLForeignKey fkcol : fks) {
            objectStore.dropConstraint(fkcol.getFktable_db(), fkcol.getFktable_name(), fkcol.getFk_name());
        }
    }
    // Retrieve from FK side
    foreignKeys = objectStore.getForeignKeys(DB1, TABLE1, null, null);
    Assert.assertEquals(0, foreignKeys.size());
    // Retrieve from PK side
    foreignKeys = objectStore.getForeignKeys(null, null, DB1, "new" + TABLE1);
    Assert.assertEquals(0, foreignKeys.size());
    objectStore.dropTable(DB1, TABLE1);
    tables = objectStore.getAllTables(DB1);
    Assert.assertEquals(1, tables.size());
    objectStore.dropTable(DB1, "new" + TABLE1);
    tables = objectStore.getAllTables(DB1);
    Assert.assertEquals(0, tables.size());
    objectStore.dropDatabase(DB1);
}
Also used: SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Table (org.apache.hadoop.hive.metastore.api.Table), SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), SerDeInfo (org.apache.hadoop.hive.metastore.api.SerDeInfo), Database (org.apache.hadoop.hive.metastore.api.Database), StorageDescriptor (org.apache.hadoop.hive.metastore.api.StorageDescriptor), MetastoreUnitTest (org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest), Test (org.junit.Test)
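The positional Thrift constructors used in the test are hard to read on their own. The sketch below labels what each SQLPrimaryKey and SQLForeignKey argument corresponds to and reads the values back through the generated getters; it assumes only that the hive-metastore API classes are on the classpath, and the class name ConstraintArgsDemo and the literal names are made up for illustration.

import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

// Illustrative only: spells out the positional constructor arguments used in the test above.
public class ConstraintArgsDemo {
    public static void main(String[] args) {
        SQLPrimaryKey pk = new SQLPrimaryKey(
                "db1",        // table_db
                "table1",     // table_name
                "pk_col",     // column_name
                1,            // key_seq (1-based position within the key)
                "pk_const_1", // pk_name (constraint name)
                false,        // enable_cstr
                false,        // validate_cstr
                false);       // rely_cstr
        System.out.println(pk.getTable_db() + "." + pk.getTable_name()
                + " key " + pk.getPk_name() + " on " + pk.getColumn_name());

        SQLForeignKey fk = new SQLForeignKey(
                "db1", "table1", "pk_col",    // referenced (PK) side: db, table, column
                "db1", "newtable1", "fk_col", // referencing (FK) side: db, table, column
                1,                            // key_seq
                0, 0,                         // update_rule, delete_rule
                "fk_const_1", "pk_const_1",   // fk_name, pk_name
                false, false, false);         // enable, validate, rely
        System.out.println(fk.getFktable_name() + "." + fk.getFkcolumn_name()
                + " -> " + fk.getPktable_name() + "." + fk.getPkcolumn_name());
    }
}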

Example 33 with SQLPrimaryKey

Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.

Class ObjectStore, method getPrimaryKeysViaJdo:

private List<SQLPrimaryKey> getPrimaryKeysViaJdo(String db_name, String tbl_name) throws MetaException {
    boolean commited = false;
    List<SQLPrimaryKey> primaryKeys = null;
    Query query = null;
    try {
        openTransaction();
        query = pm.newQuery(MConstraint.class, "parentTable.tableName == tbl_name && parentTable.database.name == db_name &&" + " constraintType == MConstraint.PRIMARY_KEY_CONSTRAINT");
        query.declareParameters("java.lang.String tbl_name, java.lang.String db_name");
        Collection<?> constraints = (Collection<?>) query.execute(tbl_name, db_name);
        pm.retrieveAll(constraints);
        primaryKeys = new ArrayList<>();
        for (Iterator<?> i = constraints.iterator(); i.hasNext(); ) {
            MConstraint currPK = (MConstraint) i.next();
            List<MFieldSchema> cols = currPK.getParentColumn() != null ? currPK.getParentColumn().getCols() : currPK.getParentTable().getPartitionKeys();
            int enableValidateRely = currPK.getEnableValidateRely();
            boolean enable = (enableValidateRely & 4) != 0;
            boolean validate = (enableValidateRely & 2) != 0;
            boolean rely = (enableValidateRely & 1) != 0;
            primaryKeys.add(new SQLPrimaryKey(db_name, tbl_name, cols.get(currPK.getParentIntegerIndex()).getName(), currPK.getPosition(), currPK.getConstraintName(), enable, validate, rely));
        }
        commited = commitTransaction();
    } finally {
        rollbackAndCleanup(commited, query);
    }
    return primaryKeys;
}
Also used: SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey), Query (javax.jdo.Query), MFieldSchema (org.apache.hadoop.hive.metastore.model.MFieldSchema), MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint), Collection (java.util.Collection), SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint), SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint), SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint), SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint)
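Callers normally do not invoke getPrimaryKeysViaJdo directly; they go through the metastore Thrift client, which is where PrimaryKeysRequest from the aggregation list below comes in. A hedged sketch of that client-side call follows, assuming a metastore reachable through the HiveConf; the database and table names are placeholders.

import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;

// Sketch: fetching primary keys through the public client API, which routes to
// ObjectStore/MetaStoreDirectSql on the server side. Assumes the metastore URI
// (or an embedded metastore) is configured in the HiveConf.
public class PrimaryKeyLookup {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
        try {
            List<SQLPrimaryKey> keys =
                    client.getPrimaryKeys(new PrimaryKeysRequest("default", "my_table"));
            for (SQLPrimaryKey key : keys) {
                System.out.println(key.getPk_name() + ": " + key.getColumn_name()
                        + " (position " + key.getKey_seq() + ")");
            }
        } finally {
            client.close();
        }
    }
}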

Aggregations

SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 33 usages
SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 17 usages
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 14 usages
ArrayList (java.util.ArrayList): 12 usages
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 10 usages
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 8 usages
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 8 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 7 usages
DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork): 7 usages
Test (org.junit.Test): 7 usages
AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc): 6 usages
IOException (java.io.IOException): 4 usages
Serializable (java.io.Serializable): 4 usages
HashMap (java.util.HashMap): 4 usages
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 4 usages
PrimaryKeysRequest (org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest): 4 usages
MConstraint (org.apache.hadoop.hive.metastore.model.MConstraint): 4 usages
HashSet (java.util.HashSet): 3 usages
Database (org.apache.hadoop.hive.metastore.api.Database): 3 usages
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 3 usages