Example 26 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

The class HBaseStore, method addForeignKeys.

@Override
public void addForeignKeys(List<SQLForeignKey> fks) throws InvalidObjectException, MetaException {
    boolean commit = false;
    openTransaction();
    try {
        // Fetch the existing keys (if any) and add in these new ones
        List<SQLForeignKey> existing = getHBase().getForeignKeys(fks.get(0).getFktable_db(), fks.get(0).getFktable_name());
        if (existing == null)
            existing = new ArrayList<>(fks.size());
        existing.addAll(fks);
        getHBase().putForeignKeys(existing);
        commit = true;
    } catch (IOException e) {
        LOG.error("Error writing foreign keys", e);
        throw new MetaException("Error writing foreign keys: " + e.getMessage());
    } finally {
        commitOrRoleBack(commit);
    }
}
Also used : SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) ArrayList(java.util.ArrayList) IOException(java.io.IOException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)
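
As a quick usage sketch, the fourteen positional arguments of the SQLForeignKey constructor, in the order the getter assertions in the later examples confirm, look like this (the database, table, and constraint names here are hypothetical, and store stands for any configured store exposing addForeignKeys):

List<SQLForeignKey> fks = Collections.singletonList(new SQLForeignKey(
    "default", "customers", "id",        // pktable_db, pktable_name, pkcolumn_name (referenced side)
    "default", "orders", "customer_id",  // fktable_db, fktable_name, fkcolumn_name (referencing side)
    0,                                   // key_seq: position within a composite key, starting at 0
    1, 2,                                // update_rule, delete_rule
    "orders_customers_fk",               // fk_name: the constraint's own name
    "customers_pk",                      // pk_name: name of the referenced primary key
    true, false, true));                 // enable_cstr, validate_cstr, rely_cstr
store.addForeignKeys(fks);

Note that addForeignKeys as implemented above appends rather than replaces: it reads whatever keys the child table already has and writes back the union, which is what lets Example 29 add a second constraint to a table that already has one.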

Example 27 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

The class TestHBaseImport, method setupObjectStore.

private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames, String[] tokenIds, String[] tokens, String[] masterKeys, int now, boolean putConstraintsOnTables) throws MetaException, InvalidObjectException, NoSuchObjectException {
    if (roles != null) {
        for (int i = 0; i < roles.length; i++) {
            rdbms.addRole(roles[i], "me");
        }
    }
    for (int i = 0; i < dbNames.length; i++) {
        rdbms.createDatabase(new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
        List<FieldSchema> cols = new ArrayList<>();
        cols.add(new FieldSchema("col1", "int", "nocomment"));
        SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
        StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, serde, null, null, emptyParameters);
        rdbms.createTable(new Table(tableNames[0], dbNames[i], "me", now, now, 0, sd, null, emptyParameters, null, null, null));
        if (putConstraintsOnTables) {
            rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i], tableNames[0], "col1", 0, dbNames[i] + "_" + pkNames[0], true, false, true)));
        }
        List<FieldSchema> partCols = new ArrayList<>();
        partCols.add(new FieldSchema("region", "string", ""));
        rdbms.createTable(new Table(tableNames[1], dbNames[i], "me", now, now, 0, sd, partCols, emptyParameters, null, null, null));
        if (putConstraintsOnTables) {
            rdbms.addPrimaryKeys(Arrays.asList(new SQLPrimaryKey(dbNames[i], tableNames[1], "col1", 0, dbNames[i] + "_" + pkNames[1], true, false, true)));
            rdbms.addForeignKeys(Collections.singletonList(new SQLForeignKey(dbNames[i], tableNames[0], "col1", dbNames[i], tableNames[1], "col1", 0, 1, 2, dbNames[i] + "_" + fkNames[1], dbNames[i] + "_" + pkNames[0], true, false, true)));
        }
        for (int j = 0; j < partVals.length; j++) {
            StorageDescriptor psd = new StorageDescriptor(sd);
            psd.setLocation("file:/tmp/region=" + partVals[j]);
            Partition part = new Partition(Arrays.asList(partVals[j]), dbNames[i], tableNames[1], now, now, psd, emptyParameters);
            rdbms.addPartition(part);
        }
        for (String funcName : funcNames) {
            LOG.debug("Creating new function " + dbNames[i] + "." + funcName);
            rdbms.createFunction(new Function(funcName, dbNames[i], "classname", "ownername", PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA, Arrays.asList(new ResourceUri(ResourceType.JAR, "uri"))));
        }
        for (String indexName : indexNames) {
            LOG.debug("Creating new index " + dbNames[i] + "." + tableNames[0] + "." + indexName);
            String indexTableName = tableNames[0] + "__" + indexName + "__";
            rdbms.createTable(new Table(indexTableName, dbNames[i], "me", now, now, 0, sd, partCols, emptyParameters, null, null, null));
            rdbms.addIndex(new Index(indexName, null, dbNames[i], tableNames[0], now, now, indexTableName, sd, emptyParameters, false));
        }
    }
    if (tokenIds != null) {
        for (int i = 0; i < tokenIds.length; i++) rdbms.addToken(tokenIds[i], tokens[i]);
    }
    if (masterKeys != null) {
        for (int i = 0; i < masterKeys.length; i++) {
            masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
        }
    }
}
Also used : Partition(org.apache.hadoop.hive.metastore.api.Partition) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) SerDeInfo(org.apache.hadoop.hive.metastore.api.SerDeInfo) ArrayList(java.util.ArrayList) StorageDescriptor(org.apache.hadoop.hive.metastore.api.StorageDescriptor) Index(org.apache.hadoop.hive.metastore.api.Index) Function(org.apache.hadoop.hive.metastore.api.Function) Database(org.apache.hadoop.hive.metastore.api.Database)
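
Example 28 below invokes a shorter convenience overload of this helper; calling the full signature shown here directly would look like the following sketch, which skips the role, token, and master-key paths by passing null (the database names are hypothetical):

int now = (int) (System.currentTimeMillis() / 1000);
setupObjectStore(rdbms, null, new String[] { "db1", "db2" }, null, null, null, now, true);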

Example 28 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

The class TestHBaseImport, method importTablesWithConstraints.

@Test
public void importTablesWithConstraints() throws Exception {
    RawStore rdbms = new ObjectStore();
    rdbms.setConf(conf);
    String[] dbNames = new String[] { "onetabwcdb1", "onetabwcdb2" };
    int now = (int) (System.currentTimeMillis() / 1000);
    setupObjectStore(rdbms, dbNames, now, true);
    // Create the database so I can put the table in it.
    store.createDatabase(new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
    HBaseImport importer = new HBaseImport("-d", dbNames[0]);
    importer.setConnections(rdbms, store);
    importer.run();
    Database db = store.getDatabase(dbNames[0]);
    Assert.assertNotNull(db);
    Table table = store.getTable(db.getName(), tableNames[1]);
    Assert.assertNotNull(table);
    List<SQLPrimaryKey> pk = store.getPrimaryKeys(dbNames[0], tableNames[1]);
    Assert.assertNotNull(pk);
    Assert.assertEquals(1, pk.size());
    Assert.assertEquals(dbNames[0], pk.get(0).getTable_db());
    Assert.assertEquals(tableNames[1], pk.get(0).getTable_name());
    Assert.assertEquals(0, pk.get(0).getKey_seq());
    Assert.assertEquals("col1", pk.get(0).getColumn_name());
    Assert.assertEquals(dbNames[0] + "_" + pkNames[1], pk.get(0).getPk_name());
    Assert.assertTrue(pk.get(0).isEnable_cstr());
    Assert.assertFalse(pk.get(0).isValidate_cstr());
    Assert.assertTrue(pk.get(0).isRely_cstr());
    List<SQLForeignKey> fk = store.getForeignKeys(dbNames[0], tableNames[0], dbNames[0], tableNames[1]);
    Assert.assertNotNull(fk);
    Assert.assertEquals(1, fk.size());
    Assert.assertEquals(dbNames[0], fk.get(0).getPktable_db());
    Assert.assertEquals(tableNames[0], fk.get(0).getPktable_name());
    Assert.assertEquals("col1", fk.get(0).getPkcolumn_name());
    Assert.assertEquals(dbNames[0], fk.get(0).getFktable_db());
    Assert.assertEquals(tableNames[1], fk.get(0).getFktable_name());
    Assert.assertEquals("col1", fk.get(0).getFkcolumn_name());
    Assert.assertEquals(0, fk.get(0).getKey_seq());
    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
    Assert.assertEquals(2, fk.get(0).getDelete_rule());
    Assert.assertEquals(dbNames[0] + "_" + fkNames[1], fk.get(0).getFk_name());
    Assert.assertTrue(fk.get(0).isEnable_cstr());
    Assert.assertFalse(fk.get(0).isValidate_cstr());
    Assert.assertTrue(fk.get(0).isRely_cstr());
}
Also used : TestObjectStore(org.apache.hadoop.hive.metastore.TestObjectStore) ObjectStore(org.apache.hadoop.hive.metastore.ObjectStore) SQLPrimaryKey(org.apache.hadoop.hive.metastore.api.SQLPrimaryKey) Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Database(org.apache.hadoop.hive.metastore.api.Database) RawStore(org.apache.hadoop.hive.metastore.RawStore) Test(org.junit.Test)
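
Two details of the store API this test leans on: getForeignKeys takes the parent (primary-key) side first and the child (foreign-key) side second, and passing null for the parent side returns every foreign key on the child table, as the next example demonstrates. A sketch with hypothetical variable names:

// Foreign keys pointing from the child table back to one specific parent table:
List<SQLForeignKey> fromOneParent = store.getForeignKeys(parentDb, parentTable, childDb, childTable);
// All foreign keys on the child table, whatever their parent:
List<SQLForeignKey> allOnChild = store.getForeignKeys(null, null, childDb, childTable);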

Example 29 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

The class TestHBaseStore, method addSecondForeignKeys.

// Test that we can add a foreign key when one already exists
@Test
public void addSecondForeignKeys() throws Exception {
    String tableName = "mcfktable";
    String pkTable = "pktable";
    String pkTable2 = "pktable2";
    String pkName = "test_pk";
    String pkName2 = "test_pk2";
    String fkName = "test_fk";
    String fkName2 = "test_fk2";
    String[] fkColNames = { "col0", "col1", "col2" };
    String[] pkColNames = { "pcol0", "pcol1" };
    String[] pkColNames2 = { "p2col0" };
    Table table = createMultiColumnTable(tableName, "int", "double", "timestamp");
    List<SQLForeignKey> fk = Arrays.asList(new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[1], 0, 1, 2, fkName, pkName, true, false, true), new SQLForeignKey(DB, pkTable, pkColNames[1], DB, tableName, fkColNames[2], 1, 1, 2, fkName, pkName, true, false, true));
    store.createTable(table);
    store.addForeignKeys(fk);
    fk = Arrays.asList(new SQLForeignKey(DB, pkTable2, pkColNames2[0], DB, tableName, fkColNames[0], 0, 1, 2, fkName2, pkName2, true, false, true));
    store.addForeignKeys(fk);
    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(2, fk.size());
    SQLForeignKey[] sorted = fk.toArray(new SQLForeignKey[2]);
    Arrays.sort(sorted, new Comparator<SQLForeignKey>() {

        @Override
        public int compare(SQLForeignKey o1, SQLForeignKey o2) {
            if (o1.getFk_name().equals(o2.getFk_name())) {
                return o1.getFkcolumn_name().compareTo(o2.getFkcolumn_name());
            } else {
                return o1.getFk_name().compareTo(o2.getFk_name());
            }
        }
    });
    for (int i = 0; i < 2; i++) {
        Assert.assertEquals(DB, sorted[i].getPktable_db());
        Assert.assertEquals(pkTable, sorted[i].getPktable_name());
        Assert.assertEquals(pkColNames[i], sorted[i].getPkcolumn_name());
        Assert.assertEquals(DB, sorted[i].getFktable_db());
        Assert.assertEquals(tableName, sorted[i].getFktable_name());
        Assert.assertEquals(fkColNames[i + 1], sorted[i].getFkcolumn_name());
        Assert.assertEquals(i, sorted[i].getKey_seq());
        Assert.assertEquals(1, sorted[i].getUpdate_rule());
        Assert.assertEquals(2, sorted[i].getDelete_rule());
        Assert.assertEquals(fkName, sorted[i].getFk_name());
        Assert.assertEquals(pkName, sorted[i].getPk_name());
        Assert.assertTrue(sorted[i].isEnable_cstr());
        Assert.assertFalse(sorted[i].isValidate_cstr());
        Assert.assertTrue(sorted[i].isRely_cstr());
    }
    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(1, fk.size());
    Assert.assertEquals(DB, fk.get(0).getPktable_db());
    Assert.assertEquals(pkTable2, fk.get(0).getPktable_name());
    Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name());
    Assert.assertEquals(DB, fk.get(0).getFktable_db());
    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
    Assert.assertEquals(0, fk.get(0).getKey_seq());
    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
    Assert.assertEquals(2, fk.get(0).getDelete_rule());
    Assert.assertEquals(fkName2, fk.get(0).getFk_name());
    Assert.assertEquals(pkName2, fk.get(0).getPk_name());
    Assert.assertTrue(fk.get(0).isEnable_cstr());
    Assert.assertFalse(fk.get(0).isValidate_cstr());
    Assert.assertTrue(fk.get(0).isRely_cstr());
    // Check that passing null gets all the foreign keys
    fk = store.getForeignKeys(null, null, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(3, fk.size());
    store.dropConstraint(DB, tableName, fkName);
    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(1, fk.size());
    Assert.assertEquals(DB, fk.get(0).getPktable_db());
    Assert.assertEquals(pkTable2, fk.get(0).getPktable_name());
    Assert.assertEquals(pkColNames2[0], fk.get(0).getPkcolumn_name());
    Assert.assertEquals(DB, fk.get(0).getFktable_db());
    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
    Assert.assertEquals(0, fk.get(0).getKey_seq());
    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
    Assert.assertEquals(2, fk.get(0).getDelete_rule());
    Assert.assertEquals(fkName2, fk.get(0).getFk_name());
    Assert.assertEquals(pkName2, fk.get(0).getPk_name());
    Assert.assertTrue(fk.get(0).isEnable_cstr());
    Assert.assertFalse(fk.get(0).isValidate_cstr());
    Assert.assertTrue(fk.get(0).isRely_cstr());
    store.dropConstraint(DB, tableName, fkName2);
    fk = store.getForeignKeys(DB, pkTable2, DB, tableName);
    Assert.assertNull(fk);
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Test(org.junit.Test)
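
Two behaviors worth emphasizing from this test: a multi-column foreign key is stored as several SQLForeignKey entries that share one fk_name and are ordered by key_seq, and dropConstraint removes every entry under that name in one call. Note also the final assertion: once no keys remain, getForeignKeys returns null rather than an empty list, so callers should guard for both, for example:

List<SQLForeignKey> remaining = store.getForeignKeys(DB, pkTable2, DB, tableName);
if (remaining == null || remaining.isEmpty()) {
    // no foreign keys left; HBaseStore.addForeignKeys (Example 26)
    // null-checks for exactly this case before appending
}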

Example 30 with SQLForeignKey

Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.

The class TestHBaseStore, method createTableWithForeignKey.

@Test
public void createTableWithForeignKey() throws Exception {
    String tableName = "fktable";
    String pkTable = "pktable";
    String pkName = "test_pk";
    String fkName = "test_fk";
    String[] fkColNames = { "col0" };
    String[] pkColNames = { "pcol0" };
    Table table = createMultiColumnTable(tableName, "int");
    List<SQLForeignKey> fk = Arrays.asList(new SQLForeignKey(DB, pkTable, pkColNames[0], DB, tableName, fkColNames[0], 0, 1, 2, fkName, pkName, true, false, false));
    store.createTableWithConstraints(table, null, fk);
    fk = store.getForeignKeys(DB, pkTable, DB, tableName);
    Assert.assertNotNull(fk);
    Assert.assertEquals(1, fk.size());
    Assert.assertEquals(DB, fk.get(0).getPktable_db());
    Assert.assertEquals(pkTable, fk.get(0).getPktable_name());
    Assert.assertEquals(pkColNames[0], fk.get(0).getPkcolumn_name());
    Assert.assertEquals(DB, fk.get(0).getFktable_db());
    Assert.assertEquals(tableName, fk.get(0).getFktable_name());
    Assert.assertEquals(fkColNames[0], fk.get(0).getFkcolumn_name());
    Assert.assertEquals(0, fk.get(0).getKey_seq());
    Assert.assertEquals(1, fk.get(0).getUpdate_rule());
    Assert.assertEquals(2, fk.get(0).getDelete_rule());
    Assert.assertEquals(fkName, fk.get(0).getFk_name());
    Assert.assertEquals(pkName, fk.get(0).getPk_name());
    Assert.assertTrue(fk.get(0).isEnable_cstr());
    Assert.assertFalse(fk.get(0).isValidate_cstr());
    Assert.assertFalse(fk.get(0).isRely_cstr());
}
Also used : Table(org.apache.hadoop.hive.metastore.api.Table) SQLForeignKey(org.apache.hadoop.hive.metastore.api.SQLForeignKey) Test(org.junit.Test)
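
createTableWithConstraints registers the table and its constraints in one call, and passing null for the primary-key list is allowed, as this test shows. A minimal sketch along the same lines (createMultiColumnTable is the test class's own helper):

Table table = createMultiColumnTable("fktable", "int");
List<SQLForeignKey> fk = Arrays.asList(new SQLForeignKey(
    DB, "pktable", "pcol0", DB, "fktable", "col0",
    0, 1, 2, "test_fk", "test_pk",
    true, false, false));                // rely_cstr is false here, unlike the earlier examples
store.createTableWithConstraints(table, null, fk);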

Aggregations

SQLForeignKey (org.apache.hadoop.hive.metastore.api.SQLForeignKey): 46
SQLPrimaryKey (org.apache.hadoop.hive.metastore.api.SQLPrimaryKey): 28
Test (org.junit.Test): 20
Table (org.apache.hadoop.hive.metastore.api.Table): 19
SQLUniqueConstraint (org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint): 15
ArrayList (java.util.ArrayList): 13
SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint): 13
SQLNotNullConstraint (org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint): 13
SQLForeignKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLForeignKeyBuilder): 13
SQLPrimaryKeyBuilder (org.apache.hadoop.hive.metastore.client.builder.SQLPrimaryKeyBuilder): 13
SQLDefaultConstraint (org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint): 12
MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest): 11
ForeignKeysRequest (org.apache.hadoop.hive.metastore.api.ForeignKeysRequest): 9
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 6
IOException (java.io.IOException): 5
HashMap (java.util.HashMap): 5
InvalidObjectException (org.apache.hadoop.hive.metastore.api.InvalidObjectException): 5
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 5
TApplicationException (org.apache.thrift.TApplicationException): 5
Tree (org.antlr.runtime.tree.Tree): 3