Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class TestHBaseImport, method setupObjectStore:
private void setupObjectStore(RawStore rdbms, String[] roles, String[] dbNames, String[] tokenIds,
    String[] tokens, String[] masterKeys, int now, boolean putConstraintsOnTables)
    throws MetaException, InvalidObjectException, NoSuchObjectException {
  if (roles != null) {
    for (int i = 0; i < roles.length; i++) {
      rdbms.addRole(roles[i], "me");
    }
  }
  for (int i = 0; i < dbNames.length; i++) {
    rdbms.createDatabase(new Database(dbNames[i], "no description", "file:/tmp", emptyParameters));
    List<FieldSchema> cols = new ArrayList<>();
    cols.add(new FieldSchema("col1", "int", "nocomment"));
    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
        serde, null, null, emptyParameters);
    rdbms.createTable(new Table(tableNames[0], dbNames[i], "me", now, now, 0, sd, null,
        emptyParameters, null, null, null));
    if (putConstraintsOnTables) {
      rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i], tableNames[0],
          "col1", 0, dbNames[i] + "_" + pkNames[0], true, false, true)));
    }
    List<FieldSchema> partCols = new ArrayList<>();
    partCols.add(new FieldSchema("region", "string", ""));
    rdbms.createTable(new Table(tableNames[1], dbNames[i], "me", now, now, 0, sd, partCols,
        emptyParameters, null, null, null));
    if (putConstraintsOnTables) {
      rdbms.addPrimaryKeys(Collections.singletonList(new SQLPrimaryKey(dbNames[i], tableNames[1],
          "col1", 0, dbNames[i] + "_" + pkNames[1], true, false, true)));
      rdbms.addForeignKeys(Collections.singletonList(new SQLForeignKey(dbNames[i], tableNames[0],
          "col1", dbNames[i], tableNames[1], "col1", 0, 1, 2, dbNames[i] + "_" + fkNames[1],
          dbNames[i] + "_" + pkNames[0], true, false, true)));
    }
    for (int j = 0; j < partVals.length; j++) {
      StorageDescriptor psd = new StorageDescriptor(sd);
      psd.setLocation("file:/tmp/region=" + partVals[j]);
      Partition part = new Partition(Arrays.asList(partVals[j]), dbNames[i], tableNames[1], now,
          now, psd, emptyParameters);
      rdbms.addPartition(part);
    }
    for (String funcName : funcNames) {
      LOG.debug("Creating new function " + dbNames[i] + "." + funcName);
      // Seconds since the epoch; the division must happen before the int cast,
      // otherwise the raw millisecond value overflows the int.
      rdbms.createFunction(new Function(funcName, dbNames[i], "classname", "ownername",
          PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA,
          Arrays.asList(new ResourceUri(ResourceType.JAR, "uri"))));
    }
    for (String indexName : indexNames) {
      LOG.debug("Creating new index " + dbNames[i] + "." + tableNames[0] + "." + indexName);
      String indexTableName = tableNames[0] + "__" + indexName + "__";
      rdbms.createTable(new Table(indexTableName, dbNames[i], "me", now, now, 0, sd, partCols,
          emptyParameters, null, null, null));
      rdbms.addIndex(new Index(indexName, null, dbNames[i], tableNames[0], now, now,
          indexTableName, sd, emptyParameters, false));
    }
  }
  if (tokenIds != null) {
    for (int i = 0; i < tokenIds.length; i++) {
      rdbms.addToken(tokenIds[i], tokens[i]);
    }
  }
  if (masterKeys != null) {
    for (int i = 0; i < masterKeys.length; i++) {
      masterKeySeqs.add(rdbms.addMasterKey(masterKeys[i]));
    }
  }
}
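For readers tracing the positional arguments above: matching the constructor calls in setupObjectStore against the getter assertions in the next test pins down the parameter order of the Thrift-generated SQLPrimaryKey constructor. A minimal sketch (the literal values are illustrative, not from the tests):

// Sketch: how the positional SQLPrimaryKey arguments map to its getters.
// "salesdb", "orders", and "orders_pk" are hypothetical example values.
SQLPrimaryKey key = new SQLPrimaryKey(
    "salesdb",    // getTable_db()
    "orders",     // getTable_name()
    "col1",       // getColumn_name()
    0,            // getKey_seq()
    "orders_pk",  // getPk_name()
    true,         // isEnable_cstr()
    false,        // isValidate_cstr()
    true);        // isRely_cstr()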
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class TestHBaseImport, method importTablesWithConstraints:
@Test
public void importTablesWithConstraints() throws Exception {
  RawStore rdbms = new ObjectStore();
  rdbms.setConf(conf);
  String[] dbNames = new String[] { "onetabwcdb1", "onetabwcdb2" };
  int now = (int) (System.currentTimeMillis() / 1000);
  setupObjectStore(rdbms, dbNames, now, true);
  // Create the database so I can put the table in it.
  store.createDatabase(new Database(dbNames[0], "no description", "file:/tmp", emptyParameters));
  HBaseImport importer = new HBaseImport("-d", dbNames[0]);
  importer.setConnections(rdbms, store);
  importer.run();
  Database db = store.getDatabase(dbNames[0]);
  Assert.assertNotNull(db);
  Table table = store.getTable(db.getName(), tableNames[1]);
  Assert.assertNotNull(table);
  List<SQLPrimaryKey> pk = store.getPrimaryKeys(dbNames[0], tableNames[1]);
  Assert.assertNotNull(pk);
  Assert.assertEquals(1, pk.size());
  Assert.assertEquals(dbNames[0], pk.get(0).getTable_db());
  Assert.assertEquals(tableNames[1], pk.get(0).getTable_name());
  Assert.assertEquals(0, pk.get(0).getKey_seq());
  Assert.assertEquals("col1", pk.get(0).getColumn_name());
  Assert.assertEquals(dbNames[0] + "_" + pkNames[1], pk.get(0).getPk_name());
  Assert.assertTrue(pk.get(0).isEnable_cstr());
  Assert.assertFalse(pk.get(0).isValidate_cstr());
  Assert.assertTrue(pk.get(0).isRely_cstr());
  List<SQLForeignKey> fk = store.getForeignKeys(dbNames[0], tableNames[0], dbNames[0], tableNames[1]);
  Assert.assertNotNull(fk);
  Assert.assertEquals(1, fk.size());
  Assert.assertEquals(dbNames[0], fk.get(0).getPktable_db());
  Assert.assertEquals(tableNames[0], fk.get(0).getPktable_name());
  Assert.assertEquals("col1", fk.get(0).getPkcolumn_name());
  Assert.assertEquals(dbNames[0], fk.get(0).getFktable_db());
  Assert.assertEquals(tableNames[1], fk.get(0).getFktable_name());
  Assert.assertEquals("col1", fk.get(0).getFkcolumn_name());
  Assert.assertEquals(0, fk.get(0).getKey_seq());
  Assert.assertEquals(1, fk.get(0).getUpdate_rule());
  Assert.assertEquals(2, fk.get(0).getDelete_rule());
  Assert.assertEquals(dbNames[0] + "_" + fkNames[1], fk.get(0).getFk_name());
  Assert.assertTrue(fk.get(0).isEnable_cstr());
  Assert.assertFalse(fk.get(0).isValidate_cstr());
  Assert.assertTrue(fk.get(0).isRely_cstr());
}
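The enable/validate/rely assertions repeat for both key types. A hypothetical helper (not part of TestHBaseImport) could condense them; a sketch:

// Hypothetical helper: check the three constraint flags on a primary key in one call.
// SQLForeignKey exposes identically named flag getters, so an overload covers it too.
private static void assertConstraintFlags(SQLPrimaryKey key, boolean enable, boolean validate, boolean rely) {
  Assert.assertEquals(enable, key.isEnable_cstr());
  Assert.assertEquals(validate, key.isValidate_cstr());
  Assert.assertEquals(rely, key.isRely_cstr());
}

With that, the three primary-key flag checks above collapse to assertConstraintFlags(pk.get(0), true, false, true).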
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class TestHBaseStore, method doublePrimaryKey:
// Try adding a primary key when one already exists
@Test(expected = MetaException.class)
public void doublePrimaryKey() throws Exception {
  String tableName = "pktable";
  String pkName = "test_pk";
  String[] pkColNames = { "col0" };
  Table table = createMultiColumnTable(tableName, "int");
  List<SQLPrimaryKey> pk = Arrays.asList(
      new SQLPrimaryKey(DB, tableName, pkColNames[0], 0, pkName, true, false, true));
  store.createTableWithConstraints(table, pk, null);
  store.addPrimaryKeys(pk);
}
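The expected MetaException is how the store signals a duplicate constraint. A caller that prefers to avoid the exception path can probe first with getPrimaryKeys, the same RawStore call the import test above uses; a sketch:

// Sketch: add the key only when none is recorded yet, instead of relying on MetaException.
List<SQLPrimaryKey> existing = store.getPrimaryKeys(DB, tableName);
if (existing == null || existing.isEmpty()) {
  store.addPrimaryKeys(pk);
}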
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class TestHBaseStore, method addMultiColPrimaryKey:
// Test that we can add a primary key with multiple columns
@Test
public void addMultiColPrimaryKey() throws Exception {
  String tableName = "mcpktable";
  String pkName = "test_pk";
  String[] pkColNames = { "col0", "col1", "col2" };
  Table table = createMultiColumnTable(tableName, "int", "varchar(32)", "decimal(10,2)");
  List<SQLPrimaryKey> pk = Arrays.asList(
      new SQLPrimaryKey(DB, tableName, pkColNames[1], 0, pkName, false, true, true),
      new SQLPrimaryKey(DB, tableName, pkColNames[2], 1, pkName, false, true, true));
  store.createTable(table);
  store.addPrimaryKeys(pk);
  // Read the keys back so the assertions exercise what was actually persisted,
  // not the input list.
  pk = store.getPrimaryKeys(DB, tableName);
  Assert.assertNotNull(pk);
  Assert.assertEquals(2, pk.size());
  SQLPrimaryKey[] sorted = pk.toArray(new SQLPrimaryKey[2]);
  Arrays.sort(sorted, new Comparator<SQLPrimaryKey>() {
    @Override
    public int compare(SQLPrimaryKey o1, SQLPrimaryKey o2) {
      return o1.getColumn_name().compareTo(o2.getColumn_name());
    }
  });
  for (int i = 0; i < 2; i++) {
    Assert.assertEquals(DB, sorted[i].getTable_db());
    Assert.assertEquals(tableName, sorted[i].getTable_name());
    Assert.assertEquals(pkColNames[i + 1], sorted[i].getColumn_name());
    Assert.assertEquals(i, sorted[i].getKey_seq());
    Assert.assertEquals(pkName, sorted[i].getPk_name());
    Assert.assertFalse(sorted[i].isEnable_cstr());
    Assert.assertTrue(sorted[i].isValidate_cstr());
    Assert.assertTrue(sorted[i].isRely_cstr());
  }
}
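On Java 8+, the anonymous Comparator above can be replaced by a method reference with identical behavior:

Arrays.sort(sorted, Comparator.comparing(SQLPrimaryKey::getColumn_name));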
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in project hive by apache.
From the class DbNotificationListener, method onAddPrimaryKey:
/**
 * @param addPrimaryKeyEvent add primary key event
 * @throws MetaException
 */
@Override
public void onAddPrimaryKey(AddPrimaryKeyEvent addPrimaryKeyEvent) throws MetaException {
  List<SQLPrimaryKey> cols = addPrimaryKeyEvent.getPrimaryKeyCols();
  if (!cols.isEmpty()) {
    NotificationEvent event = new NotificationEvent(0, now(), EventType.ADD_PRIMARYKEY.toString(),
        msgFactory.buildAddPrimaryKeyMessage(cols).toString());
    event.setDbName(cols.get(0).getTable_db());
    event.setTableName(cols.get(0).getTable_name());
    process(event, addPrimaryKeyEvent);
  }
}
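onAddPrimaryKey only fires if the listener is registered with the metastore. A sketch of wiring it up, assuming the stock event-listener property and the listener's usual package (org.apache.hive.hcatalog.listener); verify both against your Hive version:

// Sketch: register DbNotificationListener so DDL events reach onAddPrimaryKey.
// HiveConf is org.apache.hadoop.hive.conf.HiveConf.
HiveConf conf = new HiveConf();
conf.set("hive.metastore.event.listeners",
    "org.apache.hive.hcatalog.listener.DbNotificationListener");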