Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class HBaseReadWrite, method printOneTable:
private String printOneTable(Result result) throws IOException, TException {
  byte[] key = result.getRow();
  HBaseUtils.StorageDescriptorParts sdParts =
      HBaseUtils.deserializeTable(key, result.getValue(CATALOG_CF, CATALOG_COL));
  StringBuilder builder = new StringBuilder();
  builder.append(dumpThriftObject(sdParts.containingTable))
      .append(" sdHash: ")
      .append(Base64.encodeBase64URLSafeString(sdParts.sdHash))
      .append(" stats:");
  NavigableMap<byte[], byte[]> statsCols = result.getFamilyMap(STATS_CF);
  for (Map.Entry<byte[], byte[]> statsCol : statsCols.entrySet()) {
    builder.append(" column ")
        .append(new String(statsCol.getKey(), HBaseUtils.ENCODING))
        .append(": ");
    ColumnStatistics pcs = buildColStats(key, true);
    ColumnStatisticsObj cso = HBaseUtils.deserializeStatsForOneColumn(pcs, statsCol.getValue());
    builder.append(dumpThriftObject(cso));
  }
  // Add the primary key
  List<SQLPrimaryKey> pk =
      getPrimaryKey(sdParts.containingTable.getDbName(), sdParts.containingTable.getTableName());
  if (pk != null && pk.size() > 0) {
    builder.append(" primary key: ");
    for (SQLPrimaryKey pkcol : pk) builder.append(dumpThriftObject(pkcol));
  }
  // Add any foreign keys
  List<SQLForeignKey> fks =
      getForeignKeys(sdParts.containingTable.getDbName(), sdParts.containingTable.getTableName());
  if (fks != null && fks.size() > 0) {
    builder.append(" foreign keys: ");
    for (SQLForeignKey fkcol : fks) builder.append(dumpThriftObject(fkcol));
  }
  return builder.toString();
}
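For orientation, here is a minimal caller-side sketch, not taken from the Hive source, that fetches the same key metadata this method appends to its dump. It assumes "store" is any RawStore implementation (e.g. HBaseStore) with the public getPrimaryKeys/getForeignKeys accessors, and the "default"/"sales" names are invented for illustration:

// Hypothetical sketch: dump the primary and foreign keys of an invented table
// via the public RawStore accessors rather than the private helpers above.
static void dumpKeys(RawStore store) throws MetaException {
  List<SQLPrimaryKey> pk = store.getPrimaryKeys("default", "sales");
  if (pk != null && pk.size() > 0) System.out.println(" primary key: " + pk);
  // null parent filters: return every foreign key defined on the child table
  List<SQLForeignKey> fks = store.getForeignKeys(null, null, "default", "sales");
  if (fks != null && fks.size() > 0) System.out.println(" foreign keys: " + fks);
}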
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class BaseSemanticAnalyzer, method processForeignKeys:
/**
* Process the foreign keys from the AST and populate the foreign keys in the SQLForeignKey list
* @param parent Parent of the foreign key token node
* @param child Foreign Key token node
* @param foreignKeys SQLForeignKey list
* @throws SemanticException
*/
protected static void processForeignKeys(ASTNode parent, ASTNode child,
    List<SQLForeignKey> foreignKeys) throws SemanticException {
  String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
  // The ANTLR grammar looks like:
  // 1. KW_CONSTRAINT idfr=identifier KW_FOREIGN KW_KEY fkCols=columnParenthesesList
  //    KW_REFERENCES tabName=tableName parCols=columnParenthesesList
  //    enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
  //    -> ^(TOK_FOREIGN_KEY $idfr $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
  //    when the user specifies the constraint name (i.e. child.getChildCount() == 7)
  // 2. KW_FOREIGN KW_KEY fkCols=columnParenthesesList
  //    KW_REFERENCES tabName=tableName parCols=columnParenthesesList
  //    enableSpec=enableSpecification validateSpec=validateSpecification relySpec=relySpecification
  //    -> ^(TOK_FOREIGN_KEY $fkCols $tabName $parCols $relySpec $enableSpec $validateSpec)
  //    when the user does not specify the constraint name (i.e. child.getChildCount() == 6)
  boolean userSpecifiedConstraintName = child.getChildCount() == 7;
  int fkIndex = userSpecifiedConstraintName ? 1 : 0;
  int ptIndex = fkIndex + 1;
  int pkIndex = ptIndex + 1;
  int relyIndex = pkIndex + 1;
  if (child.getChildCount() <= fkIndex || child.getChildCount() <= pkIndex
      || child.getChildCount() <= ptIndex) {
    throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg());
  }
  String[] parentDBTbl = getQualifiedTableName((ASTNode) child.getChild(ptIndex));
  if (child.getChild(fkIndex).getChildCount() != child.getChild(pkIndex).getChildCount()) {
    throw new SemanticException(ErrorMsg.INVALID_FK_SYNTAX.getMsg(
        " The number of foreign key columns should be same as number of parent key columns "));
  }
  for (int j = 0; j < child.getChild(fkIndex).getChildCount(); j++) {
    SQLForeignKey sqlForeignKey = new SQLForeignKey();
    Tree fkgrandChild = child.getChild(fkIndex).getChild(j);
    checkColumnName(fkgrandChild.getText());
    // Per the rewrite above, the last three children are relySpec, enableSpec and validateSpec.
    boolean rely = child.getChild(relyIndex).getType() == HiveParser.TOK_VALIDATE;
    boolean enable = child.getChild(relyIndex + 1).getType() == HiveParser.TOK_ENABLE;
    boolean validate = child.getChild(relyIndex + 2).getType() == HiveParser.TOK_VALIDATE;
    if (enable) {
      throw new SemanticException(
          ErrorMsg.INVALID_FK_SYNTAX.getMsg(" ENABLE feature not supported yet"));
    }
    if (validate) {
      throw new SemanticException(
          ErrorMsg.INVALID_FK_SYNTAX.getMsg(" VALIDATE feature not supported yet"));
    }
    sqlForeignKey.setRely_cstr(rely);
    sqlForeignKey.setPktable_db(parentDBTbl[0]);
    sqlForeignKey.setPktable_name(parentDBTbl[1]);
    sqlForeignKey.setFktable_db(qualifiedTabName[0]);
    sqlForeignKey.setFktable_name(qualifiedTabName[1]);
    sqlForeignKey.setFkcolumn_name(unescapeIdentifier(fkgrandChild.getText().toLowerCase()));
    Tree pkgrandChild = child.getChild(pkIndex).getChild(j);
    sqlForeignKey.setPkcolumn_name(unescapeIdentifier(pkgrandChild.getText().toLowerCase()));
    sqlForeignKey.setKey_seq(j + 1);
    if (userSpecifiedConstraintName) {
      sqlForeignKey.setFk_name(unescapeIdentifier(child.getChild(0).getText().toLowerCase()));
    }
    foreignKeys.add(sqlForeignKey);
  }
}
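To make the mapping concrete, here is a hedged sketch, not from the Hive source, of the SQLForeignKey entry this method would populate for a named single-column constraint such as CONSTRAINT fk1 FOREIGN KEY (cust_id) REFERENCES customers (id) DISABLE NOVALIDATE RELY on an invented table default.sales:

// Hypothetical: one SQLForeignKey per foreign key column; all names invented.
List<SQLForeignKey> foreignKeys = new ArrayList<>();
SQLForeignKey fk = new SQLForeignKey();
fk.setFk_name("fk1");                // only set when the constraint is named
fk.setFktable_db("default");         // child (referencing) side
fk.setFktable_name("sales");
fk.setFkcolumn_name("cust_id");
fk.setPktable_db("default");         // parent (referenced) side
fk.setPktable_name("customers");
fk.setPkcolumn_name("id");
fk.setKey_seq(1);                    // 1-based position within a composite key
fk.setRely_cstr(true);               // RELY was given; ENABLE/VALIDATE are rejected above
foreignKeys.add(fk);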
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class HBaseUtils, method serializeForeignKeys:
/**
* Serialize the foreign key(s) for a table.
* @param fks Foreign key columns. These may belong to multiple foreign keys.
* @return two byte arrays: the first contains the key, the second the serialized value.
*/
static byte[][] serializeForeignKeys(List<SQLForeignKey> fks) {
  // First, figure out the dbName and tableName. We expect this to match for all list entries.
  byte[][] result = new byte[2][];
  String dbName = fks.get(0).getFktable_db();
  String tableName = fks.get(0).getFktable_name();
  result[0] = buildKey(HiveStringUtils.normalizeIdentifier(dbName),
      HiveStringUtils.normalizeIdentifier(tableName));
  HbaseMetastoreProto.ForeignKeys.Builder builder = HbaseMetastoreProto.ForeignKeys.newBuilder();
  // Encode any foreign keys we find. This can be complex because there may be more than
  // one foreign key in here, so we need to detect that.
  Map<String, HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder> fkBuilders = new HashMap<>();
  for (SQLForeignKey fkcol : fks) {
    HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder fkBuilder =
        fkBuilders.get(fkcol.getFk_name());
    if (fkBuilder == null) {
      // We haven't seen this key before, so add it
      fkBuilder = HbaseMetastoreProto.ForeignKeys.ForeignKey.newBuilder();
      fkBuilder.setFkName(fkcol.getFk_name());
      fkBuilder.setReferencedDbName(fkcol.getPktable_db());
      assert dbName.equals(fkcol.getFktable_db()) : "You switched databases on me!";
      fkBuilder.setReferencedTableName(fkcol.getPktable_name());
      assert tableName.equals(fkcol.getFktable_name()) : "You switched tables on me!";
      fkBuilder.setReferencedPkName(fkcol.getPk_name());
      fkBuilder.setUpdateRule(fkcol.getUpdate_rule());
      fkBuilder.setDeleteRule(fkcol.getDelete_rule());
      fkBuilder.setEnableConstraint(fkcol.isEnable_cstr());
      fkBuilder.setValidateConstraint(fkcol.isValidate_cstr());
      fkBuilder.setRelyConstraint(fkcol.isRely_cstr());
      fkBuilders.put(fkcol.getFk_name(), fkBuilder);
    }
    HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.Builder fkColBuilder =
        HbaseMetastoreProto.ForeignKeys.ForeignKey.ForeignKeyColumn.newBuilder();
    fkColBuilder.setColumnName(fkcol.getFkcolumn_name());
    fkColBuilder.setReferencedColumnName(fkcol.getPkcolumn_name());
    fkColBuilder.setKeySeq(fkcol.getKey_seq());
    fkBuilder.addCols(fkColBuilder);
  }
  for (HbaseMetastoreProto.ForeignKeys.ForeignKey.Builder fkBuilder : fkBuilders.values()) {
    builder.addFks(fkBuilder);
  }
  result[1] = builder.build().toByteArray();
  return result;
}
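A hedged usage sketch follows (table, column, and constraint names are invented, and it assumes package-private access to HBaseUtils): two columns sharing the same fk_name collapse into a single ForeignKey message holding two ForeignKeyColumn entries:

// Hypothetical: serialize a two-column foreign key on an invented default.sales.
List<SQLForeignKey> fks = new ArrayList<>();
for (int seq = 1; seq <= 2; seq++) {
  SQLForeignKey col = new SQLForeignKey();
  col.setFk_name("fk1");             // same name => grouped under one ForeignKey message
  col.setFktable_db("default");
  col.setFktable_name("sales");
  col.setFkcolumn_name("c" + seq);
  col.setPktable_db("default");
  col.setPktable_name("parent");
  col.setPk_name("pk1");
  col.setPkcolumn_name("p" + seq);
  col.setKey_seq(seq);
  fks.add(col);
}
byte[][] kv = HBaseUtils.serializeForeignKeys(fks);
// kv[0] is the HBase row key built from the normalized db and table names;
// kv[1] is the serialized HbaseMetastoreProto.ForeignKeys value.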
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class HBaseStore, method getForeignKeys:
@Override
public List<SQLForeignKey> getForeignKeys(String parent_db_name, String parent_tbl_name,
    String foreign_db_name, String foreign_tbl_name) throws MetaException {
  boolean commit = false;
  openTransaction();
  try {
    List<SQLForeignKey> fks = getHBase().getForeignKeys(foreign_db_name, foreign_tbl_name);
    if (fks == null || fks.size() == 0) return null;
    List<SQLForeignKey> result = new ArrayList<>(fks.size());
    for (SQLForeignKey fkcol : fks) {
      // Keep only keys that reference the requested parent; null filters match anything.
      if ((parent_db_name == null || fkcol.getPktable_db().equals(parent_db_name))
          && (parent_tbl_name == null || fkcol.getPktable_name().equals(parent_tbl_name))) {
        result.add(fkcol);
      }
    }
    commit = true;
    return result;
  } catch (IOException e) {
    LOG.error("Unable to get foreign key", e);
    throw new MetaException("Error reading db " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
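A hedged usage sketch (the store instance and all names are invented): passing nulls for the parent side returns every foreign key defined on the child table, while non-null filters narrow the result to keys referencing one parent. Note the null return, rather than an empty list, when the table has no foreign keys:

// Hypothetical: "store" is an HBaseStore instance; names are invented.
List<SQLForeignKey> all = store.getForeignKeys(null, null, "default", "sales");
List<SQLForeignKey> toCustomers =
    store.getForeignKeys("default", "customers", "default", "sales");
if (all == null) {
  // default.sales defines no foreign keys at all
}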
Use of org.apache.hadoop.hive.metastore.api.SQLForeignKey in project hive by apache.
The class HBaseStore, method dropConstraint:
@Override
public void dropConstraint(String dbName, String tableName, String constraintName)
    throws NoSuchObjectException {
  // This is something of a pain, since we have to search both the primary key and the
  // foreign keys to see which one they want to drop.
  boolean commit = false;
  openTransaction();
  try {
    List<SQLPrimaryKey> pk = getHBase().getPrimaryKey(dbName, tableName);
    if (pk != null && pk.size() > 0 && pk.get(0).getPk_name().equals(constraintName)) {
      getHBase().deletePrimaryKey(dbName, tableName);
      commit = true;
      return;
    }
    List<SQLForeignKey> fks = getHBase().getForeignKeys(dbName, tableName);
    if (fks != null && fks.size() > 0) {
      List<SQLForeignKey> newKeyList = new ArrayList<>(fks.size());
      // Make a new list of keys that excludes all columns from the constraint we're dropping.
      for (SQLForeignKey fkcol : fks) {
        if (!fkcol.getFk_name().equals(constraintName)) newKeyList.add(fkcol);
      }
      // If any keys survive, write them back so we still have the existing keys.
      // Otherwise drop the foreign keys altogether.
      if (newKeyList.size() > 0) {
        getHBase().putForeignKeys(newKeyList);
      } else {
        getHBase().deleteForeignKeys(dbName, tableName);
      }
      commit = true;
      return;
    }
    commit = true;
    throw new NoSuchObjectException("Unable to find constraint named " + constraintName
        + " on table " + tableNameForErrorMsg(dbName, tableName));
  } catch (IOException e) {
    LOG.error("Error fetching primary key for table " + tableNameForErrorMsg(dbName, tableName), e);
    throw new NoSuchObjectException("Error fetching primary key for table "
        + tableNameForErrorMsg(dbName, tableName) + " : " + e.getMessage());
  } finally {
    commitOrRoleBack(commit);
  }
}
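A hedged usage sketch (store instance and names invented): a matching primary key short-circuits before the foreign keys are consulted, and an unknown constraint name surfaces as NoSuchObjectException:

// Hypothetical: drop a constraint by name on an invented table and handle the miss.
try {
  store.dropConstraint("default", "sales", "fk1");
} catch (NoSuchObjectException e) {
  // Neither the primary key nor any foreign key on default.sales is named "fk1".
}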