Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.
From the class BaseSemanticAnalyzer, method processPrimaryKeys:
/**
 * Validates the gathered primary key columns against the table's column schemas and
 * converts them into {@link SQLPrimaryKey} metastore objects.
 * @param parent parent of the primary key token node; its first child carries the qualified table name
 * @param pkInfos parsed primary key column information
 * @param primaryKeys output list that receives one SQLPrimaryKey per key column
 * @param nametoFS mapping from column name to field schema for the current table
 * @throws SemanticException if a primary key column is not a column of the table
 */
private static void processPrimaryKeys(ASTNode parent, List<PKInfo> pkInfos, List<SQLPrimaryKey> primaryKeys, Map<String, FieldSchema> nametoFS) throws SemanticException {
  String[] qualifiedTabName = getQualifiedTableName((ASTNode) parent.getChild(0));
  // Key sequence numbers are 1-based and follow the declaration order of the key columns.
  int keySeq = 1;
  for (PKInfo pkInfo : pkInfos) {
    String colName = pkInfo.colName;
    if (!nametoFS.containsKey(colName)) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(colName));
    }
    primaryKeys.add(new SQLPrimaryKey(qualifiedTabName[0], qualifiedTabName[1], colName, keySeq++, pkInfo.constraintName, false, false, pkInfo.rely));
  }
}
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.
From the class HBaseUtils, method deserializePrimaryKey:
/**
 * Deserializes a protobuf-encoded primary key value into its SQLPrimaryKey column entries.
 * @param dbName database the key's table belongs to
 * @param tableName table the key belongs to
 * @param value serialized HbaseMetastoreProto.PrimaryKey bytes
 * @return one SQLPrimaryKey per key column
 * @throws InvalidProtocolBufferException if the bytes are not a valid PrimaryKey message
 */
static List<SQLPrimaryKey> deserializePrimaryKey(String dbName, String tableName, byte[] value) throws InvalidProtocolBufferException {
  final HbaseMetastoreProto.PrimaryKey proto = HbaseMetastoreProto.PrimaryKey.parseFrom(value);
  final List<SQLPrimaryKey> keys = new ArrayList<>();
  // Name and constraint flags are stored once per key in the proto; replicate them
  // onto every per-column SQLPrimaryKey entry, as the thrift model requires.
  for (HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn col : proto.getColsList()) {
    keys.add(new SQLPrimaryKey(
        dbName,
        tableName,
        col.getColumnName(),
        col.getKeySeq(),
        proto.getPkName(),
        proto.getEnableConstraint(),
        proto.getValidateConstraint(),
        proto.getRelyConstraint()));
  }
  return keys;
}
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.
From the class HBaseUtils, method serializePrimaryKey:
/**
 * Serializes a table's primary key columns into an HBase key/value pair.
 * @param pk primary key columns. All entries are expected to belong to the same
 *           constraint on the same table, since anything else is meaningless.
 * @return two byte arrays: the first contains the hbase key, the second the serialized value
 */
static byte[][] serializePrimaryKey(List<SQLPrimaryKey> pk) {
  // Table-level attributes (db, table, pk name, constraint flags) are stored once per
  // key, so they are taken from the first entry.
  SQLPrimaryKey first = pk.get(0);
  byte[][] result = new byte[2][];
  result[0] = buildKey(HiveStringUtils.normalizeIdentifier(first.getTable_db()),
      HiveStringUtils.normalizeIdentifier(first.getTable_name()));
  HbaseMetastoreProto.PrimaryKey.Builder builder = HbaseMetastoreProto.PrimaryKey.newBuilder();
  builder.setPkName(first.getPk_name());
  builder.setEnableConstraint(first.isEnable_cstr());
  builder.setValidateConstraint(first.isValidate_cstr());
  builder.setRelyConstraint(first.isRely_cstr());
  // Each key column contributes its name and 1-based sequence position.
  for (SQLPrimaryKey col : pk) {
    HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.Builder colBuilder =
        HbaseMetastoreProto.PrimaryKey.PrimaryKeyColumn.newBuilder();
    colBuilder.setColumnName(col.getColumn_name());
    colBuilder.setKeySeq(col.getKey_seq());
    builder.addCols(colBuilder);
  }
  result[1] = builder.build().toByteArray();
  return result;
}
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.
From the class HBaseStore, method addPrimaryKeys:
@Override
public void addPrimaryKeys(List<SQLPrimaryKey> pks) throws InvalidObjectException, MetaException {
// Tracks whether the write succeeded; the finally block commits or rolls back based on it.
boolean commit = false;
openTransaction();
try {
// A table may have at most one primary key. All entries in pks are assumed to belong
// to the same table, so the first entry's db/table identify it.
List<SQLPrimaryKey> currentPk = getHBase().getPrimaryKey(pks.get(0).getTable_db(), pks.get(0).getTable_name());
if (currentPk != null) {
throw new MetaException(" Primary key already exists for: " + tableNameForErrorMsg(pks.get(0).getTable_db(), pks.get(0).getTable_name()));
}
getHBase().putPrimaryKey(pks);
commit = true;
} catch (IOException e) {
// NOTE(review): the IOException cause is not chained onto the thrown MetaException —
// presumably its constructor takes only a message; the full stack trace is logged here instead.
LOG.error("Error writing primary key", e);
throw new MetaException("Error writing primary key: " + e.getMessage());
} finally {
commitOrRoleBack(commit);
}
}
Use of org.apache.hadoop.hive.metastore.api.SQLPrimaryKey in the Apache Hive project.
From the class GetPrimaryKeysOperation, method runInternal:
/**
 * Fetches the primary key columns for the requested schema/table from the metastore
 * and emits one result row per key column.
 * @throws HiveSQLException if the metastore lookup or row construction fails
 */
@Override
public void runInternal() throws HiveSQLException {
  setState(OperationState.RUNNING);
  try {
    IMetaStoreClient client = getParentSession().getMetaStoreClient();
    List<SQLPrimaryKey> keys = client.getPrimaryKeys(new PrimaryKeysRequest(schemaName, tableName));
    if (keys == null) {
      // No primary key metadata: leave the row set empty.
      // NOTE(review): this early return skips setState(FINISHED), leaving the
      // operation in RUNNING state — confirm this is intentional.
      return;
    }
    for (SQLPrimaryKey key : keys) {
      rowSet.addRow(new Object[] {
          catalogName, key.getTable_db(), key.getTable_name(),
          key.getColumn_name(), key.getKey_seq(), key.getPk_name() });
    }
    setState(OperationState.FINISHED);
  } catch (Exception e) {
    setState(OperationState.ERROR);
    throw new HiveSQLException(e);
  }
}
Aggregations