Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache:
the class DDLSemanticAnalyzer, method addTablePartsOutputs.
/**
* Add the table partitions to be modified in the output, so that it is available for the
* pre-execution hook. If the partition does not exist, throw an error if
* throwIfNonExistent is true, otherwise ignore it.
*/
private void addTablePartsOutputs(Table table, List<Map<String, String>> partSpecs,
    boolean throwIfNonExistent, boolean allowMany, ASTNode ast, WriteEntity.WriteType writeType)
    throws SemanticException {
  Iterator<Map<String, String>> i;
  int index;
  for (i = partSpecs.iterator(), index = 1; i.hasNext(); ++index) {
    Map<String, String> partSpec = i.next();
    List<Partition> parts = null;
    if (allowMany) {
      try {
        parts = db.getPartitions(table, partSpec);
      } catch (HiveException e) {
        LOG.error("Got HiveException during obtaining list of partitions"
            + StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }
    } else {
      parts = new ArrayList<Partition>();
      try {
        Partition p = db.getPartition(table, partSpec, false);
        if (p != null) {
          parts.add(p);
        }
      } catch (HiveException e) {
        LOG.debug("Wrong specification" + StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
      }
    }
    if (parts.isEmpty()) {
      if (throwIfNonExistent) {
        throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(ast.getChild(index)));
      }
    }
    for (Partition p : parts) {
      // Don't request any locks here, as the table has already been locked.
      outputs.add(new WriteEntity(p, writeType));
    }
  }
}
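For context, here is a minimal sketch (not part of the Hive source above) of a pre-execution hook that reads the WriteEntity outputs this method registers. It assumes the standard ExecuteWithHookContext interface; the class name PartitionAuditHook is made up.

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Hypothetical hook: logs every output entity together with the write/lock type
// chosen by the analyzer (e.g. DDL_NO_LOCK for TOUCH, DDL_EXCLUSIVE for DROP PARTITION).
public class PartitionAuditHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    for (WriteEntity output : hookContext.getOutputs()) {
      System.out.println(output.getName() + " -> " + output.getWriteType());
    }
  }
}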
Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache:
the class DDLSemanticAnalyzer, method addTableDropPartsOutputs.
/**
* Add the table partitions to be modified in the output, so that it is available for the
* pre-execution hook. If the partition does not exist, throw an error if
* throwIfNonExistent is true, otherwise ignore it.
*/
private void addTableDropPartsOutputs(Table tab, Collection<List<ExprNodeGenericFuncDesc>> partSpecs,
    boolean throwIfNonExistent) throws SemanticException {
  for (List<ExprNodeGenericFuncDesc> specs : partSpecs) {
    for (ExprNodeGenericFuncDesc partSpec : specs) {
      List<Partition> parts = new ArrayList<Partition>();
      boolean hasUnknown = false;
      try {
        hasUnknown = db.getPartitionsByExpr(tab, partSpec, conf, parts);
      } catch (Exception e) {
        throw new SemanticException(
            ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()), e);
      }
      if (hasUnknown) {
        throw new SemanticException("Unexpected unknown partitions for " + partSpec.getExprString());
      }
      // earlier... If we get rid of output, we can get rid of this.
      if (parts.isEmpty()) {
        if (throwIfNonExistent) {
          throw new SemanticException(ErrorMsg.INVALID_PARTITION.getMsg(partSpec.getExprString()));
        }
      }
      for (Partition p : parts) {
        outputs.add(new WriteEntity(p, WriteEntity.WriteType.DDL_EXCLUSIVE));
      }
    }
  }
}
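A hook like the sketch shown earlier only sees these DDL_EXCLUSIVE outputs if it is registered in the configuration. One plausible wiring is sketched below; the wrapper class and the hook's fully qualified name are hypothetical.

import org.apache.hadoop.hive.conf.HiveConf;

public class HookWiringSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // With the hook registered under hive.exec.pre.hooks, the DDL_EXCLUSIVE
    // WriteEntity instances added for DROP PARTITION are visible to it before
    // the drop actually executes.
    conf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "com.example.PartitionAuditHook");
  }
}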
Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache:
the class DDLSemanticAnalyzer, method analyzeAlterTableTouch.
/**
* Rewrite the metadata for one or more partitions in a table. Useful when
* an external process modifies files on HDFS and you want the pre/post
* hooks to be fired for the specified partition.
*
* @param ast
* The parsed command tree.
* @throws SemanticException
* Parsing failed
*/
private void analyzeAlterTableTouch(String[] qualified, CommonTree ast) throws SemanticException {
  Table tab = getTable(qualified);
  validateAlterTableType(tab, AlterTableTypes.TOUCH);
  inputs.add(new ReadEntity(tab));
  // partition name to value
  List<Map<String, String>> partSpecs = getPartitionSpecs(tab, ast);
  if (partSpecs.size() == 0) {
    AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(getDotName(qualified), null,
        AlterTableDesc.AlterTableTypes.TOUCH);
    outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc)));
  } else {
    addTablePartsOutputs(tab, partSpecs, WriteEntity.WriteType.DDL_NO_LOCK);
    for (Map<String, String> partSpec : partSpecs) {
      AlterTableSimpleDesc touchDesc = new AlterTableSimpleDesc(getDotName(qualified), partSpec,
          AlterTableDesc.AlterTableTypes.TOUCH);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), touchDesc)));
    }
  }
}
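For reference, the kind of statement that reaches analyzeAlterTableTouch could be issued through the Driver API roughly as follows. This is a sketch, not Hive documentation: the table name and partition value are made up, and a working HiveConf/metastore is assumed.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class TouchPartitionSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    SessionState.start(conf);
    Driver driver = new Driver(conf);
    // TOUCH with a partition spec takes the addTablePartsOutputs() path above,
    // registering one DDL_NO_LOCK WriteEntity per matched partition.
    driver.run("ALTER TABLE page_view TOUCH PARTITION (dt = '2024-01-01')");
  }
}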
Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache:
the class DDLSemanticAnalyzer, method analyzeAlterTableAddParts.
/**
* Add one or more partitions to a table. Useful when the data has been copied
* to the right location by some other process.
*
* @param ast
* The parsed command tree.
*
* @param expectView
* True for ALTER VIEW, false for ALTER TABLE.
*
* @throws SemanticException
* Parsing failed
*/
private void analyzeAlterTableAddParts(String[] qualified, CommonTree ast, boolean expectView)
    throws SemanticException {
  // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists? alterStatementSuffixAddPartitionsElement+)
  boolean ifNotExists = ast.getChild(0).getType() == HiveParser.TOK_IFNOTEXISTS;
  Table tab = getTable(qualified);
  boolean isView = tab.isView();
  validateAlterTableType(tab, AlterTableTypes.ADDPARTITION, expectView);
  outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
  int numCh = ast.getChildCount();
  int start = ifNotExists ? 1 : 0;
  String currentLocation = null;
  Map<String, String> currentPart = null;
  // Parser has done some verification, so the order of tokens doesn't need to be verified here.
  AddPartitionDesc addPartitionDesc =
      new AddPartitionDesc(tab.getDbName(), tab.getTableName(), ifNotExists);
  for (int num = start; num < numCh; num++) {
    ASTNode child = (ASTNode) ast.getChild(num);
    switch (child.getToken().getType()) {
      case HiveParser.TOK_PARTSPEC:
        if (currentPart != null) {
          addPartitionDesc.addPartition(currentPart, currentLocation);
          currentLocation = null;
        }
        currentPart = getValidatedPartSpec(tab, child, conf, true);
        // validate reserved values
        validatePartitionValues(currentPart);
        break;
      case HiveParser.TOK_PARTITIONLOCATION:
        // if location specified, set in partition
        if (isView) {
          throw new SemanticException("LOCATION clause illegal for view partition");
        }
        currentLocation = unescapeSQLString(child.getChild(0).getText());
        inputs.add(toReadEntity(currentLocation));
        break;
      default:
        throw new SemanticException("Unknown child: " + child);
    }
  }
  // add the last one
  if (currentPart != null) {
    addPartitionDesc.addPartition(currentPart, currentLocation);
  }
  if (this.conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
    for (int index = 0; index < addPartitionDesc.getPartitionCount(); index++) {
      OnePartitionDesc desc = addPartitionDesc.getPartition(index);
      if (desc.getLocation() == null) {
        if (desc.getPartParams() == null) {
          desc.setPartParams(new HashMap<String, String>());
        }
        StatsSetupConst.setStatsStateForCreateTable(desc.getPartParams(),
            MetaStoreUtils.getColumnNames(tab.getCols()), StatsSetupConst.TRUE);
      }
    }
  }
  if (addPartitionDesc.getPartitionCount() == 0) {
    // nothing to do
    return;
  }
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), addPartitionDesc)));
  if (isView) {
    // Compile internal query to capture underlying table partition dependencies
    StringBuilder cmd = new StringBuilder();
    cmd.append("SELECT * FROM ");
    cmd.append(HiveUtils.unparseIdentifier(getDotName(qualified)));
    cmd.append(" WHERE ");
    boolean firstOr = true;
    for (int i = 0; i < addPartitionDesc.getPartitionCount(); ++i) {
      AddPartitionDesc.OnePartitionDesc partitionDesc = addPartitionDesc.getPartition(i);
      if (firstOr) {
        firstOr = false;
      } else {
        cmd.append(" OR ");
      }
      boolean firstAnd = true;
      cmd.append("(");
      for (Map.Entry<String, String> entry : partitionDesc.getPartSpec().entrySet()) {
        if (firstAnd) {
          firstAnd = false;
        } else {
          cmd.append(" AND ");
        }
        cmd.append(HiveUtils.unparseIdentifier(entry.getKey()));
        cmd.append(" = '");
        cmd.append(HiveUtils.escapeString(entry.getValue()));
        cmd.append("'");
      }
      cmd.append(")");
    }
    SessionState ss = SessionState.get();
    String uName = (ss == null ? null : ss.getUserName());
    Driver driver = new Driver(conf, uName, queryState.getLineageState());
    int rc = driver.compile(cmd.toString(), false);
    if (rc != 0) {
      throw new SemanticException(ErrorMsg.NO_VALID_PARTN.getMsg());
    }
    inputs.addAll(driver.getPlan().getInputs());
  }
}
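The view branch at the end builds a disjunction of partition predicates and compiles it only to collect the underlying table dependencies. Below is a simplified, self-contained illustration (not Hive code; HiveUtils identifier quoting and string escaping are omitted, and the view and column names are made up) of the WHERE clause produced for a single partition spec.

import java.util.LinkedHashMap;
import java.util.Map;

public class ViewPartitionQuerySketch {
  public static void main(String[] args) {
    Map<String, String> partSpec = new LinkedHashMap<>();
    partSpec.put("dt", "2024-01-01");
    partSpec.put("country", "US");

    StringBuilder cmd = new StringBuilder("SELECT * FROM db.page_view_v WHERE (");
    boolean first = true;
    for (Map.Entry<String, String> e : partSpec.entrySet()) {
      if (!first) {
        cmd.append(" AND ");
      }
      first = false;
      cmd.append(e.getKey()).append(" = '").append(e.getValue()).append("'");
    }
    cmd.append(")");
    // Prints: SELECT * FROM db.page_view_v WHERE (dt = '2024-01-01' AND country = 'US')
    System.out.println(cmd);
  }
}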
Use of org.apache.hadoop.hive.ql.hooks.WriteEntity in project hive by apache:
the class FunctionSemanticAnalyzer, method addEntities.
/**
* Add write entities to the semantic analyzer to restrict function creation to privileged users.
*/
private void addEntities(String functionName, String className, boolean isTemporaryFunction,
    List<ResourceUri> resources) throws SemanticException {
  // If the function is being added under a database 'namespace', then add an entity representing
  // the database (only applicable to permanent/metastore functions).
  // We also add a second entity representing the function name.
  // The authorization api implementation can decide which entities it wants to use to
  // authorize the create/drop function call.

  // Add the relevant database 'namespace' as a WriteEntity
  Database database = null;
  // it matters only for permanent functions
  if (!isTemporaryFunction) {
    try {
      String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
      String dbName = qualifiedNameParts[0];
      functionName = qualifiedNameParts[1];
      database = getDatabase(dbName);
    } catch (HiveException e) {
      LOG.error("Failed to get database ", e);
      throw new SemanticException(e);
    }
  }
  if (database != null) {
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
  }
  // Add the function name as a WriteEntity
  outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION,
      WriteEntity.WriteType.DDL_NO_LOCK));
  if (resources != null) {
    for (ResourceUri resource : resources) {
      String uriPath = resource.getUri();
      outputs.add(toWriteEntity(uriPath));
    }
  }
}
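As a final illustration, an authorization-oriented consumer might distinguish the function-level entity registered above from the database-level one by its entity type. This is a minimal sketch under that assumption; the class and method names are hypothetical and not part of FunctionSemanticAnalyzer.

import java.util.Set;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

public class FunctionEntityFilter {
  /** Returns true if any of the analyzer outputs is a function-level WriteEntity. */
  public static boolean hasFunctionOutput(Set<WriteEntity> outputs) {
    for (WriteEntity output : outputs) {
      if (output.getType() == Entity.Type.FUNCTION) {
        return true;
      }
    }
    return false;
  }
}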