Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.
From the class ExplainTask, method collectAuthRelatedEntities:
private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work) throws Exception {
  BaseSemanticAnalyzer analyzer = work.getAnalyzer();
  HiveOperation operation = queryState.getHiveOperation();
  JSONObject object = new JSONObject(new LinkedHashMap<>());
  Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
  if (work.isFormatted()) {
    object.put("INPUTS", jsonInput);
  }
  Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
  if (work.isFormatted()) {
    object.put("OUTPUTS", jsonOutput);
  }
  String userName = SessionState.get().getAuthenticator().getUserName();
  Object jsonUser = toJson("CURRENT_USER", userName, out, work);
  if (work.isFormatted()) {
    object.put("CURRENT_USER", jsonUser);
  }
  Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
  if (work.isFormatted()) {
    object.put("OPERATION", jsonOperation);
  }
  if (analyzer.skipAuthorization()) {
    return object;
  }
  final List<String> exceptions = new ArrayList<String>();
  Object delegate = SessionState.get().getActiveAuthorizer();
  if (delegate != null) {
    Class itface = SessionState.get().getAuthorizerInterface();
    Object authorizer = AuthorizationFactory.create(delegate, itface,
        new AuthorizationFactory.AuthorizationExceptionHandler() {
          public void exception(Exception exception) {
            exceptions.add(exception.getMessage());
          }
        });
    // Swap in the collecting authorizer so failures are recorded rather than thrown.
    SessionState.get().setActiveAuthorizer(authorizer);
    try {
      Driver.doAuthorization(queryState.getHiveOperation(), analyzer, "");
    } finally {
      // Always restore the real authorizer.
      SessionState.get().setActiveAuthorizer(delegate);
    }
  }
  if (!exceptions.isEmpty()) {
    Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
    if (work.isFormatted()) {
      object.put("AUTHORIZATION_FAILURES", jsonFails);
    }
  }
  return object;
}
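The interesting move here is that EXPLAIN AUTHORIZATION does not let authorization failures abort the statement: AuthorizationFactory.create wraps the active authorizer so exceptions are reported to the handler instead of propagating. Below is a minimal, self-contained sketch of that wrap-and-collect pattern using a JDK dynamic proxy; the Authorizer interface and all names are simplified stand-ins, not the Hive API.

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;

public class CollectingAuthorizerSketch {
  // Hypothetical stand-in for the authorizer interface.
  interface Authorizer {
    void checkPrivileges(String op) throws Exception;
  }

  public static void main(String[] args) throws Exception {
    Authorizer delegate = op -> {
      throw new Exception("Permission denied: " + op);
    };
    final List<String> exceptions = new ArrayList<>();
    // Wrap the delegate so failures are recorded instead of propagated,
    // mirroring what AuthorizationFactory.create does for the snippet above.
    Authorizer collecting = (Authorizer) Proxy.newProxyInstance(
        Authorizer.class.getClassLoader(),
        new Class<?>[] { Authorizer.class },
        (proxy, method, methodArgs) -> {
          try {
            return method.invoke(delegate, methodArgs);
          } catch (InvocationTargetException e) {
            exceptions.add(e.getCause().getMessage());
            return null;
          }
        });
    collecting.checkPrivileges("QUERY"); // does not throw
    System.out.println(exceptions);      // [Permission denied: QUERY]
  }
}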
Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.
From the class SemanticAnalyzer, method materializeCTE:
Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
  ASTNode createTable = new ASTNode(new ClassicToken(HiveParser.TOK_CREATETABLE));
  ASTNode tableName = new ASTNode(new ClassicToken(HiveParser.TOK_TABNAME));
  tableName.addChild(new ASTNode(new ClassicToken(HiveParser.Identifier, cteName)));
  ASTNode temporary = new ASTNode(new ClassicToken(HiveParser.KW_TEMPORARY, MATERIALIZATION_MARKER));
  createTable.addChild(tableName);
  createTable.addChild(temporary);
  createTable.addChild(cte.cteNode);
  SemanticAnalyzer analyzer = new SemanticAnalyzer(queryState);
  analyzer.initCtx(ctx);
  analyzer.init(false);
  // should share cte contexts
  analyzer.aliasToCTEs.putAll(aliasToCTEs);
  HiveOperation operation = queryState.getHiveOperation();
  try {
    analyzer.analyzeInternal(createTable);
  } finally {
    // restore the caller's command type; the nested analysis overwrites it
    queryState.setCommandType(operation);
  }
  Table table = analyzer.tableDesc.toTable(conf);
  Path location = table.getDataLocation();
  try {
    location.getFileSystem(conf).mkdirs(location);
  } catch (IOException e) {
    throw new HiveException(e);
  }
  table.setMaterializedTable(true);
  LOG.info(cteName + " will be materialized into " + location);
  cte.table = table;
  cte.source = analyzer;
  ctx.addMaterializedTable(cteName, table);
  return table;
}
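Note the try/finally around analyzeInternal: the nested analyzer shares the caller's QueryState, and analyzing the synthetic CREATE TABLE tree overwrites its command type, so the saved HiveOperation must be restored even when the analysis fails. A self-contained sketch of that save/restore idiom, with simplified stand-ins for QueryState and HiveOperation rather than the Hive classes:

public class SaveRestoreSketch {
  enum HiveOperation { QUERY, CREATETABLE }

  static class QueryState {
    private HiveOperation op = HiveOperation.QUERY;
    HiveOperation getHiveOperation() { return op; }
    void setCommandType(HiveOperation op) { this.op = op; }
  }

  public static void main(String[] args) {
    QueryState queryState = new QueryState();
    HiveOperation operation = queryState.getHiveOperation(); // save the caller's type
    try {
      // The nested analysis mutates the shared state as a side effect.
      queryState.setCommandType(HiveOperation.CREATETABLE);
    } finally {
      queryState.setCommandType(operation); // always restore, even on failure
    }
    System.out.println(queryState.getHiveOperation()); // prints QUERY
  }
}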
Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project incubator-atlas by apache.
From the class HiveHook, method getProcessQualifiedName:
@VisibleForTesting
static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext,
    final SortedSet<ReadEntity> sortedHiveInputs, final SortedSet<WriteEntity> sortedHiveOutputs,
    SortedMap<ReadEntity, Referenceable> hiveInputsMap,
    SortedMap<WriteEntity, Referenceable> hiveOutputsMap) throws HiveException {
  HiveOperation op = eventContext.getOperation();
  if (isCreateOp(eventContext)) {
    Entity entity = getEntityByType(sortedHiveOutputs, Type.TABLE);
    if (entity != null) {
      Table outTable = entity.getTable();
      // refresh table
      outTable = dgiBridge.hiveClient.getTable(outTable.getDbName(), outTable.getTableName());
      return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getClusterName(), outTable);
    }
  }
  StringBuilder buffer = new StringBuilder(op.getOperationName());
  boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs);
  if (ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) {
    LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr());
  }
  addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName);
  buffer.append(IO_SEP);
  addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName);
  LOG.info("Setting process qualified name to {}", buffer);
  return buffer.toString();
}
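Apart from the create-operation shortcut, the resulting name is simply the operation name followed by the rendered inputs, an IO_SEP marker, and the rendered outputs. A hedged sketch of that shape; the separator value and the entity strings below are illustrative assumptions, not Atlas constants:

public class ProcessNameSketch {
  public static void main(String[] args) {
    StringBuilder buffer = new StringBuilder("QUERY"); // op.getOperationName()
    buffer.append(":default.src@primary");             // what addInputs might append
    buffer.append("->");                               // IO_SEP (assumed value)
    buffer.append(":default.dest@primary");            // what addOutputs might append
    System.out.println(buffer);
    // QUERY:default.src@primary->:default.dest@primary
  }
}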
Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.
From the class CalcitePlanner, method materializeCTE:
@Override
Table materializeCTE(String cteName, CTEClause cte) throws HiveException {
  ASTNode createTable = new ASTNode(new ClassicToken(HiveParser.TOK_CREATETABLE));
  ASTNode tableName = new ASTNode(new ClassicToken(HiveParser.TOK_TABNAME));
  tableName.addChild(new ASTNode(new ClassicToken(HiveParser.Identifier, cteName)));
  ASTNode temporary = new ASTNode(new ClassicToken(HiveParser.KW_TEMPORARY, MATERIALIZATION_MARKER));
  createTable.addChild(tableName);
  createTable.addChild(temporary);
  createTable.addChild(cte.cteNode);
  CalcitePlanner analyzer = new CalcitePlanner(queryState);
  analyzer.initCtx(ctx);
  analyzer.init(false);
  // should share cte contexts
  analyzer.aliasToCTEs.putAll(aliasToCTEs);
  HiveOperation operation = queryState.getHiveOperation();
  try {
    analyzer.analyzeInternal(createTable);
  } finally {
    queryState.setCommandType(operation);
  }
  Table table = analyzer.tableDesc.toTable(conf);
  Path location = table.getDataLocation();
  try {
    location.getFileSystem(conf).mkdirs(location);
  } catch (IOException e) {
    throw new HiveException(e);
  }
  table.setMaterializedTable(true);
  LOG.info(cteName + " will be materialized into " + location);
  cte.table = table;
  cte.source = analyzer;
  ctx.addMaterializedTable(cteName, table);
  // For CalcitePlanner, store qualified name too
  ctx.addMaterializedTable(table.getDbName() + "." + table.getTableName(), table);
  return table;
}
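The only substantive difference from the SemanticAnalyzer version above is the final registration: CalcitePlanner records the materialized table under its qualified db.table name as well as the bare CTE name, so later lookups resolve either form. A minimal sketch of that double registration, where a Map stands in for the Context registry and all names are illustrative:

import java.util.HashMap;
import java.util.Map;

public class DoubleRegistrationSketch {
  public static void main(String[] args) {
    Map<String, String> materializedTables = new HashMap<>(); // stands in for ctx
    String table = "<materialized cte1>";                     // stands in for the Table
    materializedTables.put("cte1", table);                    // bare CTE name
    materializedTables.put("default.cte1", table);            // dbName + "." + tableName
    System.out.println(materializedTables.get("default.cte1")); // <materialized cte1>
  }
}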
Use of org.apache.hadoop.hive.ql.plan.HiveOperation in project hive by apache.
From the class SemanticAnalyzerFactory, method get:
public static BaseSemanticAnalyzer get(QueryState queryState, ASTNode tree) throws SemanticException {
  if (tree.getToken() == null) {
    throw new RuntimeException("Empty Syntax Tree");
  } else {
    HiveOperation opType = commandType.get(tree.getType());
    queryState.setCommandType(opType);
    switch (tree.getType()) {
      case HiveParser.TOK_EXPLAIN:
        return new ExplainSemanticAnalyzer(queryState);
      case HiveParser.TOK_EXPLAIN_SQ_REWRITE:
        return new ExplainSQRewriteSemanticAnalyzer(queryState);
      case HiveParser.TOK_LOAD:
        return new LoadSemanticAnalyzer(queryState);
      case HiveParser.TOK_EXPORT:
        return new ExportSemanticAnalyzer(queryState);
      case HiveParser.TOK_IMPORT:
        return new ImportSemanticAnalyzer(queryState);
      case HiveParser.TOK_REPL_DUMP:
      case HiveParser.TOK_REPL_LOAD:
      case HiveParser.TOK_REPL_STATUS:
        return new ReplicationSemanticAnalyzer(queryState);
      case HiveParser.TOK_ALTERTABLE: {
        Tree child = tree.getChild(1);
        switch (child.getType()) {
          case HiveParser.TOK_ALTERTABLE_RENAME:
          case HiveParser.TOK_ALTERTABLE_TOUCH:
          case HiveParser.TOK_ALTERTABLE_ARCHIVE:
          case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
          case HiveParser.TOK_ALTERTABLE_ADDCOLS:
          case HiveParser.TOK_ALTERTABLE_RENAMECOL:
          case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
          case HiveParser.TOK_ALTERTABLE_DROPPARTS:
          case HiveParser.TOK_ALTERTABLE_ADDPARTS:
          case HiveParser.TOK_ALTERTABLE_PARTCOLTYPE:
          case HiveParser.TOK_ALTERTABLE_PROPERTIES:
          case HiveParser.TOK_ALTERTABLE_DROPPROPERTIES:
          case HiveParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
          case HiveParser.TOK_ALTERTABLE_SKEWED:
          case HiveParser.TOK_ALTERTABLE_DROPCONSTRAINT:
          case HiveParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            queryState.setCommandType(commandType.get(child.getType()));
            return new DDLSemanticAnalyzer(queryState);
        }
        opType = tablePartitionCommandType.get(child.getType())[tree.getChildCount() > 2 ? 1 : 0];
        queryState.setCommandType(opType);
        return new DDLSemanticAnalyzer(queryState);
      }
      case HiveParser.TOK_ALTERVIEW: {
        Tree child = tree.getChild(1);
        switch (child.getType()) {
          case HiveParser.TOK_ALTERVIEW_PROPERTIES:
          case HiveParser.TOK_ALTERVIEW_DROPPROPERTIES:
          case HiveParser.TOK_ALTERVIEW_ADDPARTS:
          case HiveParser.TOK_ALTERVIEW_DROPPARTS:
          case HiveParser.TOK_ALTERVIEW_RENAME:
            opType = commandType.get(child.getType());
            queryState.setCommandType(opType);
            return new DDLSemanticAnalyzer(queryState);
        }
        // TOK_ALTERVIEW_AS
        assert child.getType() == HiveParser.TOK_QUERY;
        queryState.setCommandType(HiveOperation.ALTERVIEW_AS);
        return new SemanticAnalyzer(queryState);
      }
      case HiveParser.TOK_CREATEDATABASE:
      case HiveParser.TOK_DROPDATABASE:
      case HiveParser.TOK_SWITCHDATABASE:
      case HiveParser.TOK_DROPTABLE:
      case HiveParser.TOK_DROPVIEW:
      case HiveParser.TOK_DROP_MATERIALIZED_VIEW:
      case HiveParser.TOK_DESCDATABASE:
      case HiveParser.TOK_DESCTABLE:
      case HiveParser.TOK_DESCFUNCTION:
      case HiveParser.TOK_MSCK:
      case HiveParser.TOK_ALTERINDEX_REBUILD:
      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
      case HiveParser.TOK_SHOWDATABASES:
      case HiveParser.TOK_SHOWTABLES:
      case HiveParser.TOK_SHOWCOLUMNS:
      case HiveParser.TOK_SHOW_TABLESTATUS:
      case HiveParser.TOK_SHOW_TBLPROPERTIES:
      case HiveParser.TOK_SHOW_CREATEDATABASE:
      case HiveParser.TOK_SHOW_CREATETABLE:
      case HiveParser.TOK_SHOWFUNCTIONS:
      case HiveParser.TOK_SHOWPARTITIONS:
      case HiveParser.TOK_SHOWINDEXES:
      case HiveParser.TOK_SHOWLOCKS:
      case HiveParser.TOK_SHOWDBLOCKS:
      case HiveParser.TOK_SHOW_COMPACTIONS:
      case HiveParser.TOK_SHOW_TRANSACTIONS:
      case HiveParser.TOK_ABORT_TRANSACTIONS:
      case HiveParser.TOK_SHOWCONF:
      case HiveParser.TOK_SHOWVIEWS:
      case HiveParser.TOK_CREATEINDEX:
      case HiveParser.TOK_DROPINDEX:
      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
      case HiveParser.TOK_LOCKTABLE:
      case HiveParser.TOK_UNLOCKTABLE:
      case HiveParser.TOK_LOCKDB:
      case HiveParser.TOK_UNLOCKDB:
      case HiveParser.TOK_CREATEROLE:
      case HiveParser.TOK_DROPROLE:
      case HiveParser.TOK_GRANT:
      case HiveParser.TOK_REVOKE:
      case HiveParser.TOK_SHOW_GRANT:
      case HiveParser.TOK_GRANT_ROLE:
      case HiveParser.TOK_REVOKE_ROLE:
      case HiveParser.TOK_SHOW_ROLE_GRANT:
      case HiveParser.TOK_SHOW_ROLE_PRINCIPALS:
      case HiveParser.TOK_SHOW_ROLES:
      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
      case HiveParser.TOK_ALTERDATABASE_OWNER:
      case HiveParser.TOK_TRUNCATETABLE:
      case HiveParser.TOK_SHOW_SET_ROLE:
      case HiveParser.TOK_CACHE_METADATA:
        return new DDLSemanticAnalyzer(queryState);
      case HiveParser.TOK_CREATEFUNCTION:
      case HiveParser.TOK_DROPFUNCTION:
      case HiveParser.TOK_RELOADFUNCTION:
        return new FunctionSemanticAnalyzer(queryState);
      case HiveParser.TOK_ANALYZE:
        return new ColumnStatsSemanticAnalyzer(queryState);
      case HiveParser.TOK_CREATEMACRO:
      case HiveParser.TOK_DROPMACRO:
        return new MacroSemanticAnalyzer(queryState);
      case HiveParser.TOK_UPDATE_TABLE:
      case HiveParser.TOK_DELETE_FROM:
      case HiveParser.TOK_MERGE:
        return new UpdateDeleteSemanticAnalyzer(queryState);
      case HiveParser.TOK_START_TRANSACTION:
      case HiveParser.TOK_COMMIT:
      case HiveParser.TOK_ROLLBACK:
      case HiveParser.TOK_SET_AUTOCOMMIT:
      default: {
        SemanticAnalyzer semAnalyzer =
            HiveConf.getBoolVar(queryState.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED)
                ? new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
        return semAnalyzer;
      }
    }
  }
}
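The pattern is worth naming: the factory first records the HiveOperation by consulting a static token-to-operation map, then dispatches on the token type, with ordinary queries falling through to a CBO-dependent default. A compact sketch of that dispatch, with simplified stand-ins for the Hive classes; the token constants and analyzer names below are illustrative:

import java.util.Map;

public class FactoryDispatchSketch {
  enum Op { EXPLAIN, LOAD, QUERY }

  // Stand-in for the static commandType map keyed by parser token type.
  static final Map<Integer, Op> COMMAND_TYPE = Map.of(1, Op.EXPLAIN, 2, Op.LOAD);

  static String get(int tokenType, boolean cboEnabled) {
    // Record the command type first, as the factory does via queryState.setCommandType.
    Op op = COMMAND_TYPE.getOrDefault(tokenType, Op.QUERY);
    switch (tokenType) {
      case 1:
        return "ExplainAnalyzer for " + op;
      case 2:
        return "LoadAnalyzer for " + op;
      default:
        // Queries fall through to the CBO-dependent default.
        return (cboEnabled ? "CalcitePlanner" : "SemanticAnalyzer") + " for " + op;
    }
  }

  public static void main(String[] args) {
    System.out.println(get(1, true));  // ExplainAnalyzer for EXPLAIN
    System.out.println(get(99, true)); // CalcitePlanner for QUERY
  }
}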