Usage of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in the Apache Hive project:
class ExprNodeGenericFuncDesc, method newInstance.
/**
* Create a ExprNodeGenericFuncDesc based on the genericUDFClass and the
* children parameters. If the function has an explicit name, the
* newInstance method should be passed the function name in the funcText
* argument.
*
* @throws UDFArgumentException
*/
/**
 * Create a ExprNodeGenericFuncDesc based on the genericUDFClass and the
 * children parameters. If the function has an explicit name, the
 * newInstance method should be passed the function name in the funcText
 * argument.
 *
 * @param genericUDF the UDF instance to wrap and initialize
 * @param funcText   the explicit function name, or null when the function has none
 * @param children   the argument expressions of the function
 * @return the descriptor wrapping the initialized UDF
 * @throws UDFArgumentException if argument validation fails, strict type-safety
 *         checks reject an unsafe comparison, or required resources cannot be added
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
  ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
  for (int i = 0; i < childrenOIs.length; i++) {
    childrenOIs[i] = children.get(i).getWritableObjectInspector();
  }
  // Perform the check here instead of in GenericUDFBaseCompare to guarantee it is only run once per operator
  if (genericUDF instanceof GenericUDFBaseCompare && children.size() == 2) {
    TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
    TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();
    SessionState ss = SessionState.get();
    Configuration conf = (ss != null) ? ss.getConf() : new Configuration();
    // For now, if a bigint is going to be cast to a double throw an error or warning
    if (isComparisonOf(oiTypeInfo0, oiTypeInfo1, TypeInfoFactory.stringTypeInfo, TypeInfoFactory.longTypeInfo)) {
      warnOrFailOnUnsafeCompare(conf, "WARNING: Comparing a bigint and a string may result in a loss of precision.");
    } else if (isComparisonOf(oiTypeInfo0, oiTypeInfo1, TypeInfoFactory.doubleTypeInfo, TypeInfoFactory.longTypeInfo)) {
      warnOrFailOnUnsafeCompare(conf, "WARNING: Comparing a bigint and a double may result in a loss of precision.");
    }
  }
  ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
  String[] requiredJars = genericUDF.getRequiredJars();
  String[] requiredFiles = genericUDF.getRequiredFiles();
  SessionState ss = SessionState.get();
  // BUGFIX: SessionState.get() is nullable (the comparison block above already
  // guards for it); fail with a clear message instead of an NPE when resources
  // are required but no session is active.
  if ((requiredJars != null || requiredFiles != null) && ss == null) {
    throw new UDFArgumentException("UDF " + genericUDF.getUdfName()
        + " requires additional resources but no SessionState is available");
  }
  if (requiredJars != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("JAR");
    try {
      ss.add_resources(t, Arrays.asList(requiredJars));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  if (requiredFiles != null) {
    SessionState.ResourceType t = SessionState.find_resource_type("FILE");
    try {
      ss.add_resources(t, Arrays.asList(requiredFiles));
    } catch (Exception e) {
      throw new UDFArgumentException(e);
    }
  }
  return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}

/** Returns true when the two operand types are exactly the given pair, in either order. */
private static boolean isComparisonOf(TypeInfo a, TypeInfo b, TypeInfo t1, TypeInfo t2) {
  return (a.equals(t1) && b.equals(t2)) || (a.equals(t2) && b.equals(t1));
}

/**
 * Throws under strict type-safety checks; otherwise prints the precision-loss
 * warning to the session console.
 */
private static void warnOrFailOnUnsafeCompare(Configuration conf, String warning) throws UDFArgumentException {
  String error = StrictChecks.checkTypeSafety(conf);
  if (error != null) {
    throw new UDFArgumentException(error);
  }
  new LogHelper(LOG).printError(warning);
}
Usage of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in the Apache Hive project:
class MapJoinCounterHook, method run.
/**
 * Post-execution hook that tallies the join strategies chosen for the
 * completed query's tasks and reports the totals on the session console.
 * Does nothing unless map-join conversion is enabled.
 */
public void run(HookContext hookContext) {
  HiveConf conf = hookContext.getConf();
  // Counting is only meaningful when automatic map-join conversion is on.
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVECONVERTJOIN)) {
    return;
  }
  QueryPlan plan = hookContext.getQueryPlan();
  String queryID = plan.getQueryId();
  // String query = SessionState.get().getCmd();
  int numCommonJoin = 0;
  int numHintedMapJoin = 0;
  int numConvertedMapJoin = 0;
  int numHintedMapJoinLocal = 0;
  int numConvertedMapJoinLocal = 0;
  int numBackupCommonJoin = 0;
  // Classify each completed task by the join tag the optimizer assigned it.
  for (TaskRunner runner : hookContext.getCompleteTaskList()) {
    Task completed = runner.getTask();
    int tag = completed.getTaskTag();
    if (tag == Task.COMMON_JOIN) {
      numCommonJoin++;
    } else if (tag == Task.HINTED_MAPJOIN) {
      numHintedMapJoin++;
    } else if (tag == Task.HINTED_MAPJOIN_LOCAL) {
      numHintedMapJoinLocal++;
    } else if (tag == Task.CONVERTED_MAPJOIN) {
      numConvertedMapJoin++;
    } else if (tag == Task.CONVERTED_MAPJOIN_LOCAL) {
      numConvertedMapJoinLocal++;
    } else if (tag == Task.BACKUP_COMMON_JOIN) {
      numBackupCommonJoin++;
    }
  }
  LogHelper console = SessionState.getConsole();
  console.printError("[MapJoinCounter PostHook] COMMON_JOIN: " + numCommonJoin + " HINTED_MAPJOIN: " + numHintedMapJoin + " HINTED_MAPJOIN_LOCAL: " + numHintedMapJoinLocal + " CONVERTED_MAPJOIN: " + numConvertedMapJoin + " CONVERTED_MAPJOIN_LOCAL: " + numConvertedMapJoinLocal + " BACKUP_COMMON_JOIN: " + numBackupCommonJoin);
}
Usage of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in the Apache Hive project:
class VerifyOverriddenConfigsHook, method run.
/**
 * Prints, via the session console, every overridden session configuration
 * whose key appears in keysList. A missing console or session is a no-op.
 */
public void run(HookContext hookContext) {
  LogHelper console = SessionState.getConsole();
  SessionState session = SessionState.get();
  if (console == null || session == null) {
    return;
  }
  for (Entry<String, String> override : session.getOverriddenConfigurations().entrySet()) {
    String key = override.getKey();
    // Only report the keys this hook is configured to watch.
    if (keysList.contains(key)) {
      console.printError("Key: " + key + ", Value: " + override.getValue());
    }
  }
}
Usage of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in the Apache Hive project:
class DummySemanticAnalyzerHook1, method preAnalyze.
/**
 * Records whether the statement under analysis is a CREATE TABLE, takes the
 * next invocation count, and echoes a counter message for CREATE TABLE
 * statements. Returns the AST unchanged.
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
  LogHelper console = SessionState.getConsole();
  // Grab this invocation's sequence number before anything else.
  myCount = count++;
  isCreateTable = ast.getToken().getType() == HiveParser.TOK_CREATETABLE;
  if (isCreateTable) {
    console.printError("DummySemanticAnalyzerHook1 Pre: Count " + myCount);
  }
  return ast;
}
Usage of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in the Apache Hive project:
class CheckColumnAccessHook, method run.
/**
 * Dumps the per-table column-access information collected for the query to
 * the session console, in deterministic (sorted) order so test output is
 * stable. No-op when scan-column stats collection is disabled or no access
 * info is available.
 */
@Override
public void run(HookContext hookContext) {
  HiveConf conf = hookContext.getConf();
  if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
    return;
  }
  QueryPlan plan = hookContext.getQueryPlan();
  if (plan == null) {
    return;
  }
  ColumnAccessInfo columnAccessInfo = plan.getColumnAccessInfo();
  if (columnAccessInfo == null) {
    return;
  }
  LogHelper console = SessionState.getConsole();
  Map<String, List<String>> tableToColumnAccessMap = columnAccessInfo.getTableToColumnAccessMap();
  // BUGFIX: the previous LinkedHashMap only preserved the *insertion* order,
  // which followed the source map's (JVM-dependent) iteration order — not
  // deterministic across Java versions. Sorting the table names actually is.
  String[] tables = tableToColumnAccessMap.keySet().toArray(new String[0]);
  Arrays.sort(tables);
  for (String table : tables) {
    StringBuilder perTableInfo = new StringBuilder();
    perTableInfo.append("Table:").append(table).append("\n");
    // Sort columns to make output deterministic
    String[] columns = tableToColumnAccessMap.get(table).toArray(new String[0]);
    Arrays.sort(columns);
    perTableInfo.append("Columns:").append(StringUtils.join(columns, ',')).append("\n");
    console.printError(perTableInfo.toString());
  }
}
Aggregations