Example 6 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

Class ExprNodeGenericFuncDesc, method newInstance.

/**
 * Create an ExprNodeGenericFuncDesc from the given GenericUDF and the
 * children parameters. If the function has an explicit name, pass it to
 * newInstance in the funcText argument.
 *
 * @throws UDFArgumentException
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
    ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
    for (int i = 0; i < childrenOIs.length; i++) {
        childrenOIs[i] = children.get(i).getWritableObjectInspector();
    }
    // Perform the check here instead of in GenericUDFBaseCompare to guarantee it is only run once per operator
    if (genericUDF instanceof GenericUDFBaseCompare && children.size() == 2) {
        TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
        TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();
        SessionState ss = SessionState.get();
        Configuration conf = (ss != null) ? ss.getConf() : new Configuration();
        LogHelper console = new LogHelper(LOG);
        // For now, if a bigint is compared to a string or to a double, throw an
        // error when strict type-safety checks are enabled, or print a warning
        if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo))
                || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
            String error = StrictChecks.checkTypeSafety(conf);
            if (error != null) {
                throw new UDFArgumentException(error);
            }
            console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
        } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo))
                || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
            String error = StrictChecks.checkTypeSafety(conf);
            if (error != null) {
                throw new UDFArgumentException(error);
            }
            console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
        }
        }
    }
    ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
    String[] requiredJars = genericUDF.getRequiredJars();
    String[] requiredFiles = genericUDF.getRequiredFiles();
    SessionState ss = SessionState.get();
    if (requiredJars != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("JAR");
        try {
            ss.add_resources(t, Arrays.asList(requiredJars));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    if (requiredFiles != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("FILE");
        try {
            ss.add_resources(t, Arrays.asList(requiredFiles));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Configuration(org.apache.hadoop.conf.Configuration) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) GenericUDFBaseCompare(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare)
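
A minimal invocation sketch for newInstance; the class name, column, alias, and constant below are hypothetical, and the Hive ql module is assumed to be on the classpath. Comparing a bigint column with a string constant exercises the type-safety branch above, so this either prints the precision warning or throws a UDFArgumentException when strict type-safety checks are enabled:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class NewInstanceSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Hypothetical children: a bigint column "id" of table alias "t"
        // compared with the string constant "42".
        List<ExprNodeDesc> children = Arrays.asList(
            new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "id", "t", false),
            new ExprNodeConstantDesc("42"));
        // GenericUDFOPEqual extends GenericUDFBaseCompare, so newInstance
        // runs the bigint-vs-string check before initializing the UDF.
        ExprNodeGenericFuncDesc eq =
            ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), "=", children);
        System.out.println(eq.getExprString());
    }
}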

Example 7 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

Class MapJoinCounterHook, method run.

public void run(HookContext hookContext) {
    HiveConf conf = hookContext.getConf();
    boolean enableConvert = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVECONVERTJOIN);
    if (!enableConvert) {
        return;
    }
    QueryPlan plan = hookContext.getQueryPlan();
    String queryID = plan.getQueryId();
    // String query = SessionState.get().getCmd();
    int commonJoin = 0;
    int hintedMapJoin = 0;
    int convertedMapJoin = 0;
    int hintedMapJoinLocal = 0;
    int convertedMapJoinLocal = 0;
    int backupCommonJoin = 0;
    List<TaskRunner> list = hookContext.getCompleteTaskList();
    for (TaskRunner tskRunner : list) {
        Task tsk = tskRunner.getTask();
        int tag = tsk.getTaskTag();
        switch(tag) {
            case Task.COMMON_JOIN:
                commonJoin++;
                break;
            case Task.HINTED_MAPJOIN:
                hintedMapJoin++;
                break;
            case Task.HINTED_MAPJOIN_LOCAL:
                hintedMapJoinLocal++;
                break;
            case Task.CONVERTED_MAPJOIN:
                convertedMapJoin++;
                break;
            case Task.CONVERTED_MAPJOIN_LOCAL:
                convertedMapJoinLocal++;
                break;
            case Task.BACKUP_COMMON_JOIN:
                backupCommonJoin++;
                break;
        }
    }
    LogHelper console = SessionState.getConsole();
    console.printError("[MapJoinCounter PostHook] COMMON_JOIN: " + commonJoin + " HINTED_MAPJOIN: " + hintedMapJoin + " HINTED_MAPJOIN_LOCAL: " + hintedMapJoinLocal + " CONVERTED_MAPJOIN: " + convertedMapJoin + " CONVERTED_MAPJOIN_LOCAL: " + convertedMapJoinLocal + " BACKUP_COMMON_JOIN: " + backupCommonJoin);
}
Also used: Task(org.apache.hadoop.hive.ql.exec.Task) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) HiveConf(org.apache.hadoop.hive.conf.HiveConf) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) TaskRunner(org.apache.hadoop.hive.ql.exec.TaskRunner)
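
A wiring sketch, not part of the example itself: post-execution hooks are registered through hive.exec.post.hooks (HiveConf.ConfVars.POSTEXECHOOKS), and this hook reports nothing unless map-join conversion is on. The package of MapJoinCounterHook is an assumption here:

import org.apache.hadoop.hive.conf.HiveConf;

public class RegisterMapJoinCounterHook {
    public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Run the counter hook after each query; HIVECONVERTJOIN must be
        // enabled, otherwise run() returns before counting anything.
        conf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
            "org.apache.hadoop.hive.ql.hooks.MapJoinCounterHook");
        conf.setBoolVar(HiveConf.ConfVars.HIVECONVERTJOIN, true);
    }
}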

Example 8 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

Class VerifyOverriddenConfigsHook, method run.

public void run(HookContext hookContext) {
    LogHelper console = SessionState.getConsole();
    SessionState ss = SessionState.get();
    if (console == null || ss == null) {
        return;
    }
    for (Entry<String, String> entry : ss.getOverriddenConfigurations().entrySet()) {
        if (keysList.contains(entry.getKey())) {
            console.printError("Key: " + entry.getKey() + ", Value: " + entry.getValue());
        }
    }
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper)
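
The null checks matter because a hook can run outside an interactive session where no console or SessionState is attached. A minimal hypothetical hook reusing the same defensive pattern (class name and message are illustrative):

import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

public class PrintQueryIdHook implements ExecuteWithHookContext {
    @Override
    public void run(HookContext hookContext) throws Exception {
        LogHelper console = SessionState.getConsole();
        SessionState ss = SessionState.get();
        // Bail out quietly when no console or session is attached, exactly
        // as VerifyOverriddenConfigsHook does above.
        if (console == null || ss == null) {
            return;
        }
        console.printInfo("Query id: " + hookContext.getQueryPlan().getQueryId());
    }
}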

Example 9 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

Class DummySemanticAnalyzerHook1, method preAnalyze.

@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    LogHelper console = SessionState.getConsole();
    isCreateTable = (ast.getToken().getType() == HiveParser.TOK_CREATETABLE);
    myCount = count++;
    if (isCreateTable) {
        console.printError("DummySemanticAnalyzerHook1 Pre: Count " + myCount);
    }
    return ast;
}
Also used: LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper)
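
Semantic analyzer hooks of this kind are registered through hive.semantic.analyzer.hook (HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK). A hypothetical minimal hook built on the same preAnalyze pattern, flagging DROP TABLE instead of CREATE TABLE (class name and message are illustrative):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

public class WarnOnDropTableHook extends AbstractSemanticAnalyzerHook {
    @Override
    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
            throws SemanticException {
        // Inspect the root token of the parse tree, as the dummy hook above
        // does; TOK_DROPTABLE marks a DROP TABLE statement.
        if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE) {
            LogHelper console = SessionState.getConsole();
            if (console != null) {
                console.printError("WarnOnDropTableHook: dropping a table");
            }
        }
        return ast;
    }
}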

Example 10 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

Class CheckColumnAccessHook, method run.

@Override
public void run(HookContext hookContext) {
    HiveConf conf = hookContext.getConf();
    if (!conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
        return;
    }
    QueryPlan plan = hookContext.getQueryPlan();
    if (plan == null) {
        return;
    }
    ColumnAccessInfo columnAccessInfo = hookContext.getQueryPlan().getColumnAccessInfo();
    if (columnAccessInfo == null) {
        return;
    }
    LogHelper console = SessionState.getConsole();
    Map<String, List<String>> tableToColumnAccessMap = columnAccessInfo.getTableToColumnAccessMap();
    // Must be deterministic order map for consistent test output across Java versions
    Map<String, String> outputOrderedMap = new LinkedHashMap<String, String>();
    for (Map.Entry<String, List<String>> tableAccess : tableToColumnAccessMap.entrySet()) {
        StringBuilder perTableInfo = new StringBuilder();
        perTableInfo.append("Table:").append(tableAccess.getKey()).append("\n");
        // Sort columns to make output deterministic
        String[] columns = new String[tableAccess.getValue().size()];
        tableAccess.getValue().toArray(columns);
        Arrays.sort(columns);
        perTableInfo.append("Columns:").append(StringUtils.join(columns, ',')).append("\n");
        outputOrderedMap.put(tableAccess.getKey(), perTableInfo.toString());
    }
    for (String perOperatorInfo : outputOrderedMap.values()) {
        console.printError(perOperatorInfo);
    }
}
Also used: LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) ColumnAccessInfo(org.apache.hadoop.hive.ql.parse.ColumnAccessInfo) LinkedHashMap(java.util.LinkedHashMap) HiveConf(org.apache.hadoop.hive.conf.HiveConf) List(java.util.List) Map(java.util.Map)
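
Across these examples, a LogHelper comes either from the session (SessionState.getConsole()) or from wrapping a logger directly, as Example 6 does. A minimal sketch of the direct form, assuming a recent Hive where the constructor takes an org.slf4j.Logger; with no active SessionState the output falls back to the process's standard streams:

import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogHelperSketch {
    private static final Logger LOG = LoggerFactory.getLogger(LogHelperSketch.class);

    public static void main(String[] args) {
        // printInfo and printError write to the console streams and the log.
        LogHelper console = new LogHelper(LOG);
        console.printInfo("column access report follows");
        console.printError("Table:default@t1\nColumns:a,b");
    }
}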

Aggregations

LogHelper (org.apache.hadoop.hive.ql.session.SessionState.LogHelper): 15 usages
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 5 usages
Map (java.util.Map): 4 usages
QueryPlan (org.apache.hadoop.hive.ql.QueryPlan): 3 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3 usages
IOException (java.io.IOException): 2 usages
ArrayList (java.util.ArrayList): 2 usages
LinkedHashMap (java.util.LinkedHashMap): 2 usages
List (java.util.List): 2 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 2 usages
Path (org.apache.hadoop.fs.Path): 2 usages
DriverContext (org.apache.hadoop.hive.ql.DriverContext): 2 usages
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 2 usages
ImmutableMap (com.google.common.collect.ImmutableMap): 1 usage
InputStream (java.io.InputStream): 1 usage
HashMap (java.util.HashMap): 1 usage
HashSet (java.util.HashSet): 1 usage
LinkedList (java.util.LinkedList): 1 usage
Set (java.util.Set): 1 usage
AtomicInteger (java.util.concurrent.atomic.AtomicInteger): 1 usage