Example 1 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

The class PartialScanTask, method main:

public static void main(String[] args) {
    String inputPathStr = null;
    String outputDir = null;
    String jobConfFileName = null;
    try {
        // each option consumes the argument that follows it; walking past
        // the end of args throws the IndexOutOfBoundsException handled below
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-input")) {
                inputPathStr = args[++i];
            } else if (args[i].equals("-jobconffile")) {
                jobConfFileName = args[++i];
            } else if (args[i].equals("-outputDir")) {
                outputDir = args[++i];
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }
    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
        printUsage();
    }
    List<Path> inputPaths = new ArrayList<Path>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
        printUsage();
    }
    FileSystem fs = null;
    JobConf conf = new JobConf(PartialScanTask.class);
    // expand the inputs: a directory contributes its immediate children,
    // a plain file is added directly
    for (String path : paths) {
        try {
            Path pathObj = new Path(path);
            if (fs == null) {
                fs = FileSystem.get(pathObj.toUri(), conf);
            }
            FileStatus fstatus = fs.getFileStatus(pathObj);
            if (fstatus.isDir()) {
                FileStatus[] fileStatus = fs.listStatus(pathObj);
                for (FileStatus st : fileStatus) {
                    inputPaths.add(st.getPath());
                }
            } else {
                inputPaths.add(fstatus.getPath());
            }
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    }
    if (jobConfFileName != null) {
        conf.addResource(new Path(jobConfFileName));
    }
    org.slf4j.Logger LOG = LoggerFactory.getLogger(PartialScanTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);
    // print out the location of the log file for the user so that it's
    // easy to find the reason for local mode execution failures
    for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
        } else if (appender instanceof RollingFileAppender) {
            console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
        }
    }
    QueryState queryState = new QueryState(new HiveConf(conf, PartialScanTask.class));
    PartialScanWork mergeWork = new PartialScanWork(inputPaths);
    DriverContext driverCxt = new DriverContext();
    PartialScanTask taskExec = new PartialScanTask();
    taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);
    if (ret != 0) {
        System.exit(2);
    }
}
Also used: DriverContext(org.apache.hadoop.hive.ql.DriverContext) FileStatus(org.apache.hadoop.fs.FileStatus) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) RollingFileAppender(org.apache.logging.log4j.core.appender.RollingFileAppender) ArrayList(java.util.ArrayList) FileSystem(org.apache.hadoop.fs.FileSystem) HiveConf(org.apache.hadoop.hive.conf.HiveConf) JobConf(org.apache.hadoop.mapred.JobConf) Path(org.apache.hadoop.fs.Path) Appender(org.apache.logging.log4j.core.Appender) FileAppender(org.apache.logging.log4j.core.appender.FileAppender) IOException(java.io.IOException) QueryState(org.apache.hadoop.hive.ql.QueryState) CompilationOpContext(org.apache.hadoop.hive.ql.CompilationOpContext)
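
What this example demonstrates is the direct construction of a LogHelper around an SLF4J logger, with the silent flag read from the Hive configuration. Below is a minimal, self-contained sketch of just that pattern; the class name LogHelperDemo is made up for illustration, and it assumes hive-exec and an SLF4J binding on the classpath.

import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogHelperDemo {

    private static final Logger LOG = LoggerFactory.getLogger(LogHelperDemo.class);

    public static void main(String[] args) {
        // in PartialScanTask this flag comes from HiveConf.ConfVars.HIVESESSIONSILENT
        boolean isSilent = false;
        LogHelper console = new LogHelper(LOG, isSilent);
        console.printInfo("written to the console (unless silent) and to the log");
        console.printError("written to the error stream and to the log");
    }
}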

Example 2 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

The class DummySemanticAnalyzerHook1, method postAnalyze:

@Override
public void postAnalyze(HiveSemanticAnalyzerHookContext context, List<Task<? extends Serializable>> rootTasks) throws SemanticException {
    count = 0;
    if (!isCreateTable) {
        return;
    }
    CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1)).getWork().getCreateTblDesc();
    Map<String, String> tblProps = desc.getTblProps();
    if (tblProps == null) {
        tblProps = new HashMap<String, String>();
    }
    tblProps.put("createdBy", DummyCreateTableHook.class.getName());
    tblProps.put("Message", "Hive rocks!! Count: " + myCount);
    LogHelper console = SessionState.getConsole();
    console.printError("DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: " + myCount);
}
Also used: CreateTableDesc(org.apache.hadoop.hive.ql.plan.CreateTableDesc) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper)
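
Examples 2 through 5 illustrate the other way to obtain a LogHelper: rather than constructing one, hook code borrows the active session's console through the static SessionState.getConsole(). A minimal sketch of that hook-side idiom (imports from hive-exec assumed; the null check mirrors the defensive style of Example 3 below):

import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

public void run(HookContext hookContext) {
    LogHelper console = SessionState.getConsole();
    if (console == null) {
        // no session console to report to
        return;
    }
    console.printError("illustrative message: visible to the user and in the logs");
}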

Example 3 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

The class PostExecutePrinter, method run:

public void run(QueryState queryState, Set<ReadEntity> inputs, Set<WriteEntity> outputs, LineageInfo linfo, UserGroupInformation ugi) throws Exception {
    LogHelper console = SessionState.getConsole();
    if (console == null) {
        return;
    }
    if (queryState != null) {
        console.printInfo("POSTHOOK: query: " + queryState.getQueryString().trim(), false);
        console.printInfo("POSTHOOK: type: " + queryState.getCommandType(), false);
    }
    PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");
    PreExecutePrinter.printEntities(console, outputs, "POSTHOOK: Output: ");
    // Also print out the generic lineage information if there is any
    if (linfo != null) {
        LinkedList<Map.Entry<DependencyKey, Dependency>> entry_list = new LinkedList<Map.Entry<DependencyKey, Dependency>>(linfo.entrySet());
        Collections.sort(entry_list, new DependencyKeyComp());
        Iterator<Map.Entry<DependencyKey, Dependency>> iter = entry_list.iterator();
        while (iter.hasNext()) {
            Map.Entry<DependencyKey, Dependency> it = iter.next();
            Dependency dep = it.getValue();
            DependencyKey depK = it.getKey();
            if (dep == null) {
                continue;
            }
            StringBuilder sb = new StringBuilder();
            sb.append("POSTHOOK: Lineage: ");
            if (depK.getDataContainer().isPartition()) {
                Partition part = depK.getDataContainer().getPartition();
                sb.append(part.getTableName());
                sb.append(" PARTITION(");
                int i = 0;
                for (FieldSchema fs : depK.getDataContainer().getTable().getPartitionKeys()) {
                    if (i != 0) {
                        sb.append(",");
                    }
                    sb.append(fs.getName() + "=" + part.getValues().get(i++));
                }
                sb.append(")");
            } else {
                sb.append(depK.getDataContainer().getTable().getTableName());
            }
            sb.append("." + depK.getFieldSchema().getName() + " " + dep.getType() + " ");
            sb.append("[");
            for (BaseColumnInfo col : dep.getBaseCols()) {
                sb.append("(" + col.getTabAlias().getTable().getTableName() + ")" + col.getTabAlias().getAlias() + "." + col.getColumn() + ", ");
            }
            sb.append("]");
            console.printInfo(sb.toString(), false);
        }
    }
}
Also used: Partition(org.apache.hadoop.hive.metastore.api.Partition) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) DependencyKey(org.apache.hadoop.hive.ql.hooks.LineageInfo.DependencyKey) Dependency(org.apache.hadoop.hive.ql.hooks.LineageInfo.Dependency) LinkedList(java.util.LinkedList) BaseColumnInfo(org.apache.hadoop.hive.ql.hooks.LineageInfo.BaseColumnInfo) Map(java.util.Map)
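
Note the two-argument printInfo(String, boolean) used throughout this example: the boolean stands in for the session's silent flag, so passing false prints the line even when hive.session.silent is enabled, which is what keeps the POSTHOOK output visible in q-test runs. A small snippet contrasting the two forms, assuming the standard LogHelper overloads (message text is illustrative):

LogHelper console = SessionState.getConsole();
// one-argument form: honours the session's silent setting
console.printInfo("may be suppressed in a silent session");
// two-argument form: the boolean replaces the session setting
console.printInfo("POSTHOOK: printed even in a silent session", false);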

Example 4 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

The class CheckTableAccessHook, method run:

public void run(HookContext hookContext) {
    HiveConf conf = hookContext.getConf();
    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == false) {
        return;
    }
    QueryPlan plan = hookContext.getQueryPlan();
    if (plan == null) {
        return;
    }
    TableAccessInfo tableAccessInfo = hookContext.getQueryPlan().getTableAccessInfo();
    if (tableAccessInfo == null || tableAccessInfo.getOperatorToTableAccessMap() == null || tableAccessInfo.getOperatorToTableAccessMap().isEmpty()) {
        return;
    }
    LogHelper console = SessionState.getConsole();
    Map<Operator<? extends OperatorDesc>, Map<String, List<String>>> operatorToTableAccessMap = tableAccessInfo.getOperatorToTableAccessMap();
    // Must be deterministic order map for consistent q-test output across Java versions
    Map<String, String> outputOrderedMap = new LinkedHashMap<String, String>();
    for (Map.Entry<Operator<? extends OperatorDesc>, Map<String, List<String>>> tableAccess : operatorToTableAccessMap.entrySet()) {
        StringBuilder perOperatorInfo = new StringBuilder();
        perOperatorInfo.append("Operator:").append(tableAccess.getKey().getOperatorId()).append("\n");
        for (Map.Entry<String, List<String>> entry : tableAccess.getValue().entrySet()) {
            perOperatorInfo.append("Table:").append(entry.getKey()).append("\n");
            perOperatorInfo.append("Keys:").append(StringUtils.join(entry.getValue(), ',')).append("\n");
        }
        outputOrderedMap.put(tableAccess.getKey().getOperatorId(), perOperatorInfo.toString());
    }
    for (String perOperatorInfo : outputOrderedMap.values()) {
        console.printError(perOperatorInfo);
    }
}
Also used: Operator(org.apache.hadoop.hive.ql.exec.Operator) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) QueryPlan(org.apache.hadoop.hive.ql.QueryPlan) LinkedHashMap(java.util.LinkedHashMap) TableAccessInfo(org.apache.hadoop.hive.ql.parse.TableAccessInfo) HiveConf(org.apache.hadoop.hive.conf.HiveConf) List(java.util.List) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc) Map(java.util.Map)
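
The "// Must be deterministic order map" comment marks the design choice worth copying: HashMap iteration order is unspecified and has changed across Java releases, which would make the captured q-test output flaky, whereas LinkedHashMap always iterates in insertion order. A self-contained sketch of the difference (keys and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

Map<String, String> outputOrderedMap = new LinkedHashMap<String, String>();
outputOrderedMap.put("RS_3", "info for operator RS_3");
outputOrderedMap.put("GBY_2", "info for operator GBY_2");
// values() yields RS_3's entry and then GBY_2's on every JVM;
// a HashMap makes no such promise
for (String perOperatorInfo : outputOrderedMap.values()) {
    System.out.println(perOperatorInfo);
}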

Example 5 with LogHelper

Use of org.apache.hadoop.hive.ql.session.SessionState.LogHelper in project hive by apache.

The class CheckQueryPropertiesHook, method run:

public void run(HookContext hookContext) {
    LogHelper console = SessionState.getConsole();
    if (console == null) {
        return;
    }
    QueryProperties queryProps = hookContext.getQueryPlan().getQueryProperties();
    if (queryProps != null) {
        console.printError("Has Join: " + queryProps.hasJoin());
        console.printError("Has Group By: " + queryProps.hasGroupBy());
        console.printError("Has Sort By: " + queryProps.hasSortBy());
        console.printError("Has Order By: " + queryProps.hasOrderBy());
        console.printError("Has Group By After Join: " + queryProps.hasJoinFollowedByGroupBy());
        console.printError("Uses Script: " + queryProps.usesScript());
        console.printError("Has Distribute By: " + queryProps.hasDistributeBy());
        console.printError("Has Cluster By: " + queryProps.hasClusterBy());
    }
}
Also used: QueryProperties(org.apache.hadoop.hive.ql.QueryProperties) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper)
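
A final LogHelper detail: this hook, like Example 4, reports through printError rather than printInfo, and printError writes to the session's error stream unconditionally, so the output is not lost in silent sessions. There is also a two-argument overload in which the second string, typically a stack trace, goes only to the log; a hedged snippet of it below, with riskyOperation standing in for any failing call:

LogHelper console = SessionState.getConsole();
try {
    riskyOperation();  // hypothetical method, for illustration only
} catch (Exception e) {
    // the first argument reaches the user's error stream;
    // the detail string is appended only to the log
    console.printError("FAILED: " + e.getMessage(),
            org.apache.hadoop.util.StringUtils.stringifyException(e));
}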

Aggregations

LogHelper (org.apache.hadoop.hive.ql.session.SessionState.LogHelper) 16
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 5
Map (java.util.Map) 4
QueryPlan (org.apache.hadoop.hive.ql.QueryPlan) 3
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 3
IOException (java.io.IOException) 2
ArrayList (java.util.ArrayList) 2
LinkedHashMap (java.util.LinkedHashMap) 2
List (java.util.List) 2
FileSystem (org.apache.hadoop.fs.FileSystem) 2
Path (org.apache.hadoop.fs.Path) 2
ImmutableMap (com.google.common.collect.ImmutableMap) 1
InputStream (java.io.InputStream) 1
HashMap (java.util.HashMap) 1
HashSet (java.util.HashSet) 1
LinkedList (java.util.LinkedList) 1
Set (java.util.Set) 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 1
RelReferentialConstraint (org.apache.calcite.rel.RelReferentialConstraint) 1
ImmutableBitSet (org.apache.calcite.util.ImmutableBitSet) 1