Example 1 with CachingPrintStream

Use of org.apache.hadoop.hive.common.io.CachingPrintStream in project hive by apache.

The class VerifyCachingPrintStreamHook, method run:

public void run(HookContext hookContext) {
    SessionState ss = SessionState.get();
    // The test harness is expected to have swapped the session's error
    // stream for a CachingPrintStream.
    assert (ss.err instanceof CachingPrintStream);
    if (hookContext.getHookType() == HookType.ON_FAILURE_HOOK) {
        // On failure, replay every line the CachingPrintStream captured.
        ss.out.println("Begin cached logs.");
        for (String output : ((CachingPrintStream) ss.err).getOutput()) {
            ss.out.println(output);
        }
        ss.out.println("End cached logs.");
    } else {
        ss.err.println("TEST, this should only appear once in the log.");
    }
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream)
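
CachingPrintStream behaves like a normal PrintStream but additionally records every line printed through it, which is what lets a failure hook replay the whole error log afterwards. Below is a minimal standalone sketch of that round trip; it assumes only the constructor and getOutput() signatures visible in these examples, and the main() harness with a ByteArrayOutputStream sink is illustrative rather than Hive code.

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hive.common.io.CachingPrintStream;

public class CachingPrintStreamSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // Same constructor shape as in the cliInit examples below: (out, autoFlush, encoding).
        CachingPrintStream err = new CachingPrintStream(sink, true, "UTF-8");
        err.println("first error");
        err.println("second error");
        // Each line was passed through to the sink AND cached, so a hook can replay it.
        for (String line : err.getOutput()) {
            System.out.println("cached: " + line);
        }
    }
}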

Example 2 with CachingPrintStream

Use of org.apache.hadoop.hive.common.io.CachingPrintStream in project hive by apache.

The class QTestUtil, method cliInit:

public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
        cleanUp(tname);
        createSources(tname);
    }
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
    Utilities.clearWorkMap(conf);
    CliSessionState ss = createSessionState();
    assert ss != null;
    ss.in = System.in;
    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
        // TODO: why is this needed?
        File qf = new File(outDir, tname);
        stdoutName = qf.getName().concat(outFileExtension);
    } else {
        stdoutName = tname + outFileExtension;
    }
    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    if (qSortQuerySet.contains(tname)) {
        ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
        ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
        ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
        ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();
    boolean canReuseSession = !qNoSessionReuseQuerySet.contains(tname);
    if (oldSs != null && canReuseSession && clusterType.getCoreClusterType() == CoreClusterType.TEZ) {
        // Copy the tezSessionState from the old CliSessionState.
        tezSessionState = oldSs.getTezSession();
        oldSs.setTezSession(null);
        ss.setTezSession(tezSessionState);
        oldSs.close();
    }
    if (oldSs != null && clusterType.getCoreClusterType() == CoreClusterType.SPARK) {
        sparkSession = oldSs.getSparkSession();
        ss.setSparkSession(sparkSession);
        oldSs.setSparkSession(null);
        oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
        oldSs.out.close();
    }
    SessionState.start(ss);
    cliDriver = new CliDriver();
    if (tname.equals("init_file.q")) {
        ss.initFiles.add(AbstractCliConfig.HIVE_ROOT + "/data/scripts/test_init_file.sql");
    }
    cliDriver.processInitFiles(ss);
    return outf.getAbsolutePath();
}
Also used: PrintStream(java.io.PrintStream) OutputStream(java.io.OutputStream) BufferedOutputStream(java.io.BufferedOutputStream) FileOutputStream(java.io.FileOutputStream) File(java.io.File) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) DigestPrintStream(org.apache.hadoop.hive.common.io.DigestPrintStream) SortPrintStream(org.apache.hadoop.hive.common.io.SortPrintStream) SortAndDigestPrintStream(org.apache.hadoop.hive.common.io.SortAndDigestPrintStream) SessionState(org.apache.hadoop.hive.ql.session.SessionState) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) TezSessionState(org.apache.hadoop.hive.ql.exec.tez.TezSessionState) CliDriver(org.apache.hadoop.hive.cli.CliDriver)
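
The pattern worth noting in cliInit is that stdout and stderr of the test session share one buffered stream onto the query's .out file: ss.out is picked per query set (sorted, digested, sorted-and-digested, or plain), while ss.err is always a CachingPrintStream so hooks like Example 1's can replay errors. A condensed sketch of that wiring, assuming the constructors shown above; the wireStreams helper and the sortOutput flag are hypothetical stand-ins for the query-set lookups:

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.common.io.CachingPrintStream;
import org.apache.hadoop.hive.common.io.SortPrintStream;

class StreamWiring {
    // sortOutput stands in for qSortQuerySet.contains(tname)
    static void wireStreams(CliSessionState ss, File outf, boolean sortOutput) throws Exception {
        OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
        // stdout: optionally normalized so golden-file diffs stay stable
        ss.out = sortOutput ? new SortPrintStream(fo, "UTF-8")
                            : new PrintStream(fo, true, "UTF-8");
        // stderr: same file, but cached so it can be replayed on failure
        ss.err = new CachingPrintStream(fo, true, "UTF-8");
        ss.setIsSilent(true);
    }
}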

Example 3 with CachingPrintStream

Use of org.apache.hadoop.hive.common.io.CachingPrintStream in project SQLWindowing by hbutani.

The class WindowingHiveCliDriver, method run:

public static int run(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        return 1;
    }
    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    boolean logInitFailed = false;
    String logInitDetailMessage;
    try {
        logInitDetailMessage = LogUtils.initHiveLog4j();
    } catch (LogInitializationException e) {
        logInitFailed = true;
        logInitDetailMessage = e.getMessage();
    }
    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.info = new PrintStream(System.err, true, "UTF-8");
        ss.err = new CachingPrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        return 3;
    }
    if (!oproc.process_stage2(ss)) {
        return 2;
    }
    if (!ss.getIsSilent()) {
        if (logInitFailed) {
            System.err.println(logInitDetailMessage);
        } else {
            SessionState.getConsole().printInfo(logInitDetailMessage);
        }
    }
    // set all properties specified via command line
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
        ss.getOverriddenConfigurations().put((String) item.getKey(), (String) item.getValue());
    }
    SessionState.start(ss);
    // connect to Hive Server
    if (ss.getHost() != null) {
        ss.connect();
        if (ss.isRemoteMode()) {
            prompt = "[" + ss.getHost() + ':' + ss.getPort() + "] " + prompt;
            char[] spaces = new char[prompt.length()];
            Arrays.fill(spaces, ' ');
            prompt2 = new String(spaces);
        }
    }
    // CLI remote mode is a thin client: only load auxJars in local mode
    if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }
    WindowingHiveCliDriver cli = new WindowingHiveCliDriver();
    cli.setHiveVariables(oproc.getHiveVariables());
    // use the specified database if specified
    cli.processSelectDatabase(ss);
    // Execute -i init files (always in silent mode)
    cli.processInitFiles(ss);
    cli.setupWindowing();
    if (ss.execString != null) {
        return cli.processLine(ss.execString);
    }
    try {
        if (ss.fileName != null) {
            return cli.processFile(ss.fileName);
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        return 3;
    }
    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);
    // reader.setDebug(new PrintWriter(new FileWriter("writer.debug", true)));
    for (Completor completor : getCommandCompletor()) {
        reader.addCompletor(completor);
    }
    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyDirectory = System.getProperty("user.home");
    try {
        if ((new File(historyDirectory)).exists()) {
            String historyFile = historyDirectory + File.separator + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
        } else {
            System.err.println("WARNING: Directory for Hive history file: " + historyDirectory + " does not exist.   History will not be available during this session.");
        }
    } catch (Exception e) {
        System.err.println("WARNING: Encountered an error while trying to initialize Hive's " + "history file.  History will not be available during this session.");
        System.err.println(e.getMessage());
    }
    int ret = 0;
    String prefix = "";
    String curDB = getFormattedDb(conf, ss);
    String curPrompt = prompt + curDB;
    String dbSpaces = spacesForString(curDB);
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        if (!prefix.equals("")) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            ret = cli.processLine(line, true);
            prefix = "";
            curDB = getFormattedDb(conf, ss);
            curPrompt = prompt + curDB;
            dbSpaces = dbSpaces.length() == curDB.length() ? dbSpaces : spacesForString(curDB);
        } else {
            prefix = prefix + line;
            curPrompt = prompt2 + dbSpaces;
            continue;
        }
    }
    ss.close();
    return ret;
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) HiveConf(org.apache.hadoop.hive.conf.HiveConf) OptionsProcessor(org.apache.hadoop.hive.cli.OptionsProcessor) PrintStream(java.io.PrintStream) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) ConsoleReader(jline.ConsoleReader) Completor(jline.Completor) History(jline.History) File(java.io.File) Map(java.util.Map) FileNotFoundException(java.io.FileNotFoundException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) WindowingException(com.sap.hadoop.windowing.WindowingException) LogInitializationException(org.apache.hadoop.hive.common.LogUtils.LogInitializationException)
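
Here the three session streams are split: ss.out wraps System.out while ss.info and ss.err both wrap System.err, and only ss.err is caching. Anything later in the session can therefore read back what went to stderr by casting, just as Example 1 does. A small hedged sketch of that read-back step; sessionErrors() is a hypothetical helper, and only the cast-plus-getOutput() idiom is taken from these examples:

import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.common.io.CachingPrintStream;
import org.apache.hadoop.hive.ql.session.SessionState;

class ErrorReadback {
    // Hypothetical helper: return whatever the current session has printed to stderr.
    static List<String> sessionErrors() {
        SessionState ss = SessionState.get();
        if (ss != null && ss.err instanceof CachingPrintStream) {
            return ((CachingPrintStream) ss.err).getOutput();
        }
        return Collections.emptyList();
    }
}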

Example 4 with CachingPrintStream

Use of org.apache.hadoop.hive.common.io.CachingPrintStream in project phoenix by apache.

The class HiveTestUtil, method cliInit:

public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
        cleanUp();
        createSources();
    }
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER, "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
    Utilities.clearWorkMap();
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;
    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
        File qf = new File(outDir, tname);
        stdoutName = qf.getName().concat(outFileExtension);
    } else {
        stdoutName = tname + outFileExtension;
    }
    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    if (qSortQuerySet.contains(tname)) {
        ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
        ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
        ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
        ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();
    if (oldSs != null && clusterType == MiniClusterType.tez) {
        oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
        oldSs.out.close();
    }
    SessionState.start(ss);
    cliDriver = new CliDriver();
    cliDriver.processInitFiles(ss);
    return outf.getAbsolutePath();
}
Also used: SessionState(org.apache.hadoop.hive.ql.session.SessionState) CliSessionState(org.apache.hadoop.hive.cli.CliSessionState) CliDriver(org.apache.hadoop.hive.cli.CliDriver) PrintStream(java.io.PrintStream) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) DigestPrintStream(org.apache.hadoop.hive.common.io.DigestPrintStream) SortPrintStream(org.apache.hadoop.hive.common.io.SortPrintStream) SortAndDigestPrintStream(org.apache.hadoop.hive.common.io.SortAndDigestPrintStream) OutputStream(java.io.OutputStream) BufferedOutputStream(java.io.BufferedOutputStream) FileOutputStream(java.io.FileOutputStream) File(java.io.File)

Example 5 with CachingPrintStream

Use of org.apache.hadoop.hive.common.io.CachingPrintStream in project hive by apache.

The class MapRedTask, method execute:

@Override
public int execute(DriverContext driverContext) {
    Context ctx = driverContext.getCtx();
    boolean ctxCreated = false;
    try {
        if (ctx == null) {
            ctx = new Context(conf);
            ctxCreated = true;
        }
        // estimate number of reducers
        setNumberOfReducers();
        // auto-determine local mode if allowed
        if (!ctx.isLocalOnlyExecutionMode() && conf.getBoolVar(HiveConf.ConfVars.LOCALMODEAUTO)) {
            if (inputSummary == null) {
                inputSummary = Utilities.getInputSummary(driverContext.getCtx(), work.getMapWork(), null);
            }
            // set the values of totalInputFileSize and totalInputNumFiles, estimating them
            // if percentage block sampling is being used
            double samplePercentage = Utilities.getHighestSamplePercentage(work.getMapWork());
            totalInputFileSize = Utilities.getTotalInputFileSize(inputSummary, work.getMapWork(), samplePercentage);
            totalInputNumFiles = Utilities.getTotalInputNumFiles(inputSummary, work.getMapWork(), samplePercentage);
            // at this point the number of reducers is precisely defined in the plan
            int numReducers = work.getReduceWork() == null ? 0 : work.getReduceWork().getNumReduceTasks();
            if (LOG.isDebugEnabled()) {
                LOG.debug("Task: " + getId() + ", Summary: " + totalInputFileSize + "," + totalInputNumFiles + "," + numReducers);
            }
            String reason = MapRedTask.isEligibleForLocalMode(conf, numReducers, totalInputFileSize, totalInputNumFiles);
            if (reason == null) {
                // clone configuration before modifying it on per-task basis
                cloneConf();
                ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf, "local");
                console.printInfo("Selecting local mode for task: " + getId());
                this.setLocalMode(true);
            } else {
                console.printInfo("Cannot run job locally: " + reason);
                this.setLocalMode(false);
            }
        }
        runningViaChild = conf.getBoolVar(HiveConf.ConfVars.SUBMITVIACHILD);
        if (!runningViaChild) {
        // since we are running the mapred task in the same jvm, we should update the job conf
        // in ExecDriver as well to have proper local properties.
            if (this.isLocalMode()) {
                // save the original job tracker
                ctx.setOriginalTracker(ShimLoader.getHadoopShims().getJobLauncherRpcAddress(job));
                // change it to local
                ShimLoader.getHadoopShims().setJobLauncherRpcAddress(job, "local");
            }
            // we are not running this mapred task via child jvm
            // so directly invoke ExecDriver
            int ret = super.execute(driverContext);
            // restore the previous properties for framework name, RM address etc.
            if (this.isLocalMode()) {
                // restore the local job tracker back to original
                ctx.restoreOriginalTracker();
            }
            return ret;
        }
        // we need to edit the configuration to setup cmdline. clone it first
        cloneConf();
        // propagate input format if necessary
        super.setInputAttributes(conf);
        // enable assertion
        String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
        String hiveJar = conf.getJar();
        String libJars = super.getResource(conf, ResourceType.JAR);
        String libJarsOption = StringUtils.isEmpty(libJars) ? " " : " -libjars " + libJars + " ";
        // Generate the hiveConfArgs after potentially adding the jars
        String hiveConfArgs = generateCmdLine(conf, ctx);
        // write out the plan to a local file
        Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
        MapredWork plan = getWork();
        LOG.info("Generating plan file " + planPath.toString());
        OutputStream out = null;
        try {
            out = FileSystem.getLocal(conf).create(planPath);
            SerializationUtilities.serializePlan(plan, out);
            out.close();
            out = null;
        } finally {
            IOUtils.closeQuietly(out);
        }
        String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ? "-nolog" : "";
        String jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
        String cmdLine = hadoopExec + " jar " + jarCmd + " -plan " + planPath.toString() + " " + isSilent + " " + hiveConfArgs;
        String workDir = (new File(".")).getCanonicalPath();
        String files = super.getResource(conf, ResourceType.FILE);
        if (!files.isEmpty()) {
            cmdLine = cmdLine + " -files " + files;
            workDir = ctx.getLocalTmpPath().toUri().getPath();
            if (!(new File(workDir)).mkdir()) {
                throw new IOException("Cannot create tmp working dir: " + workDir);
            }
            for (String f : StringUtils.split(files, ',')) {
                Path p = new Path(f);
                String target = p.toUri().getPath();
                String link = workDir + Path.SEPARATOR + p.getName();
                if (FileUtil.symLink(target, link) != 0) {
                    throw new IOException("Cannot link to added file: " + target + " from: " + link);
                }
            }
        }
        LOG.info("Executing: " + cmdLine);
        // Inherit Java system variables
        String hadoopOpts;
        StringBuilder sb = new StringBuilder();
        Properties p = System.getProperties();
        for (String element : HIVE_SYS_PROP) {
            if (p.containsKey(element)) {
                sb.append(" -D" + element + "=" + p.getProperty(element));
            }
        }
        hadoopOpts = sb.toString();
        // Inherit the environment variables
        String[] env;
        Map<String, String> variables = new HashMap<String, String>(System.getenv());
        if (ShimLoader.getHadoopShims().isLocalMode(conf)) {
            // if we are running in local mode - then the amount of memory used
            // by the child jvm can no longer default to the memory used by the
            // parent jvm
            int hadoopMem = conf.getIntVar(HiveConf.ConfVars.HIVEHADOOPMAXMEM);
            if (hadoopMem == 0) {
                // remove env var that would default child jvm to use parent's memory
                // as default. child jvm would use default memory for a hadoop client
                variables.remove(HADOOP_MEM_KEY);
            } else {
                // user specified the memory for local mode hadoop run
                variables.put(HADOOP_MEM_KEY, String.valueOf(hadoopMem));
            }
        } else {
        // nothing to do - we are not running in local mode - only submitting
        // the job via a child process. in this case it's appropriate that the
        // child jvm use the same memory as the parent jvm
        }
        if (variables.containsKey(HADOOP_OPTS_KEY)) {
            variables.put(HADOOP_OPTS_KEY, variables.get(HADOOP_OPTS_KEY) + hadoopOpts);
        } else {
            variables.put(HADOOP_OPTS_KEY, hadoopOpts);
        }
        if (variables.containsKey(HIVE_DEBUG_RECURSIVE)) {
            configureDebugVariablesForChildJVM(variables);
        }
        env = new String[variables.size()];
        int pos = 0;
        for (Map.Entry<String, String> entry : variables.entrySet()) {
            String name = entry.getKey();
            String value = entry.getValue();
            env[pos++] = name + "=" + value;
        }
        // Run ExecDriver in another JVM
        executor = Runtime.getRuntime().exec(cmdLine, env, new File(workDir));
        CachingPrintStream errPrintStream = new CachingPrintStream(SessionState.getConsole().getChildErrStream());
        StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, SessionState.getConsole().getChildOutStream());
        StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, errPrintStream);
        outPrinter.start();
        errPrinter.start();
        int exitVal = jobExecHelper.progressLocal(executor, getId());
        // wait for stream threads to finish
        outPrinter.join();
        errPrinter.join();
        if (exitVal != 0) {
            LOG.error("Execution failed with exit status: " + exitVal);
            if (SessionState.get() != null) {
                SessionState.get().addLocalMapRedErrors(getId(), errPrintStream.getOutput());
            }
        } else {
            LOG.info("Execution completed successfully");
        }
        return exitVal;
    } catch (Exception e) {
        LOG.error("Got exception", e);
        return (1);
    } finally {
        try {
            // creating the context can create a bunch of files, so make
            // sure to clear it out
            if (ctxCreated) {
                ctx.clear();
            }
        } catch (Exception e) {
            LOG.error("Exception: ", e);
        }
    }
}
Also used: Context(org.apache.hadoop.hive.ql.Context) DriverContext(org.apache.hadoop.hive.ql.DriverContext) Path(org.apache.hadoop.fs.Path) MapredWork(org.apache.hadoop.hive.ql.plan.MapredWork) CachingPrintStream(org.apache.hadoop.hive.common.io.CachingPrintStream) StreamPrinter(org.apache.hive.common.util.StreamPrinter) OutputStream(java.io.OutputStream) File(java.io.File) IOException(java.io.IOException) HashMap(java.util.HashMap) Map(java.util.Map) Properties(java.util.Properties)
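
This example shows the third role of CachingPrintStream: capturing the stderr of a forked child JVM. A StreamPrinter thread pumps the child's error stream into a CachingPrintStream, and only when the exit status is non-zero are the cached lines attached to the session as local map-reduce errors. A stripped-down sketch of that capture loop, assuming the StreamPrinter(InputStream, String, PrintStream...) constructor used above; the runChild helper and the plain waitFor() in place of jobExecHelper.progressLocal() are illustrative:

import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hive.common.io.CachingPrintStream;
import org.apache.hive.common.util.StreamPrinter;

class ChildErrCapture {
    // Illustrative: run a command and return its stderr lines only if it failed.
    static List<String> runChild(String[] cmd) throws Exception {
        Process child = Runtime.getRuntime().exec(cmd);
        // Tee the child's stderr to our own stderr while caching every line.
        CachingPrintStream errCache = new CachingPrintStream(System.err);
        StreamPrinter errPrinter = new StreamPrinter(child.getErrorStream(), null, errCache);
        errPrinter.start();
        int exitVal = child.waitFor();
        errPrinter.join(); // let the pump thread drain the stream before reading the cache
        return exitVal != 0 ? errCache.getOutput() : Collections.emptyList();
    }
}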

Aggregations

CachingPrintStream (org.apache.hadoop.hive.common.io.CachingPrintStream): 7 usages
File (java.io.File): 5 usages
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 5 usages
OutputStream (java.io.OutputStream): 4 usages
PrintStream (java.io.PrintStream): 4 usages
Map (java.util.Map): 4 usages
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 4 usages
BufferedOutputStream (java.io.BufferedOutputStream): 2 usages
FileOutputStream (java.io.FileOutputStream): 2 usages
IOException (java.io.IOException): 2 usages
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 2 usages
HashMap (java.util.HashMap): 2 usages
Properties (java.util.Properties): 2 usages
Path (org.apache.hadoop.fs.Path): 2 usages
CliDriver (org.apache.hadoop.hive.cli.CliDriver): 2 usages
OptionsProcessor (org.apache.hadoop.hive.cli.OptionsProcessor): 2 usages
LogInitializationException (org.apache.hadoop.hive.common.LogUtils.LogInitializationException): 2 usages
DigestPrintStream (org.apache.hadoop.hive.common.io.DigestPrintStream): 2 usages
SortAndDigestPrintStream (org.apache.hadoop.hive.common.io.SortAndDigestPrintStream): 2 usages
SortPrintStream (org.apache.hadoop.hive.common.io.SortPrintStream): 2 usages