Use of java.io.PrintStream in project hive by apache.
The class TestExplainTask, method explainToString().
private <K, V> String explainToString(Map<K, V> explainMap) throws Exception {
  ExplainWork work = new ExplainWork();
  ParseContext pCtx = new ParseContext();
  HashMap<String, TableScanOperator> topOps = new HashMap<>();
  TableScanOperator scanOp = new DummyOperator(new DummyExplainDesc<K, V>(explainMap));
  topOps.put("sample", scanOp);
  pCtx.setTopOps(topOps);
  work.setParseContext(pCtx);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  work.setConfig(new ExplainConfiguration());
  new ExplainTask().getJSONLogicalPlan(new PrintStream(baos), work);
  baos.close();
  return baos.toString();
}
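The pattern above, writing into a PrintStream backed by a ByteArrayOutputStream and reading the result back as a String, reduces to the minimal sketch below; the class name and the printed payload are made up for illustration.
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class CapturePrintStreamSketch {
  public static void main(String[] args) throws Exception {
    // In-memory buffer that receives everything printed to the stream.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream out = new PrintStream(baos, true, "UTF-8");
    // Whatever is printed here lands in the buffer instead of on the console.
    out.println("logical plan goes here");
    out.close();
    // Convert the captured bytes back into a String, as explainToString() does.
    String captured = baos.toString("UTF-8");
    System.out.print(captured);
  }
}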
Use of java.io.PrintStream in project hive by apache.
The class SQLOperation, method setupSessionIO().
private void setupSessionIO(SessionState sessionState) {
  try {
    // hive server's session input stream is not used
    sessionState.in = null;
    sessionState.out = new PrintStream(System.out, true, CharEncoding.UTF_8);
    sessionState.info = new PrintStream(System.err, true, CharEncoding.UTF_8);
    sessionState.err = new PrintStream(System.err, true, CharEncoding.UTF_8);
  } catch (UnsupportedEncodingException e) {
    LOG.error("Error creating PrintStream", e);
    e.printStackTrace();
    sessionState.out = null;
    sessionState.info = null;
    sessionState.err = null;
  }
}
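A standalone sketch of the same idea follows: wrap the standard streams in autoflushing, UTF-8 PrintStreams and handle the checked UnsupportedEncodingException. The plain "UTF-8" literal is used here instead of commons-lang's CharEncoding.UTF_8 to keep the example dependency-free.
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class SessionStreamsSketch {
  public static void main(String[] args) {
    PrintStream out;
    PrintStream err;
    try {
      // true enables autoflush; passing a charset name makes the constructor
      // throw the checked UnsupportedEncodingException, hence the try/catch.
      out = new PrintStream(System.out, true, "UTF-8");
      err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      // Every JVM is required to support UTF-8, so this branch is effectively unreachable.
      throw new AssertionError(e);
    }
    out.println("session stdout wrapped in UTF-8");
    err.println("session stderr wrapped in UTF-8");
  }
}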
Use of java.io.PrintStream in project hive by apache.
The class TestExecutor, method run().
@Override
public void run() {
  while (execute) {
    Test test = null;
    PrintStream logStream = null;
    Logger logger = null;
    try {
      // start a log cleaner at the start of each test
      LogDirectoryCleaner cleaner = new LogDirectoryCleaner(
          new File(mExecutionContextConfiguration.getGlobalLogDirectory()),
          mExecutionContextConfiguration.getMaxLogDirectoriesPerProfile());
      cleaner.setName("LogCleaner-" + mExecutionContextConfiguration.getGlobalLogDirectory());
      cleaner.setDaemon(true);
      cleaner.start();
      test = mTestQueue.poll(30, TimeUnit.MINUTES);
      if (!execute) {
        terminateExecutionContext();
        break;
      }
      if (test == null) {
        terminateExecutionContext();
      } else {
        test.setStatus(Status.inProgress());
        test.setDequeueTime(System.currentTimeMillis());
        if (mExecutionContext == null) {
          mExecutionContext = createExceutionContext();
        }
        test.setExecutionStartTime(System.currentTimeMillis());
        TestStartRequest startRequest = test.getStartRequest();
        String profile = startRequest.getProfile();
        File profileConfFile = new File(mExecutionContextConfiguration.getProfileDirectory(),
            String.format("%s.properties", profile));
        LOG.info("Attempting to run using profile file: {}", profileConfFile);
        if (!profileConfFile.isFile()) {
          test.setStatus(Status.illegalArgument("Profile " + profile + " not found in directory "
              + mExecutionContextConfiguration.getProfileDirectory()));
          test.setExecutionFinishTime(System.currentTimeMillis());
        } else {
          File logDir = Dirs.create(new File(mExecutionContextConfiguration.getGlobalLogDirectory(),
              test.getStartRequest().getTestHandle()));
          File logFile = new File(logDir, "execution.txt");
          test.setOutputFile(logFile);
          logStream = new PrintStream(logFile);
          logger = new TestLogger(logStream, TestLogger.LEVEL.DEBUG);
          TestConfiguration testConfiguration = TestConfiguration.fromFile(profileConfFile, logger);
          testConfiguration.setPatch(startRequest.getPatchURL());
          testConfiguration.setJiraName(startRequest.getJiraName());
          testConfiguration.setClearLibraryCache(startRequest.isClearLibraryCache());
          LocalCommandFactory localCommandFactory = new LocalCommandFactory(logger);
          PTest ptest = mPTestBuilder.build(testConfiguration, mExecutionContext,
              test.getStartRequest().getTestHandle(), logDir, localCommandFactory,
              new SSHCommandExecutor(logger),
              new RSyncCommandExecutor(logger, mExecutionContextConfiguration.getMaxRsyncThreads(), localCommandFactory),
              logger);
          int result = ptest.run();
          if (result == Constants.EXIT_CODE_SUCCESS) {
            test.setStatus(Status.ok());
          } else {
            test.setStatus(Status.failed("Tests failed with exit code " + result));
          }
          logStream.flush();
          // if all drones were abandoned on a host, try replacing them.
          mExecutionContext.replaceBadHosts();
        }
      }
    } catch (Exception e) {
      LOG.error("Unxpected Error", e);
      if (test != null) {
        test.setStatus(Status.failed("Tests failed with exception " + e.getClass().getName() + ": " + e.getMessage()));
        if (logger != null) {
          String msg = "Error executing " + test.getStartRequest().getTestHandle();
          logger.error(msg, e);
        }
      }
      // if we died for any reason lets get a new set of hosts
      terminateExecutionContext();
    } finally {
      if (test != null) {
        test.setExecutionFinishTime(System.currentTimeMillis());
      }
      if (logStream != null) {
        logStream.flush();
        logStream.close();
      }
    }
  }
}
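The PrintStream usage buried in the method above comes down to: open a stream on a log file, write to it, then flush and close it in a finally block so the file is complete even when the run fails. A minimal sketch, with a hypothetical temp-file location standing in for the per-test log directory:
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintStream;

public class FileLogStreamSketch {
  public static void main(String[] args) throws FileNotFoundException {
    // Hypothetical location; the real code builds logDir from the test handle.
    File logFile = new File(System.getProperty("java.io.tmpdir"), "execution.txt");
    PrintStream logStream = null;
    try {
      logStream = new PrintStream(logFile);
      logStream.println("test started");
      logStream.println("test finished");
      logStream.flush();
    } finally {
      // Mirror the finally block above: flush and close even on failure.
      if (logStream != null) {
        logStream.flush();
        logStream.close();
      }
    }
    System.out.println("log written to " + logFile);
  }
}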
Use of java.io.PrintStream in project kafka by apache.
The class FileStreamSinkTaskTest, method setup().
@Before
public void setup() {
  os = new ByteArrayOutputStream();
  printStream = new PrintStream(os);
  task = new FileStreamSinkTask(printStream);
}
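The setup above injects a PrintStream over a ByteArrayOutputStream so the test can later assert on what the task wrote. A self-contained sketch of the same injection pattern, with a hypothetical RecordSink standing in for FileStreamSinkTask:
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

public class PrintStreamInjectionSketch {
  // Hypothetical sink that writes each record to the injected stream.
  static final class RecordSink {
    private final PrintStream out;
    RecordSink(PrintStream out) { this.out = out; }
    void put(String record) { out.println(record); }
  }

  public static void main(String[] args) {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(os);
    RecordSink sink = new RecordSink(printStream);
    sink.put("line1");
    sink.put("line2");
    printStream.flush();
    // A test can now assert on everything the sink wrote.
    System.out.print(os.toString());
  }
}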
Use of java.io.PrintStream in project hive by apache.
The class TestBeeLineHistory, method testHistory().
@Test
public void testHistory() throws Exception {
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  PrintStream ops = new PrintStream(os);
  BeeLine beeline = new BeeLine();
  beeline.getOpts().setHistoryFile(fileName);
  beeline.setOutputStream(ops);
  Method method = beeline.getClass().getDeclaredMethod("setupHistory");
  method.setAccessible(true);
  method.invoke(beeline);
  beeline.initializeConsoleReader(null);
  beeline.dispatch("!history");
  String output = os.toString("UTF-8");
  String[] tmp = output.split("\n");
  Assert.assertTrue(tmp[0].equals("1 : select 1;"));
  Assert.assertTrue(tmp[9].equals("10 : select 10;"));
  beeline.close();
}
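The test above redirects BeeLine's output into an in-memory PrintStream, decodes it with an explicit charset, and asserts on individual lines. A minimal sketch of that capture-and-assert flow, with a hypothetical runHistoryCommand() standing in for beeline.dispatch("!history"):
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class CapturedOutputAssertionSketch {
  // Hypothetical stand-in for the tool whose output is being captured.
  static void runHistoryCommand(PrintStream out, int entries) {
    for (int i = 1; i <= entries; i++) {
      out.println(i + " : select " + i + ";");
    }
  }

  public static void main(String[] args) throws UnsupportedEncodingException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    PrintStream ops = new PrintStream(os, true, "UTF-8");
    runHistoryCommand(ops, 10);
    // Decode with the same charset the stream was created with; \R splits on any line terminator.
    String[] lines = os.toString("UTF-8").split("\\R");
    if (!lines[0].equals("1 : select 1;") || !lines[9].equals("10 : select 10;")) {
      throw new AssertionError("unexpected captured output");
    }
    System.out.println("captured " + lines.length + " history lines");
    ops.close();
  }
}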