Use of org.pentaho.di.core.util.StreamLogger in project pentaho-kettle by pentaho.
The class TextFileOutput, method initCommandStreamWriter:
private void initCommandStreamWriter(String cmdstr) throws KettleException {
  data.writer = null;
  try {
    if (log.isDebug()) {
      logDebug("Spawning external process");
    }
    if (data.cmdProc != null) {
      logError("Previous command not correctly terminated");
      setErrors(1);
    }
    // On Windows, run the command through the shell interpreter.
    if (Const.getOS().equals("Windows 95")) {
      cmdstr = "command.com /C " + cmdstr;
    } else if (Const.getOS().startsWith("Windows")) {
      cmdstr = "cmd.exe /C " + cmdstr;
    }
    if (isDetailed()) {
      logDetailed("Starting: " + cmdstr);
    }
    Runtime runtime = Runtime.getRuntime();
    data.cmdProc = runtime.exec(cmdstr, EnvUtil.getEnvironmentVariablesForRuntimeExec());
    // The step writes its rows to the child's stdin; stdout and stderr are
    // drained on background threads so the child never blocks on a full pipe.
    data.writer = data.cmdProc.getOutputStream();
    StreamLogger stdoutLogger = new StreamLogger(log, data.cmdProc.getInputStream(), "(stdout)");
    StreamLogger stderrLogger = new StreamLogger(log, data.cmdProc.getErrorStream(), "(stderr)");
    new Thread(stdoutLogger).start();
    new Thread(stderrLogger).start();
  } catch (Exception e) {
    throw new KettleException("Error opening new file : " + e.toString());
  }
}
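The two logger threads are the load-bearing part of this pattern: if nothing drains a child process's stdout and stderr, the OS pipe buffers can fill and the child blocks forever. As a minimal, self-contained sketch of what a StreamLogger-style runnable does (SimpleStreamLogger is an invented name, not the Kettle class, and the System.out call stands in for the real log channel):

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

// Minimal stand-in for StreamLogger: drain an InputStream on its own thread
// so the child process never blocks on a full stdout/stderr pipe buffer.
class SimpleStreamLogger implements Runnable {
  private final InputStream is;
  private final String type; // label such as "(stdout)" or "(stderr)"

  SimpleStreamLogger(InputStream is, String type) {
    this.is = is;
    this.type = type;
  }

  @Override
  public void run() {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(is))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(type + " " + line); // the real class logs via the log channel
      }
    } catch (Exception e) {
      System.out.println(type + " stream closed: " + e.getMessage());
    }
  }
}

// Usage, mirroring the snippet above:
//   new Thread(new SimpleStreamLogger(proc.getInputStream(), "(stdout)")).start();
//   new Thread(new SimpleStreamLogger(proc.getErrorStream(), "(stderr)")).start();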
Use of org.pentaho.di.core.util.StreamLogger in project pentaho-kettle by pentaho.
The class TextFileOutputLegacy, method initCommandStreamWriter:
private void initCommandStreamWriter(String cmdstr) throws KettleException {
  data.writer = null;
  try {
    if (log.isDebug()) {
      logDebug("Spawning external process");
    }
    if (data.cmdProc != null) {
      logError("Previous command not correctly terminated");
      setErrors(1);
    }
    // On Windows, run the command through the shell interpreter.
    if (Const.getOS().equals("Windows 95")) {
      cmdstr = "command.com /C " + cmdstr;
    } else if (Const.getOS().startsWith("Windows")) {
      cmdstr = "cmd.exe /C " + cmdstr;
    }
    if (isDetailed()) {
      logDetailed("Starting: " + cmdstr);
    }
    Runtime runtime = Runtime.getRuntime();
    data.cmdProc = runtime.exec(cmdstr, EnvUtil.getEnvironmentVariablesForRuntimeExec());
    // Same wiring as TextFileOutput: rows go to the child's stdin, while
    // stdout and stderr are drained on background threads.
    data.writer = data.cmdProc.getOutputStream();
    StreamLogger stdoutLogger = new StreamLogger(log, data.cmdProc.getInputStream(), "(stdout)");
    StreamLogger stderrLogger = new StreamLogger(log, data.cmdProc.getErrorStream(), "(stderr)");
    new Thread(stdoutLogger).start();
    new Thread(stderrLogger).start();
  } catch (Exception e) {
    throw new KettleException("Error opening new file : " + e.toString());
  }
}
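On Java 7 and later, the second logger thread can be avoided by merging stderr into stdout with ProcessBuilder. A sketch of that alternative follows (this is not how Kettle does it, and the Windows command is only an example); the trade-off is losing the separate (stdout)/(stderr) labels:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;

public class MergedStreamsDemo {
  public static void main(String[] args) throws Exception {
    // redirectErrorStream(true) folds stderr into stdout, so a single
    // reader is enough to keep the child from blocking on full pipes.
    ProcessBuilder pb = new ProcessBuilder(Arrays.asList("cmd.exe", "/C", "dir"));
    pb.redirectErrorStream(true);
    Process proc = pb.start();
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println("(merged) " + line);
      }
    }
    System.out.println("exit code: " + proc.waitFor());
  }
}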
Use of org.pentaho.di.core.util.StreamLogger in project pentaho-kettle by pentaho.
The class IngresVectorwiseLoader, method execute:
public boolean execute(IngresVectorwiseLoaderMeta meta) throws KettleException {
  Runtime rt = Runtime.getRuntime();
  try {
    // 1) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = environmentSubstitute(meta.getFifoFileName());
    File fifoFile = new File(data.fifoFilename);
    if (!fifoFile.exists()) {
      // MKFIFO!
      //
      String mkFifoCmd = "mkfifo -m 666 [" + data.fifoFilename + "]";
      // Pass the arguments as an array to handle spaces and permissions all at once.
      String[] args = new String[] { "mkfifo", "-m", "666", data.fifoFilename };
      logDetailed("Creating FIFO file using this command : " + mkFifoCmd);
      Process mkFifoProcess = rt.exec(args);
      StreamLogger errorLogger = new StreamLogger(log, mkFifoProcess.getErrorStream(), "mkFifoError");
      StreamLogger outputLogger = new StreamLogger(log, mkFifoProcess.getInputStream(), "mkFifoOuptut");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      int result = mkFifoProcess.waitFor();
      if (result != 0) {
        throw new Exception("Return code " + result + " received from statement : " + mkFifoCmd);
      }
      // String chmodCmd = "chmod 666 " + data.fifoFilename;
      // logDetailed("Setting FIFO file permissions using this command : " + chmodCmd);
      // Process chmodProcess = rt.exec(chmodCmd);
      // errorLogger = new StreamLogger(log, chmodProcess.getErrorStream(), "chmodError");
      // outputLogger = new StreamLogger(log, chmodProcess.getInputStream(), "chmodOuptut");
      // new Thread(errorLogger).start();
      // new Thread(outputLogger).start();
      // result = chmodProcess.waitFor();
      // if (result != 0) {
      //   throw new Exception("Return code " + result + " received from statement : " + chmodCmd);
      // }
    }
    // 2) Execute the Ingres "sql" command...
    //
    String cmd = createCommandLine(meta);
    // Masquerade the password so it never shows up in the log.
    String logMessage = masqueradPassword(cmd);
    logDetailed("Executing command: " + logMessage);
    try {
      data.sqlProcess = rt.exec(cmd);
    } catch (IOException ex) {
      throw new KettleException("Error while executing sql : " + logMessage, ex);
    }
    // Any error message?
    //
    data.errorLogger = new StreamLogger(log, data.sqlProcess.getErrorStream(), "ERR_SQL", true);
    new Thread(data.errorLogger).start();
    // Any output?
    data.outputLogger = new StreamLogger(log, data.sqlProcess.getInputStream(), "OUT_SQL");
    // Where do we send the data to? --> To STDIN of the sql process
    //
    data.sqlOutputStream = data.sqlProcess.getOutputStream();
    logWriter = new LogWriter(data.sqlProcess.getInputStream());
    logWriteThread = new Thread(logWriter, "IngresVecorWiseStepLogWriter");
    logWriteThread.start();
    vwLoadMonitor = new VWloadMonitor(data.sqlProcess, logWriter, logWriteThread);
    vwLoadMonitorThread = new Thread(vwLoadMonitor);
    vwLoadMonitorThread.start();
    logDetailed("Connected to VectorWise with the 'sql' command.");
    // OK, from here on, we need to feed the COPY command followed by the
    // data into the sqlOutputStream.
    //
    String loadCommand = createLoadCommand();
    logDetailed("Executing command: " + loadCommand);
    data.sqlRunner = new SqlRunner(data, loadCommand);
    data.sqlRunner.start();
    logDetailed("LOAD TABLE command started");
    // Open a new fifo output stream, buffered.
    //
    openFifoFile();
    logDetailed("Fifo stream opened");
    // Wait until it all hooks up in the FIFO
    //
    waitForAConnection();
    logDetailed("Ready to start bulk loading!");
  } catch (Exception ex) {
    throw new KettleException(ex);
  }
  return true;
}
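The detail worth noticing in the mkfifo step is the switch from the single mkFifoCmd string to the args array: Runtime.exec(String) tokenizes its argument on whitespace, so a FIFO path containing a space would be split into several arguments, while the String[] overload passes each element through intact. A small illustration with a hypothetical path (MkfifoDemo is an invented name):

import java.io.File;

public class MkfifoDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical FIFO path containing a space; the parent directory must exist.
    String fifo = "/tmp/my dir/load.fifo";

    // Broken variant: Runtime.exec(String) tokenizes on whitespace, so the path
    // arrives at mkfifo as the two arguments "/tmp/my" and "dir/load.fifo".
    // Runtime.getRuntime().exec("mkfifo -m 666 " + fifo);

    // Correct variant: each array element reaches mkfifo as exactly one argument.
    Process proc = Runtime.getRuntime().exec(new String[] { "mkfifo", "-m", "666", fifo });
    int rc = proc.waitFor();
    if (rc != 0) {
      throw new RuntimeException("mkfifo failed with return code " + rc);
    }
    System.out.println("FIFO created: " + new File(fifo).exists());
  }
}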
Use of org.pentaho.di.core.util.StreamLogger in project pentaho-kettle by pentaho.
The class MySQLBulkLoader, method execute:
public boolean execute(MySQLBulkLoaderMeta meta) throws KettleException {
  Runtime rt = Runtime.getRuntime();
  try {
    // 1) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = environmentSubstitute(meta.getFifoFileName());
    File fifoFile = new File(data.fifoFilename);
    if (!fifoFile.exists()) {
      // MKFIFO!
      //
      String mkFifoCmd = "mkfifo " + data.fifoFilename;
      logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.CREATINGFIFO", data.dbDescription, mkFifoCmd));
      Process mkFifoProcess = rt.exec(mkFifoCmd);
      StreamLogger errorLogger = new StreamLogger(log, mkFifoProcess.getErrorStream(), "mkFifoError");
      StreamLogger outputLogger = new StreamLogger(log, mkFifoProcess.getInputStream(), "mkFifoOuptut");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      int result = mkFifoProcess.waitFor();
      if (result != 0) {
        throw new Exception(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.ERRORFIFORC", result, mkFifoCmd));
      }
      String chmodCmd = "chmod 666 " + data.fifoFilename;
      logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.SETTINGPERMISSIONSFIFO", data.dbDescription, chmodCmd));
      Process chmodProcess = rt.exec(chmodCmd);
      errorLogger = new StreamLogger(log, chmodProcess.getErrorStream(), "chmodError");
      outputLogger = new StreamLogger(log, chmodProcess.getInputStream(), "chmodOuptut");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      result = chmodProcess.waitFor();
      if (result != 0) {
        throw new Exception(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.ERRORFIFORC", result, chmodCmd));
      }
    }
    // 2) Make a connection to MySQL for sending SQL commands
    // (Also, we need a clear cache for getting up-to-date target metadata)
    //
    // Check for a missing connection before dereferencing it for the cache clear.
    if (meta.getDatabaseMeta() == null) {
      logError(BaseMessages.getString(PKG, "MySQLBulkLoader.Init.ConnectionMissing", getStepname()));
      return false;
    }
    DBCache.getInstance().clear(meta.getDatabaseMeta().getName());
    data.db = new Database(this, meta.getDatabaseMeta());
    data.db.shareVariablesWith(this);
    PluginInterface dbPlugin = PluginRegistry.getInstance().getPlugin(DatabasePluginType.class, meta.getDatabaseMeta().getDatabaseInterface());
    data.dbDescription = (dbPlugin != null) ? dbPlugin.getDescription() : BaseMessages.getString(PKG, "MySQLBulkLoader.UnknownDB");
    // Connect to the database
    if (getTransMeta().isUsingUniqueConnections()) {
      synchronized (getTrans()) {
        data.db.connect(getTrans().getTransactionId(), getPartitionID());
      }
    } else {
      data.db.connect(getPartitionID());
    }
    logBasic(BaseMessages.getString(PKG, "MySQLBulkLoader.Message.CONNECTED", data.dbDescription));
    // 3) Now we are ready to run the load command...
    //
    executeLoadCommand();
  } catch (Exception ex) {
    throw new KettleException(ex);
  }
  return true;
}
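The payoff of the FIFO only appears after this method: a LOAD DATA statement reads from the FIFO path while the step writes rows into it. A rough sketch of that handshake under POSIX FIFO semantics, with the path, row format, and SQL all invented for illustration:

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;

public class FifoFeedDemo {
  public static void main(String[] args) throws Exception {
    String fifo = "/tmp/load.fifo"; // assumed to already exist, e.g. created via mkfifo

    // In the real step, a separate thread would issue something like:
    //   LOAD DATA LOCAL INFILE '/tmp/load.fifo' INTO TABLE demo_table
    // Opening the FIFO for writing blocks until that reader side connects.
    try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(fifo))) {
      for (int i = 0; i < 3; i++) {
        String row = i + "\tvalue-" + i + "\n"; // tab-separated, newline-terminated
        out.write(row.getBytes(StandardCharsets.UTF_8));
      }
    } // closing the stream is what signals end-of-data to the reader
  }
}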
Use of org.pentaho.di.core.util.StreamLogger in project pentaho-kettle by pentaho.
The class JarfileGenerator, method executeCommand:
private static void executeCommand(String[] cmd, File directory) throws IOException, InterruptedException {
  String command = "";
  for (int i = 0; i < cmd.length; i++) {
    command += " " + cmd[i];
  }
  log.logBasic("Jar generator", "Executing command : " + command);
  Runtime runtime = java.lang.Runtime.getRuntime();
  Process proc = runtime.exec(cmd, EnvUtil.getEnvironmentVariablesForRuntimeExec(), directory);
  // any error message?
  StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "Jar generator (stderr)");
  // any output?
  StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "Jar generator (stdout)");
  // kick them off
  new Thread(errorLogger).start();
  new Thread(outputLogger).start();
  proc.waitFor();
  log.logDetailed("Jar generator", "command [" + cmd[0] + "] has finished");
  // What's the exit status?
  if (proc.exitValue() != 0) {
    log.logDetailed("Jar generator", "Exit status of jar command was " + proc.exitValue());
  }
  // Close the streams explicitly; otherwise you get "Too many open files,
  // java.io.IOException" after a lot of iterations.
  try {
    proc.getErrorStream().close();
    proc.getInputStream().close();
  } catch (IOException e) {
    log.logDetailed("Jar generator", "Warning: Error closing streams: " + e.getMessage());
  }
}
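The explicit stream closing at the end is the detail worth copying; without it, repeated invocations leak file handles. A generic distillation of the pattern that recurs throughout this page (exec, drain both streams on their own threads, wait, close) could look like the following, where ExecUtil and execAndDrain are invented names:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;

public final class ExecUtil {
  // Run a command, drain stdout/stderr on background threads, wait for the
  // exit code, and close our end of stdin so repeated calls leak nothing.
  public static int execAndDrain(String[] cmd) throws Exception {
    Process proc = Runtime.getRuntime().exec(cmd);
    Thread out = drain(proc.getInputStream(), "(stdout)");
    Thread err = drain(proc.getErrorStream(), "(stderr)");
    int rc = proc.waitFor();
    out.join(); // make sure all output has been logged before returning
    err.join();
    proc.getOutputStream().close();
    return rc;
  }

  private static Thread drain(InputStream is, String label) {
    Thread t = new Thread(() -> {
      // try-with-resources closes the stream once the process ends.
      try (BufferedReader r = new BufferedReader(new InputStreamReader(is))) {
        String line;
        while ((line = r.readLine()) != null) {
          System.out.println(label + " " + line);
        }
      } catch (Exception ignored) {
        // stream closed when the process exits
      }
    });
    t.setDaemon(true);
    t.start();
    return t;
  }
}

A call such as ExecUtil.execAndDrain(new String[] { "jar", "cvf", "out.jar", "." }) then logs both streams and returns the exit code without leaving descriptors open.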