Use of org.pentaho.di.core.util.StreamLogger in the project pentaho-kettle by Pentaho.
From the class JobEntryShell, the method createTemporaryShellFile:
/**
 * Writes the given script content to the supplied temporary file and, on
 * non-Windows platforms, normalizes Windows line endings and marks the file
 * executable by spawning "chmod +x".
 *
 * @param tempFile    the temporary file to create and fill; if null, nothing is written
 * @param fileContent the script text to write; if null, nothing is written
 * @return the same tempFile reference that was passed in
 * @throws Exception if the file cannot be created or written, or if chmod fails to start
 */
private FileObject createTemporaryShellFile(FileObject tempFile, String fileContent) throws Exception {
  if (tempFile != null && fileContent != null) {
    try {
      // flag indicates if current OS is Windows or not
      boolean isWindows = Const.isWindows();
      if (!isWindows) {
        // Shell scripts on Unix-like systems must not carry CRLF line endings
        fileContent = replaceWinEOL(fileContent);
      }
      tempFile.createFile();
      // try-with-resources guarantees the stream is closed even when write()
      // throws, preventing a file-handle leak across repeated job executions
      try (OutputStream outputStream = tempFile.getContent().getOutputStream()) {
        outputStream.write(fileContent.getBytes());
      }
      if (!isWindows) {
        String tempFilename = KettleVFS.getFilename(tempFile);
        // Now we have to make this file executable...
        // On Unix-like systems this is done using the command "/bin/chmod +x filename"
        //
        ProcessBuilder procBuilder = new ProcessBuilder("chmod", "+x", tempFilename);
        Process proc = procBuilder.start();
        // Eat/log all stderr/stdout messages in separate threads so the child
        // process cannot block on a full output buffer...
        StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), toString() + " (stderr)");
        StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), toString() + " (stdout)");
        new Thread(errorLogger).start();
        new Thread(outputLogger).start();
        proc.waitFor();
      }
    } catch (Exception e) {
      throw new Exception("Unable to create temporary file to execute script", e);
    }
  }
  return tempFile;
}
Use of org.pentaho.di.core.util.StreamLogger in the project pentaho-kettle by Pentaho.
From the class JobEntryShell, the method executeShell:
/**
 * Builds and runs the configured shell command (either a script file on disk
 * or an inline script written to a temporary file), wires its stdout/stderr
 * into the Kettle log, waits for completion, and records the exit status and
 * error count in the given result.
 *
 * @param result  receives the process exit status, error count, and overall success flag
 * @param cmdRows optional rows from a previous step whose string fields become
 *                command-line arguments (used when argFromPrevious is set)
 * @param args    optional static command-line arguments (used when cmdRows are not)
 */
private void executeShell(Result result, List<RowMetaAndData> cmdRows, String[] args) {
  FileObject fileObject = null;
  String realScript = null;
  FileObject tempFile = null;
  try {
    // What's the exact command?
    String[] base = null;
    List<String> cmds = new ArrayList<String>();
    if (log.isBasic()) {
      logBasic(BaseMessages.getString(PKG, "JobShell.RunningOn", Const.getOS()));
    }
    if (insertScript) {
      realScript = environmentSubstitute(script);
    } else {
      String realFilename = environmentSubstitute(getFilename());
      fileObject = KettleVFS.getFileObject(realFilename, this);
    }
    // Choose the OS-appropriate shell launcher; inline scripts are first
    // materialized as an executable temporary file.
    if (Const.getOS().equals("Windows 95")) {
      base = new String[] { "command.com", "/C" };
      if (insertScript) {
        tempFile = KettleVFS.createTempFile("kettle", "shell.bat", System.getProperty("java.io.tmpdir"), this);
        fileObject = createTemporaryShellFile(tempFile, realScript);
      }
    } else if (Const.getOS().startsWith("Windows")) {
      base = new String[] { "cmd.exe", "/C" };
      if (insertScript) {
        tempFile = KettleVFS.createTempFile("kettle", "shell.bat", System.getProperty("java.io.tmpdir"), this);
        fileObject = createTemporaryShellFile(tempFile, realScript);
      }
    } else {
      if (insertScript) {
        tempFile = KettleVFS.createTempFile("kettle", "shell", System.getProperty("java.io.tmpdir"), this);
        fileObject = createTemporaryShellFile(tempFile, realScript);
      }
      base = new String[] { KettleVFS.getFilename(fileObject) };
    }
    // Construct the arguments...
    if (argFromPrevious && cmdRows != null) {
      // Add the base command...
      for (int i = 0; i < base.length; i++) {
        cmds.add(base[i]);
      }
      if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) {
        // for windows all arguments including the command itself
        // need to be
        // included in 1 argument to cmd/command.
        StringBuilder cmdline = new StringBuilder(300);
        cmdline.append('"');
        cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject)));
        // Add the arguments from previous results...
        for (int i = 0; i < cmdRows.size(); i++) {
          // Normally just one row, but once in a while to remain compatible we have multiple.
          RowMetaAndData r = cmdRows.get(i);
          for (int j = 0; j < r.size(); j++) {
            cmdline.append(' ');
            cmdline.append(Const.optionallyQuoteStringByOS(r.getString(j, null)));
          }
        }
        cmdline.append('"');
        cmds.add(cmdline.toString());
      } else {
        // Add the arguments from previous results...
        for (int i = 0; i < cmdRows.size(); i++) {
          // Normally just one row, but once in a while to remain compatible we have multiple.
          RowMetaAndData r = cmdRows.get(i);
          for (int j = 0; j < r.size(); j++) {
            cmds.add(Const.optionallyQuoteStringByOS(r.getString(j, null)));
          }
        }
      }
    } else if (args != null) {
      // Add the base command...
      for (int i = 0; i < base.length; i++) {
        cmds.add(base[i]);
      }
      if (Const.getOS().equals("Windows 95") || Const.getOS().startsWith("Windows")) {
        // for windows all arguments including the command itself
        // need to be
        // included in 1 argument to cmd/command.
        StringBuilder cmdline = new StringBuilder(300);
        cmdline.append('"');
        cmdline.append(Const.optionallyQuoteStringByOS(KettleVFS.getFilename(fileObject)));
        for (int i = 0; i < args.length; i++) {
          cmdline.append(' ');
          cmdline.append(Const.optionallyQuoteStringByOS(args[i]));
        }
        cmdline.append('"');
        cmds.add(cmdline.toString());
      } else {
        for (int i = 0; i < args.length; i++) {
          cmds.add(args[i]);
        }
      }
    }
    // Join the pieces only for logging purposes; the process itself is
    // launched from the cmds list, not from this string.
    StringBuilder command = new StringBuilder();
    Iterator<String> it = cmds.iterator();
    boolean first = true;
    while (it.hasNext()) {
      if (!first) {
        command.append(' ');
      } else {
        first = false;
      }
      command.append(it.next());
    }
    if (log.isBasic()) {
      logBasic(BaseMessages.getString(PKG, "JobShell.ExecCommand", command.toString()));
    }
    // Build the environment variable list...
    ProcessBuilder procBuilder = new ProcessBuilder(cmds);
    Map<String, String> env = procBuilder.environment();
    String[] variables = listVariables();
    for (int i = 0; i < variables.length; i++) {
      env.put(variables[i], getVariable(variables[i]));
    }
    if (getWorkDirectory() != null && !Utils.isEmpty(Const.rtrim(getWorkDirectory()))) {
      String vfsFilename = environmentSubstitute(getWorkDirectory());
      File file = new File(KettleVFS.getFilename(KettleVFS.getFileObject(vfsFilename, this)));
      procBuilder.directory(file);
    }
    Process proc = procBuilder.start();
    // any error message?
    StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "(stderr)", true);
    // any output?
    StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "(stdout)");
    // kick them off
    Thread errorLoggerThread = new Thread(errorLogger);
    errorLoggerThread.start();
    Thread outputLoggerThread = new Thread(outputLogger);
    outputLoggerThread.start();
    proc.waitFor();
    if (log.isDetailed()) {
      logDetailed(BaseMessages.getString(PKG, "JobShell.CommandFinished", command.toString()));
    }
    // What's the exit status?
    result.setExitStatus(proc.exitValue());
    if (result.getExitStatus() != 0) {
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobShell.ExitStatus", environmentSubstitute(getFilename()), "" + result.getExitStatus()));
      }
      result.setNrErrors(1);
    }
    // wait until loggers read all data from stdout and stderr
    errorLoggerThread.join();
    outputLoggerThread.join();
    // close the streams
    // otherwise you get "Too many open files, java.io.IOException" after a lot of iterations
    proc.getErrorStream().close();
    proc.getOutputStream().close();
    // stdout of the child was previously left open, still leaking one
    // descriptor per execution; close it as well
    proc.getInputStream().close();
  } catch (IOException ioe) {
    logError(BaseMessages.getString(PKG, "JobShell.ErrorRunningShell", environmentSubstitute(getFilename()), ioe.toString()), ioe);
    result.setNrErrors(1);
  } catch (InterruptedException ie) {
    logError(BaseMessages.getString(PKG, "JobShell.Shellinterupted", environmentSubstitute(getFilename()), ie.toString()), ie);
    result.setNrErrors(1);
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "JobShell.UnexpectedError", environmentSubstitute(getFilename()), e.toString()), e);
    result.setNrErrors(1);
  } finally {
    // Best-effort cleanup of the temporary script file, if one was created.
    if (tempFile != null) {
      try {
        tempFile.delete();
      } catch (Exception e) {
        // Previously the localized message was built and then discarded,
        // silently swallowing the failure; actually log it.
        logError(BaseMessages.getString(PKG, "JobShell.UnexpectedError", tempFile.toString(), e.toString()), e);
      }
    }
  }
  if (result.getNrErrors() > 0) {
    result.setResult(false);
  } else {
    result.setResult(true);
  }
}
Use of org.pentaho.di.core.util.StreamLogger in the project pentaho-kettle by Pentaho.
From the class LucidDBBulkLoader, the method execute:
/**
 * Prepares the LucidDB bulk load: creates the FIFO directory and FIFO file,
 * connects to LucidDB, creates the fifo_server foreign server, sets the
 * session error limit, writes the .bcp control file, kicks off the load
 * command, and finally opens the step's end of the FIFO for writing.
 *
 * @param meta step metadata (table name, FIFO directory, connection, limits)
 * @param wait currently unused by this method; kept for interface compatibility
 * @return true when setup succeeded, false when the database connection is missing
 * @throws KettleException wrapping any failure during setup
 */
public boolean execute(LucidDBBulkLoaderMeta meta, boolean wait) throws KettleException {
  try {
    // Fail fast if no connection is configured. This check must run before
    // anything dereferences meta.getDatabaseMeta(); previously the DBCache
    // clear below ran first and would throw an NPE, making this guard dead code.
    if (meta.getDatabaseMeta() == null) {
      logError(BaseMessages.getString(PKG, "LuciDBBulkLoader.Init.ConnectionMissing", getStepname()));
      return false;
    }
    String tableName = environmentSubstitute(meta.getTableName());
    // 1) Set up the FIFO folder, create the directory and path to it...
    //
    String fifoVfsDirectory = environmentSubstitute(meta.getFifoDirectory());
    FileObject directory = KettleVFS.getFileObject(fifoVfsDirectory, getTransMeta());
    directory.createFolder();
    String fifoDirectory = KettleVFS.getFilename(directory);
    // 2) Create the FIFO file using the "mkfifo" command...
    // Make sure to log all the possible output, also from STDERR
    //
    data.fifoFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".csv";
    data.bcpFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".bcp";
    File fifoFile = new File(data.fifoFilename);
    if (!fifoFile.exists()) {
      // ProcessBuilder passes the filename as a single argument, so paths
      // containing spaces work; Runtime.exec(String) would split on whitespace.
      logBasic("Creating FIFO file using this command : mkfifo " + data.fifoFilename);
      Process mkFifoProcess = new ProcessBuilder("mkfifo", data.fifoFilename).start();
      // Drain stdout/stderr in background threads so mkfifo cannot block
      StreamLogger errorLogger = new StreamLogger(log, mkFifoProcess.getErrorStream(), "mkFifoError");
      StreamLogger outputLogger = new StreamLogger(log, mkFifoProcess.getInputStream(), "mkFifoOutput");
      new Thread(errorLogger).start();
      new Thread(outputLogger).start();
      int result = mkFifoProcess.waitFor();
      if (result != 0) {
        throw new Exception("Return code " + result + " received from statement : mkfifo " + data.fifoFilename);
      }
    }
    // 3) Make a connection to LucidDB for sending SQL commands
    // (Also, we need a clear cache for getting up-to-date target metadata)
    DBCache.getInstance().clear(meta.getDatabaseMeta().getName());
    data.db = new Database(this, meta.getDatabaseMeta());
    data.db.shareVariablesWith(this);
    // Connect to the database
    if (getTransMeta().isUsingUniqueConnections()) {
      synchronized (getTrans()) {
        data.db.connect(getTrans().getTransactionId(), getPartitionID());
      }
    } else {
      data.db.connect(getPartitionID());
    }
    logBasic("Connected to LucidDB");
    // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading.
    //
    String fifoServerStatement = "";
    fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR;
    fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR;
    fifoServerStatement += "options (" + Const.CR;
    fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR;
    fifoServerStatement += "file_extension 'csv'," + Const.CR;
    fifoServerStatement += "with_header 'no'," + Const.CR;
    fifoServerStatement += "num_rows_scan '0'," + Const.CR;
    fifoServerStatement += "lenient 'no');" + Const.CR;
    logBasic("Creating LucidDB fifo_server with the following command: " + fifoServerStatement);
    data.db.execStatements(fifoServerStatement);
    // 5) Set the error limit in the LucidDB session
    // REVIEW jvs 13-Dec-2008: is this guaranteed to retain the same
    // connection?
    String errorMaxStatement = "";
    errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR;
    logBasic("Setting error limit in LucidDB session with the following command: " + errorMaxStatement);
    data.db.execStatements(errorMaxStatement);
    // 6) Now we also need to create a bulk loader file .bcp
    //
    createBulkLoadConfigFile(data.bcpFilename);
    // 7) execute the actual load command!
    // This will actually block until the load is done in the
    // separate execution thread; see notes in executeLoadCommand
    // on why it's important for this to occur BEFORE
    // opening our end of the FIFO.
    //
    executeLoadCommand(tableName);
    // 8) We have to write rows to the FIFO file later on.
    data.fifoStream = new BufferedOutputStream(new FileOutputStream(fifoFile));
  } catch (Exception ex) {
    throw new KettleException(ex);
  }
  return true;
}
Aggregations