Example 66 with ResultFile

Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class JobEntryJob, method execute.

@Override
public Result execute(Result result, int nr) throws KettleException {
    result.setEntryNr(nr);
    LogChannelFileWriter logChannelFileWriter = null;
    LogLevel jobLogLevel = parentJob.getLogLevel();
    // Set the embedded NamedCluster MetaStore provider key so that it can be passed to VFS
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
        parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    if (setLogfile) {
        String realLogFilename = environmentSubstitute(getLogFilename());
        // if we do not have one, we must fail
        if (Utils.isEmpty(realLogFilename)) {
            logError(BaseMessages.getString(PKG, "JobJob.Exception.LogFilenameMissing"));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        // create parent folder?
        if (!createParentFolder(realLogFilename)) {
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        try {
            logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename), setAppendLogfile);
            logChannelFileWriter.startLogging();
        } catch (KettleException e) {
            logError("Unable to open file appender for file [" + getLogFilename() + "] : " + e.toString());
            logError(Const.getStackTracker(e));
            result.setNrErrors(1);
            result.setResult(false);
            return result;
        }
        jobLogLevel = logFileLevel;
    }
    try {
        // First load the job, outside of the loop...
        if (parentJob.getJobMeta() != null) {
            // Reset the internal variables again.
            // Maybe we should split up the variables even more, like in UNIX shells.
            // The internal variables need to be reset to be able to use them
            // properly in two sequential sub-jobs.
            parentJob.getJobMeta().setInternalKettleVariables();
        }
        // 
        switch(specificationMethod) {
            case REPOSITORY_BY_NAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository : [" + directory + " : " + environmentSubstitute(jobname) + "]");
                }
                break;
            case FILENAME:
                if (log.isDetailed()) {
                    logDetailed("Loading job from XML file : [" + environmentSubstitute(filename) + "]");
                }
                break;
            case REPOSITORY_BY_REFERENCE:
                if (log.isDetailed()) {
                    logDetailed("Loading job from repository by reference : [" + jobObjectId + "]");
                }
                break;
            default:
                break;
        }
        JobMeta jobMeta = getJobMeta(rep, this);
        // 
        if (jobMeta == null) {
            throw new KettleException("Unable to load the job: please specify the name and repository directory OR a filename");
        }
        verifyRecursiveExecution(parentJob, jobMeta);
        int iteration = 0;
        String[] args1 = arguments;
        // No arguments? Check the parent job's arguments.
        if (args1 == null || args1.length == 0) {
            args1 = parentJob.getArguments();
        }
        copyVariablesFrom(parentJob);
        setParentVariableSpace(parentJob);
        // 
        // For the moment only do variable translation at the start of a job, not
        // for every input row (if that would be switched on)
        // 
        String[] args = null;
        if (args1 != null) {
            args = new String[args1.length];
            for (int idx = 0; idx < args1.length; idx++) {
                args[idx] = environmentSubstitute(args1[idx]);
            }
        }
        RowMetaAndData resultRow = null;
        boolean first = true;
        List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
        while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0)) {
            first = false;
            // 
            if (execPerRow) {
                result.getRows().clear();
            }
            if (rows != null && execPerRow) {
                resultRow = rows.get(iteration);
            } else {
                resultRow = null;
            }
            NamedParams namedParam = new NamedParamsDefault();
            // 
            if (paramsFromPrevious) {
                String[] parentParameters = parentJob.listParameters();
                for (int idx = 0; idx < parentParameters.length; idx++) {
                    String par = parentParameters[idx];
                    String def = parentJob.getParameterDefault(par);
                    String val = parentJob.getParameterValue(par);
                    String des = parentJob.getParameterDescription(par);
                    namedParam.addParameterDefinition(par, def, des);
                    namedParam.setParameterValue(par, val);
                }
            }
            // 
            if (parameters != null) {
                for (int idx = 0; idx < parameters.length; idx++) {
                    if (!Utils.isEmpty(parameters[idx])) {
                        // 
                        if (Const.indexOfString(parameters[idx], namedParam.listParameters()) < 0) {
                            // We have a parameter
                            try {
                                namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
                            } catch (DuplicateParamException e) {
                                // Should never happen
                                // 
                                logError("Duplicate parameter definition for " + parameters[idx]);
                            }
                        }
                        if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                            namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                        } else {
                            // Something was entered in the field column...
                            // 
                            String value = "";
                            if (resultRow != null) {
                                value = resultRow.getString(parameterFieldNames[idx], "");
                            }
                            namedParam.setParameterValue(parameters[idx], value);
                        }
                    }
                }
            }
            Result oneResult = new Result();
            List<RowMetaAndData> sourceRows = null;
            if (execPerRow) {
                if (argFromPrevious) {
                    // Copy the input row to the (command line) arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Just pass a single row
                    List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
                    newList.add(resultRow);
                    sourceRows = newList;
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            } else {
                if (argFromPrevious) {
                    // Only put the first row in the arguments
                    args = null;
                    if (resultRow != null) {
                        args = new String[resultRow.size()];
                        for (int i = 0; i < resultRow.size(); i++) {
                            args[i] = resultRow.getString(i, null);
                        }
                    }
                } else {
                    // Keep it as it was...
                    sourceRows = result.getRows();
                }
                if (paramsFromPrevious) {
                    if (parameters != null) {
                        for (int idx = 0; idx < parameters.length; idx++) {
                            if (!Utils.isEmpty(parameters[idx])) {
                                // We have a parameter
                                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                                } else {
                                    String fieldValue = "";
                                    if (resultRow != null) {
                                        fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                                    }
                                    // Get the value from the input stream
                                    namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                                }
                            }
                        }
                    }
                }
            }
            boolean doFallback = true;
            SlaveServer remoteSlaveServer = null;
            JobExecutionConfiguration executionConfiguration = new JobExecutionConfiguration();
            if (!Utils.isEmpty(runConfiguration)) {
                log.logBasic(BaseMessages.getString(PKG, "JobJob.RunConfig.Message"), runConfiguration);
                runConfiguration = environmentSubstitute(runConfiguration);
                executionConfiguration.setRunConfiguration(runConfiguration);
                try {
                    ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), jobMeta, rep });
                    if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely()) {
                        result.setResult(true);
                        return result;
                    }
                    remoteSlaveServer = executionConfiguration.getRemoteServer();
                    doFallback = false;
                } catch (KettleException e) {
                    log.logError(e.getMessage(), getName());
                    result.setNrErrors(1);
                    result.setResult(false);
                    return result;
                }
            }
            if (doFallback) {
                // 
                if (!Utils.isEmpty(remoteSlaveServerName)) {
                    String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
                    remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
                    if (remoteSlaveServer == null) {
                        throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
                    }
                }
            }
            if (remoteSlaveServer == null) {
                // Local execution...
                // 
                // Create a new job
                // 
                job = new Job(rep, jobMeta, this);
                job.setParentJob(parentJob);
                job.setLogLevel(jobLogLevel);
                job.shareVariablesWith(this);
                job.setInternalKettleVariables(this);
                job.copyParametersFrom(jobMeta);
                job.setInteractive(parentJob.isInteractive());
                if (job.isInteractive()) {
                    job.getJobEntryListeners().addAll(parentJob.getJobEntryListeners());
                }
                // Pass the socket repository all around.
                // 
                job.setSocketRepository(parentJob.getSocketRepository());
                // Set the parameters calculated above on this instance.
                // 
                job.clearParameters();
                String[] parameterNames = job.listParameters();
                for (int idx = 0; idx < parameterNames.length; idx++) {
                    // Grab the parameter value set in the job entry
                    // 
                    String thisValue = namedParam.getParameterValue(parameterNames[idx]);
                    if (!Utils.isEmpty(thisValue)) {
                        // Set the value as specified by the user in the job entry
                        // 
                        job.setParameterValue(parameterNames[idx], thisValue);
                    } else {
                        // 
                        if (isPassingAllParameters()) {
                            String parentValue = parentJob.getParameterValue(parameterNames[idx]);
                            if (!Utils.isEmpty(parentValue)) {
                                job.setParameterValue(parameterNames[idx], parentValue);
                            }
                        }
                    }
                }
                job.activateParameters();
                // Set the source rows we calculated above...
                // 
                job.setSourceRows(sourceRows);
                // Don't forget the logging...
                job.beginProcessing();
                // Link the job with the sub-job
                parentJob.getJobTracker().addJobTracker(job.getJobTracker());
                // Link both ways!
                job.getJobTracker().setParentJobTracker(parentJob.getJobTracker());
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    job.setPassedBatchId(parentJob.getBatchId());
                }
                job.setArguments(args);
                // 
                for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
                    // TODO: copy some settings in the job execution configuration, not strictly needed
                    // but the execution configuration information is useful in case of a job re-start
                    // 
                    delegationListener.jobDelegationStarted(job, new JobExecutionConfiguration());
                }
                JobEntryJobRunner runner = new JobEntryJobRunner(job, result, nr, log);
                Thread jobRunnerThread = new Thread(runner);
                // PDI-6518: added a UUID to the thread name, otherwise threads share names
                // when job entries are executed in parallel in a parent job; if that happens,
                // contained transformations start closing each other's connections
                jobRunnerThread.setName(Const.NVL(job.getJobMeta().getName(), job.getJobMeta().getFilename()) + " UUID: " + UUID.randomUUID().toString());
                jobRunnerThread.start();
                // 
                while (!runner.isFinished() && !parentJob.isStopped()) {
                    try {
                        Thread.sleep(0, 1);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // if the parent-job was stopped, stop the sub-job too...
                if (parentJob.isStopped()) {
                    job.stopAll();
                    // Wait until finished!
                    runner.waitUntilFinished();
                }
                oneResult = runner.getResult();
            } else {
                // Make sure we can parameterize the slave server connection
                // 
                remoteSlaveServer.shareVariablesWith(this);
                // Remote execution...
                // 
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
                // lightClone() because the rows are overwritten on the next line.
                jobExecutionConfiguration.setPreviousResult(result.lightClone());
                jobExecutionConfiguration.getPreviousResult().setRows(sourceRows);
                jobExecutionConfiguration.setArgumentStrings(args);
                jobExecutionConfiguration.setVariables(this);
                jobExecutionConfiguration.setRemoteServer(remoteSlaveServer);
                jobExecutionConfiguration.setRepository(rep);
                jobExecutionConfiguration.setLogLevel(jobLogLevel);
                jobExecutionConfiguration.setPassingExport(passingExport);
                jobExecutionConfiguration.setExpandingRemoteJob(expandingRemoteJob);
                for (String param : namedParam.listParameters()) {
                    String defValue = namedParam.getParameterDefault(param);
                    String value = namedParam.getParameterValue(param);
                    jobExecutionConfiguration.getParams().put(param, Const.NVL(value, defValue));
                }
                if (parentJob.getJobMeta().isBatchIdPassed()) {
                    jobExecutionConfiguration.setPassedBatchId(parentJob.getBatchId());
                }
                // Send the XML over to the slave server
                // Also start the job over there...
                // 
                String carteObjectId = null;
                try {
                    carteObjectId = Job.sendToSlaveServer(jobMeta, jobExecutionConfiguration, rep, metaStore);
                } catch (KettleException e) {
                    // Perhaps the job exists on the remote server, carte is down, etc.
                    // This is an abort situation, stop the parent job...
                    // We want this in case we are running in parallel. The other job
                    // entries can stop running now.
                    // 
                    parentJob.stopAll();
                    // 
                    throw e;
                }
                // Now start the monitoring...
                // 
                SlaveServerJobStatus jobStatus = null;
                while (!parentJob.isStopped() && waitingToFinish) {
                    try {
                        jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
                        if (jobStatus.getResult() != null) {
                            // The job is finished, get the result...
                            // 
                            oneResult = jobStatus.getResult();
                            break;
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to verify the status of job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too; chances are too low that the server will come back on-line.
                        break;
                    }
                    // sleep for 1 second
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                    // Ignore
                    }
                }
                // Write log from carte to file
                if (setLogfile && jobStatus != null) {
                    String logFromCarte = jobStatus.getLoggingString();
                    if (!Utils.isEmpty(logFromCarte)) {
                        FileObject logfile = logChannelFileWriter.getLogFile();
                        OutputStream logFileOutputStream = null;
                        try {
                            logFileOutputStream = KettleVFS.getOutputStream(logfile, setAppendLogfile);
                            logFileOutputStream.write(logFromCarte.getBytes());
                            logFileOutputStream.flush();
                        } catch (Exception e) {
                            logError("There was an error logging to file '" + logfile + "'", e);
                        } finally {
                            try {
                                if (logFileOutputStream != null) {
                                    logFileOutputStream.close();
                                    logFileOutputStream = null;
                                }
                            } catch (Exception e) {
                                logError("There was an error closing log file file '" + logfile + "'", e);
                            }
                        }
                    }
                }
                if (!waitingToFinish) {
                    // Since the job was posted successfully, the result is true...
                    // 
                    oneResult = new Result();
                    oneResult.setResult(true);
                }
                if (parentJob.isStopped()) {
                    try {
                        // 
                        if (jobStatus == null || jobStatus.isRunning()) {
                            // Try a remote abort ...
                            // 
                            remoteSlaveServer.stopJob(jobMeta.getName(), carteObjectId);
                        }
                    } catch (Exception e1) {
                        logError("Unable to contact slave server [" + remoteSlaveServer + "] to stop job [" + jobMeta.getName() + "]", e1);
                        oneResult.setNrErrors(1L);
                        // Stop looking too; chances are too low that the server will come back on-line.
                        break;
                    }
                }
            }
            // clear only the numbers, NOT the files or rows.
            result.clear();
            result.add(oneResult);
            // Set the result rows too, if any ...
            if (!Utils.isEmpty(oneResult.getRows())) {
                result.setRows(new ArrayList<RowMetaAndData>(oneResult.getRows()));
            }
            // 
            if (!oneResult.getResult()) {
                result.setNrErrors(result.getNrErrors() + 1);
            }
            iteration++;
        }
    } catch (KettleException ke) {
        logError("Error running job entry 'job' : ", ke);
        result.setResult(false);
        result.setNrErrors(1L);
    }
    if (setLogfile) {
        if (logChannelFileWriter != null) {
            logChannelFileWriter.stopLogging();
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
            // 
            if (logChannelFileWriter.getException() != null) {
                logError("Unable to open log file [" + getLogFilename() + "] : ");
                logError(Const.getStackTracker(logChannelFileWriter.getException()));
                result.setNrErrors(1);
                result.setResult(false);
                return result;
            }
        }
    }
    if (result.getNrErrors() > 0) {
        result.setResult(false);
    } else {
        result.setResult(true);
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) NamedParams(org.pentaho.di.core.parameters.NamedParams) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) SlaveServer(org.pentaho.di.cluster.SlaveServer) LogLevel(org.pentaho.di.core.logging.LogLevel) Result(org.pentaho.di.core.Result) SlaveServerJobStatus(org.pentaho.di.www.SlaveServerJobStatus) RowMetaAndData(org.pentaho.di.core.RowMetaAndData) FileObject(org.apache.commons.vfs2.FileObject) Job(org.pentaho.di.job.Job) NamedParamsDefault(org.pentaho.di.core.parameters.NamedParamsDefault) LogChannelFileWriter(org.pentaho.di.core.logging.LogChannelFileWriter) ResultFile(org.pentaho.di.core.ResultFile) JobExecutionConfiguration(org.pentaho.di.job.JobExecutionConfiguration) KettleExtensionPoint(org.pentaho.di.core.extension.KettleExtensionPoint) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) DuplicateParamException(org.pentaho.di.core.parameters.DuplicateParamException) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) DelegationListener(org.pentaho.di.job.DelegationListener)
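
The reusable piece of this entry is how the log file becomes a ResultFile: the entry routes its own logging to a file through LogChannelFileWriter, then registers that file on the Result so downstream job entries can pick it up. A minimal sketch of that pattern, using only the Kettle classes already shown above; the helper class, its name, and its parameters are hypothetical:

import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannelFileWriter;
import org.pentaho.di.core.vfs.KettleVFS;

class LogFileCaptureSketch {
    // Route a job entry's logging to a file and register that file
    // on the Result, mirroring the execute() method above.
    static void attachLogFile(Result result, String logChannelId, String logFilename,
                              boolean append, String jobName, String entryName) throws KettleException {
        LogChannelFileWriter writer = new LogChannelFileWriter(
            logChannelId, KettleVFS.getFileObject(logFilename), append);
        writer.startLogging();
        try {
            // ... run the sub-job here, where execute() starts its JobEntryJobRunner ...
        } finally {
            writer.stopLogging();
            ResultFile resultFile = new ResultFile(
                ResultFile.FILE_TYPE_LOG, writer.getLogFile(), jobName, entryName);
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
    }
}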

Example 67 with ResultFile

Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class JobEntryMssqlBulkLoad, method execute.

public Result execute(Result previousResult, int nr) {
    String TakeFirstNbrLines = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    boolean useFieldSeparator = false;
    String UseCodepage = "";
    String ErrorfileName = "";
    Result result = previousResult;
    result.setResult(false);
    String vfsFilename = environmentSubstitute(filename);
    FileObject fileObject = null;
    // Let's check the filename ...
    if (!Utils.isEmpty(vfsFilename)) {
        try {
            // User has specified a file, We can continue ...
            // 
            // This is running over VFS but we need a normal file.
            // As such, we're going to verify that it's a local file...
            // We're also going to convert VFS FileObject to File
            // 
            fileObject = KettleVFS.getFileObject(vfsFilename, this);
            if (!(fileObject instanceof LocalFile)) {
                // 
                throw new KettleException(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.OnlyLocalFileSupported", vfsFilename));
            }
            // Convert it to a regular platform specific file name
            // 
            String realFilename = KettleVFS.getFilename(fileObject);
            // Here we go... back to the regular scheduled program...
            // 
            File file = new File(realFilename);
            if (file.exists() && file.canRead()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.FileExists.Label", realFilename));
                }
                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(this, connection);
                    if (!(db.getDatabaseMeta().getDatabaseInterface() instanceof MSSQLServerDatabaseMeta)) {
                        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.DbNotMSSQL", connection.getDatabaseName()));
                        return result;
                    }
                    db.shareVariablesWith(this);
                    try {
                        db.connect(parentJob.getTransactionId(), null);
                        // Get schemaname
                        String realSchemaname = environmentSubstitute(schemaname);
                        // Get tablename
                        String realTablename = environmentSubstitute(tablename);
                        // Add the schema name (most of the time Schemaname.Tablename)
                        if (schemaname != null) {
                            realTablename = realSchemaname + "." + realTablename;
                        }
                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed()) {
                                logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.TableExists.Label", realTablename));
                            }
                            // FIELDTERMINATOR
                            String Fieldterminator = getRealFieldTerminator();
                            if (Utils.isEmpty(Fieldterminator) && (datafiletype.equals("char") || datafiletype.equals("widechar"))) {
                                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FieldTerminatorMissing"));
                                return result;
                            } else {
                                if (datafiletype.equals("char") || datafiletype.equals("widechar")) {
                                    useFieldSeparator = true;
                                    FieldTerminatedby = "FIELDTERMINATOR='" + Fieldterminator + "'";
                                }
                            }
                            // Check for a specific code page; 'codepage' only carries the
                            // literal "Specific" marker here, so substitute specificcodepage.
                            if (codepage.equals("Specific")) {
                                String realCodePage = environmentSubstitute(specificcodepage);
                                if (Utils.isEmpty(realCodePage)) {
                                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.SpecificCodePageMissing"));
                                    return result;
                                } else {
                                    UseCodepage = "CODEPAGE = '" + realCodePage + "'";
                                }
                            } else {
                                UseCodepage = "CODEPAGE = '" + codepage + "'";
                            }
                            // Check Error file
                            String realErrorFile = environmentSubstitute(errorfilename);
                            if (realErrorFile != null) {
                                File errorfile = new File(realErrorFile);
                                if (errorfile.exists() && !adddatetime) {
                                    // The error file is created when the command is executed. An error occurs if the file already
                                    // exists.
                                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.ErrorFileExists"));
                                    return result;
                                }
                                if (adddatetime) {
                                    // Add a date-time stamp to the filename...
                                    SimpleDateFormat daf = new SimpleDateFormat("yyyyMMdd_HHmmss");
                                    String d = daf.format(new Date());
                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "_" + d + "'";
                                } else {
                                    ErrorfileName = "ERRORFILE ='" + realErrorFile + "'";
                                }
                            }
                            // ROWTERMINATOR
                            String Rowterminator = getRealLineterminated();
                            if (!Utils.isEmpty(Rowterminator)) {
                                LineTerminatedby = "ROWTERMINATOR='" + Rowterminator + "'";
                            }
                            // Start file at
                            if (startfile > 0) {
                                TakeFirstNbrLines = "FIRSTROW=" + startfile;
                            }
                            // End file at (append, so a FIRSTROW setting is not overwritten)
                            if (endfile > 0) {
                                TakeFirstNbrLines = TakeFirstNbrLines
                                    + (TakeFirstNbrLines.length() > 0 ? "," : "") + "LASTROW=" + endfile;
                            }
                            // Truncate table?
                            String SQLBULKLOAD = "";
                            if (truncate) {
                                SQLBULKLOAD = "TRUNCATE TABLE " + realTablename + ";";
                            }
                            // Build BULK Command
                            SQLBULKLOAD = SQLBULKLOAD + "BULK INSERT " + realTablename + " FROM " + "'" + realFilename.replace('\\', '/') + "'";
                            SQLBULKLOAD = SQLBULKLOAD + " WITH (";
                            if (useFieldSeparator) {
                                SQLBULKLOAD = SQLBULKLOAD + FieldTerminatedby;
                            } else {
                                SQLBULKLOAD = SQLBULKLOAD + "DATAFILETYPE ='" + datafiletype + "'";
                            }
                            if (LineTerminatedby.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + LineTerminatedby;
                            }
                            if (TakeFirstNbrLines.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + TakeFirstNbrLines;
                            }
                            if (UseCodepage.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + "," + UseCodepage;
                            }
                            String realFormatFile = environmentSubstitute(formatfilename);
                            if (realFormatFile != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ", FORMATFILE='" + realFormatFile + "'";
                            }
                            if (firetriggers) {
                                SQLBULKLOAD = SQLBULKLOAD + ",FIRE_TRIGGERS";
                            }
                            if (keepnulls) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPNULLS";
                            }
                            if (keepidentity) {
                                SQLBULKLOAD = SQLBULKLOAD + ",KEEPIDENTITY";
                            }
                            if (checkconstraints) {
                                SQLBULKLOAD = SQLBULKLOAD + ",CHECK_CONSTRAINTS";
                            }
                            if (tablock) {
                                SQLBULKLOAD = SQLBULKLOAD + ",TABLOCK";
                            }
                            if (orderby != null) {
                                SQLBULKLOAD = SQLBULKLOAD + ",ORDER ( " + orderby + " " + orderdirection + ")";
                            }
                            if (ErrorfileName.length() > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", " + ErrorfileName;
                            }
                            if (maxerrors > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", MAXERRORS=" + maxerrors;
                            }
                            if (batchsize > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", BATCHSIZE=" + batchsize;
                            }
                            if (rowsperbatch > 0) {
                                SQLBULKLOAD = SQLBULKLOAD + ", ROWS_PER_BATCH=" + rowsperbatch;
                            }
                            // End of Bulk command
                            SQLBULKLOAD = SQLBULKLOAD + ")";
                            try {
                                // Run the SQL
                                db.execStatement(SQLBULKLOAD);
                                // Everything is OK...we can disconnect now
                                db.disconnect();
                                if (isAddFileToResult()) {
                                    // Add filename to output files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }
                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                result.setNrErrors(1);
                                logError("An error occurred executing this job entry : " + je.getMessage(), je);
                            } catch (KettleFileException e) {
                                logError("An error occurred executing this job entry : " + e.getMessage(), e);
                                result.setNrErrors(1);
                            } finally {
                                if (db != null) {
                                    db.disconnect();
                                    db = null;
                                }
                            }
                        } else {
                            // Of course, the table should have been created already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.TableNotExists", realTablename));
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        logError("An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FileNotExists", realFilename));
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.UnexpectedError.Label"), e);
        } finally {
            try {
                if (fileObject != null) {
                    fileObject.close();
                }
            } catch (Exception e) {
            // Ignore errors
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nofilename.Label"));
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) ResultFile(org.pentaho.di.core.ResultFile) Date(java.util.Date) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) Result(org.pentaho.di.core.Result) LocalFile(org.apache.commons.vfs2.provider.local.LocalFile) Database(org.pentaho.di.core.database.Database) FileObject(org.apache.commons.vfs2.FileObject) MSSQLServerDatabaseMeta(org.pentaho.di.core.database.MSSQLServerDatabaseMeta) LocalFile(org.apache.commons.vfs2.provider.local.LocalFile) File(java.io.File) ResultFile(org.pentaho.di.core.ResultFile) SimpleDateFormat(java.text.SimpleDateFormat)
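
The WITH clause above is assembled through repeated string concatenation, which makes the comma placement fragile. A sketch of the same assembly in plain Java, collecting the non-empty options first and joining them once; the class name and the sample values are placeholders, and quoting is kept as simple as in the original:

import java.util.ArrayList;
import java.util.List;

class BulkInsertSqlSketch {
    static String build(String table, String file, List<String> options, boolean truncate) {
        StringBuilder sql = new StringBuilder();
        if (truncate) {
            sql.append("TRUNCATE TABLE ").append(table).append(";");
        }
        sql.append("BULK INSERT ").append(table)
            .append(" FROM '").append(file.replace('\\', '/')).append("'");
        if (!options.isEmpty()) {
            // One join instead of per-option comma bookkeeping.
            sql.append(" WITH (").append(String.join(", ", options)).append(")");
        }
        return sql.toString();
    }

    public static void main(String[] args) {
        List<String> options = new ArrayList<>();
        options.add("FIELDTERMINATOR=';'");
        options.add("FIRSTROW=2");
        options.add("CODEPAGE = 'ACP'");
        // prints: TRUNCATE TABLE dbo.sales;BULK INSERT dbo.sales FROM 'C:/tmp/sales.csv' WITH (FIELDTERMINATOR=';', FIRSTROW=2, CODEPAGE = 'ACP')
        System.out.println(build("dbo.sales", "C:\\tmp\\sales.csv", options, true));
    }
}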

Example 68 with ResultFile

Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class JobEntryMysqlBulkLoad, method execute.

public Result execute(Result previousResult, int nr) {
    String ReplaceIgnore;
    String IgnoreNbrLignes = "";
    String ListOfColumn = "";
    String LocalExec = "";
    String PriorityText = "";
    String LineTerminatedby = "";
    String FieldTerminatedby = "";
    Result result = previousResult;
    result.setResult(false);
    String vfsFilename = environmentSubstitute(filename);
    // Let's check the filename ...
    if (!Utils.isEmpty(vfsFilename)) {
        try {
            // User has specified a file, We can continue ...
            // 
            // This is running over VFS but we need a normal file.
            // As such, we're going to verify that it's a local file...
            // We're also going to convert VFS FileObject to File
            // 
            FileObject fileObject = KettleVFS.getFileObject(vfsFilename, this);
            if (!(fileObject instanceof LocalFile)) {
                // 
                throw new KettleException("Only local files are supported at this time, file [" + vfsFilename + "] is not a local file.");
            }
            // Convert it to a regular platform specific file name
            // 
            String realFilename = KettleVFS.getFilename(fileObject);
            // Here we go... back to the regular scheduled program...
            // 
            File file = new File(realFilename);
            if ((file.exists() && file.canRead()) || !isLocalInfile()) {
                // User has specified an existing file, We can continue ...
                if (log.isDetailed()) {
                    logDetailed("File [" + realFilename + "] exists.");
                }
                if (connection != null) {
                    // User has specified a connection, We can continue ...
                    Database db = new Database(this, connection);
                    db.shareVariablesWith(this);
                    try {
                        db.connect(parentJob.getTransactionId(), null);
                        // Get schemaname
                        String realSchemaname = environmentSubstitute(schemaname);
                        // Get tablename
                        String realTablename = environmentSubstitute(tablename);
                        if (db.checkTableExists(realTablename)) {
                            // The table exists, we can continue ...
                            if (log.isDetailed()) {
                                logDetailed("Table [" + realTablename + "] exists.");
                            }
                            // Add the schema name (most of the time Schemaname.Tablename)
                            if (schemaname != null) {
                                realTablename = realSchemaname + "." + realTablename;
                            }
                            // Set the REPLACE or IGNORE
                            if (isReplacedata()) {
                                ReplaceIgnore = "REPLACE";
                            } else {
                                ReplaceIgnore = "IGNORE";
                            }
                            // Set the IGNORE LINES
                            if (Const.toInt(getRealIgnorelines(), 0) > 0) {
                                IgnoreNbrLignes = "IGNORE " + getRealIgnorelines() + " LINES";
                            }
                            // Set the list of columns
                            if (getRealListattribut() != null) {
                                ListOfColumn = "(" + MysqlString(getRealListattribut()) + ")";
                            }
                            // Local File execution
                            if (isLocalInfile()) {
                                LocalExec = "LOCAL";
                            }
                            // Priority
                            if (prorityvalue == 1) {
                                // LOW
                                PriorityText = "LOW_PRIORITY";
                            } else if (prorityvalue == 2) {
                                // CONCURRENT
                                PriorityText = "CONCURRENT";
                            }
                            // Fields ....
                            if (getRealSeparator() != null || getRealEnclosed() != null || getRealEscaped() != null) {
                                FieldTerminatedby = "FIELDS ";
                                if (getRealSeparator() != null) {
                                    FieldTerminatedby = FieldTerminatedby + "TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
                                }
                                if (getRealEnclosed() != null) {
                                    FieldTerminatedby = FieldTerminatedby + " ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
                                }
                                if (getRealEscaped() != null) {
                                    FieldTerminatedby = FieldTerminatedby + " ESCAPED BY '" + Const.replace(getRealEscaped(), "'", "''") + "'";
                                }
                            }
                            // LINES ...
                            if (getRealLinestarted() != null || getRealLineterminated() != null) {
                                LineTerminatedby = "LINES ";
                                // Line starting By
                                if (getRealLinestarted() != null) {
                                    LineTerminatedby = LineTerminatedby + "STARTING BY '" + Const.replace(getRealLinestarted(), "'", "''") + "'";
                                }
                                // Line terminating By
                                if (getRealLineterminated() != null) {
                                    LineTerminatedby = LineTerminatedby + " TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
                                }
                            }
                            String SQLBULKLOAD = "LOAD DATA " + PriorityText + " " + LocalExec + " INFILE '" + realFilename.replace('\\', '/') + "' " + ReplaceIgnore + " INTO TABLE " + realTablename + " " + FieldTerminatedby + " " + LineTerminatedby + " " + IgnoreNbrLignes + " " + ListOfColumn + ";";
                            try {
                                // Run the SQL
                                db.execStatement(SQLBULKLOAD);
                                // Everything is OK... we can disconnect now
                                db.disconnect();
                                if (isAddFileToResult()) {
                                    // Add the data file name to the result files
                                    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                                    result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                                }
                                result.setResult(true);
                            } catch (KettleDatabaseException je) {
                                db.disconnect();
                                result.setNrErrors(1);
                                logError("An error occurred executing this job entry : " + je.getMessage());
                            } catch (KettleFileException e) {
                                logError("An error occurred executing this job entry : " + e.getMessage());
                                result.setNrErrors(1);
                            }
                        } else {
                            // Of course, the table should have been created already before the bulk load operation
                            db.disconnect();
                            result.setNrErrors(1);
                            if (log.isDetailed()) {
                                logDetailed("Table [" + realTablename + "] doesn't exist!");
                            }
                        }
                    } catch (KettleDatabaseException dbe) {
                        db.disconnect();
                        result.setNrErrors(1);
                        logError("An error occurred executing this entry: " + dbe.getMessage());
                    }
                } else {
                    // No database connection is defined
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nodatabase.Label"));
                }
            } else {
                // the file doesn't exist
                result.setNrErrors(1);
                logError("File [" + realFilename + "] doesn't exist!");
            }
        } catch (Exception e) {
            // An unexpected error occurred
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.UnexpectedError.Label"), e);
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nofilename.Label"));
    }
    return result;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) LocalFile(org.apache.commons.vfs2.provider.local.LocalFile) KettleFileException(org.pentaho.di.core.exception.KettleFileException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) FileObject(org.apache.commons.vfs2.FileObject) ResultFile(org.pentaho.di.core.ResultFile) LocalFile(org.apache.commons.vfs2.provider.local.LocalFile) File(java.io.File) ResultFile(org.pentaho.di.core.ResultFile) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) Result(org.pentaho.di.core.Result)
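
One detail that is easy to miss above is the quoting rule: every user-supplied value is embedded in a single-quoted MySQL literal only after doubling any embedded single quote via Const.replace(value, "'", "''"). A plain-Java sketch of that rule with placeholder values:

class MysqlLiteralSketch {
    // Same rule Example 68 applies with Const.replace(value, "'", "''"):
    // double embedded single quotes, then wrap the value in single quotes.
    static String quote(String raw) {
        return "'" + raw.replace("'", "''") + "'";
    }

    public static void main(String[] args) {
        String fields = "FIELDS TERMINATED BY " + quote(";")
            + " ENCLOSED BY " + quote("\"");
        // prints: FIELDS TERMINATED BY ';' ENCLOSED BY '"'
        System.out.println(fields);
    }
}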

Example 69 with ResultFile

Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class JobEntryPGPEncryptFiles, method addFileToResultFilenames.

private void addFileToResultFilenames(String fileaddentry, Result result, Job parentJob) {
    try {
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(fileaddentry), parentJob.getJobname(), toString());
        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        if (isDebug()) {
            logDebug(" ------ ");
            logDebug(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Log.FileAddedToResultFilesName", fileaddentry));
        }
    } catch (Exception e) {
        logError(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Error.AddingToFilenameResult"), fileaddentry + "" + e.getMessage());
    }
}
Also used : ResultFile(org.pentaho.di.core.ResultFile) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) IOException(java.io.IOException)
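
A usage sketch for a helper like this one, assumed to be called from the same entry's execute() method (where result and parentJob are in scope) once the encryption step has produced its output; the file paths are placeholders:

// Hypothetical call site inside the same job entry's execute() method:
String[] producedFiles = { "/tmp/a.gpg", "/tmp/b.gpg" };  // placeholder paths
for (String fileaddentry : producedFiles) {
    addFileToResultFilenames(fileaddentry, result, parentJob);
}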

Example 70 with ResultFile

Use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class JobEntryExportRepository, method addFileToResultFilenames.

private void addFileToResultFilenames(String fileaddentry, LogChannelInterface log, Result result, Job parentJob) {
    try {
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(fileaddentry, this), parentJob.getJobname(), toString());
        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        if (log.isDebug()) {
            logDebug(BaseMessages.getString(PKG, "JobExportRepository.Log.FileAddedToResultFilesName", fileaddentry));
        }
    } catch (Exception e) {
        log.logError(BaseMessages.getString(PKG, "JobExportRepository.Error.AddingToFilenameResult"), fileaddentry + "" + e.getMessage());
    }
}
Also used : ResultFile(org.pentaho.di.core.ResultFile) KettleException(org.pentaho.di.core.exception.KettleException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException)
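
Both helpers above show the producer side of the pattern; the consumer side is symmetrical, with a downstream entry reading the registered files back off the Result. A minimal sketch, assuming only the Result and ResultFile accessors already used in these examples:

import org.pentaho.di.core.Result;
import org.pentaho.di.core.ResultFile;

class ResultFileConsumerSketch {
    // List every file a previous entry registered on the Result.
    static void listRegisteredFiles(Result result) {
        for (ResultFile resultFile : result.getResultFiles().values()) {
            System.out.println(resultFile.getFile() + " registered by " + resultFile.getOrigin());
        }
    }
}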

Aggregations

ResultFile (org.pentaho.di.core.ResultFile): 83
KettleException (org.pentaho.di.core.exception.KettleException): 65
FileObject (org.apache.commons.vfs2.FileObject): 32
IOException (java.io.IOException): 29
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException): 29
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException): 28
Result (org.pentaho.di.core.Result): 20
KettleFileException (org.pentaho.di.core.exception.KettleFileException): 16
KettleStepException (org.pentaho.di.core.exception.KettleStepException): 12
RowMetaAndData (org.pentaho.di.core.RowMetaAndData): 11
File (java.io.File): 10
OutputStream (java.io.OutputStream): 10
Date (java.util.Date): 9
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString): 6
FileInputStream (java.io.FileInputStream): 5
KettleValueException (org.pentaho.di.core.exception.KettleValueException): 5
ArrayList (java.util.ArrayList): 4
Matcher (java.util.regex.Matcher): 4
Pattern (java.util.regex.Pattern): 4
KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint): 4