Search in sources :

Example 46 with ResultFile

use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class SQLFileOutput defines the method openNewFile.

/**
 * Opens the output file for this step and attaches a writer to {@code data.writer}.
 *
 * @return true when the output stream was opened successfully, false on any error
 */
public boolean openNewFile() {
    boolean retval = false;
    data.writer = null;
    try {
        String filename = buildFilename();
        if (meta.AddToResult()) {
            // Add this to the result file names so downstream job entries can pick it up.
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(filename, getTransMeta()), getTransMeta().getName(), getStepname());
            // FIX: comment previously said "text file output step" — copy/paste from another step.
            resultFile.setComment("This file was created with a SQL file output step");
            addResultFile(resultFile);
        }
        if (log.isDetailed()) {
            logDetailed("Opening output stream in nocompress mode");
        }
        OutputStream outputStream = KettleVFS.getOutputStream(filename, getTransMeta(), meta.isFileAppended());
        // FIX: the original unconditionally created a default-encoding writer here and then
        // immediately overwrote it in both branches below — that dead assignment is removed.
        if (!Utils.isEmpty(meta.getEncoding())) {
            // FIX: guard was log.isBasic() while the call was logDetailed(); made consistent.
            if (log.isDetailed()) {
                logDetailed("Opening output stream in encoding: " + meta.getEncoding());
            }
            data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000), environmentSubstitute(meta.getEncoding()));
        } else {
            if (log.isDetailed()) {
                logDetailed("Opening output stream in default encoding");
            }
            data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000));
        }
        if (log.isDetailed()) {
            logDetailed("Opened new file with name [" + filename + "]");
        }
        data.splitnr++;
        retval = true;
    } catch (Exception e) {
        logError("Error opening new file : " + e.toString());
    }
    return retval;
}
Also used : OutputStream(java.io.OutputStream) BufferedOutputStream(java.io.BufferedOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) ResultFile(org.pentaho.di.core.ResultFile) BufferedOutputStream(java.io.BufferedOutputStream) KettleException(org.pentaho.di.core.exception.KettleException) KettleStepException(org.pentaho.di.core.exception.KettleStepException)

Example 47 with ResultFile

use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class RssOutput defines the method WriteToFile.

/**
 * Builds the RSS feed from the given channel fields and the accumulated
 * entries in {@code data.entries}, then serializes it to {@code data.filename}.
 * Optionally registers the written file in the transformation result.
 *
 * @return true when the feed was written successfully, false on any error
 *         (errors are logged and the step error count is incremented)
 */
private boolean WriteToFile(String title, String link, String description, Date Pubdate, String copyright, String imageTitle, String imageDescription, String imageLink, String imageUrl, String language, String author) {
    boolean retval = false;
    try {
        // Specify Filename
        String fileName = data.filename;
        // Set channel ...
        data.feed = new SyndFeedImpl();
        if (Utils.isEmpty(meta.getVersion())) {
            // Default feed type when none is configured.
            data.feed.setFeedType("rss_2.0");
        } else {
            data.feed.setFeedType(meta.getVersion());
        }
        // Set encoding ...
        if (Utils.isEmpty(meta.getEncoding())) {
            data.feed.setEncoding("iso-8859-1");
        } else {
            data.feed.setEncoding(meta.getEncoding());
        }
        if (title != null) {
            data.feed.setTitle(title);
        }
        if (link != null) {
            data.feed.setLink(link);
        }
        if (description != null) {
            data.feed.setDescription(description);
        }
        if (Pubdate != null) {
            // data.dateParser.parse(Pubdate.toString()));
            data.feed.setPublishedDate(Pubdate);
        }
        // Set image ..
        if (meta.AddImage()) {
            SyndImage image = new SyndImageImpl();
            if (imageTitle != null) {
                // FIX: was image.setTitle(title) — copy/paste bug using the channel title.
                image.setTitle(imageTitle);
            }
            if (imageLink != null) {
                // FIX: was image.setLink(link) — copy/paste bug using the channel link.
                image.setLink(imageLink);
            }
            if (imageUrl != null) {
                image.setUrl(imageUrl);
            }
            if (imageDescription != null) {
                image.setDescription(imageDescription);
            }
            data.feed.setImage(image);
        }
        if (language != null) {
            data.feed.setLanguage(language);
        }
        if (copyright != null) {
            data.feed.setCopyright(copyright);
        }
        if (author != null) {
            data.feed.setAuthor(author);
        }
        // Add entries
        data.feed.setEntries(data.entries);
        // FIX: try-with-resources guarantees the writer is closed even when
        // output() throws — the original leaked the FileWriter on failure.
        try (Writer writer = new FileWriter(fileName)) {
            SyndFeedOutput output = new SyndFeedOutput();
            output.output(data.feed, writer);
        }
        if (meta.AddToResult()) {
            // Add this to the result file names...
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(fileName, getTransMeta()), getTransMeta().getName(), getStepname());
            resultFile.setComment("This file was created with a RSS Output step");
            addResultFile(resultFile);
        }
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "RssOutput.Log.CreatingFileOK", fileName));
        }
        retval = true;
    } catch (Exception e) {
        logError(BaseMessages.getString(PKG, "RssOutput.Log.ErrorCreatingFile", e.toString()));
        setErrors(1);
        retval = false;
    }
    return retval;
}
Also used : SyndImageImpl(com.sun.syndication.feed.synd.SyndImageImpl) FileWriter(java.io.FileWriter) SyndImage(com.sun.syndication.feed.synd.SyndImage) SyndFeedImpl(com.sun.syndication.feed.synd.SyndFeedImpl) SyndFeedOutput(com.sun.syndication.io.SyndFeedOutput) ResultFile(org.pentaho.di.core.ResultFile) XMLWriter(org.dom4j.io.XMLWriter) FileWriter(java.io.FileWriter) Writer(java.io.Writer) KettleException(org.pentaho.di.core.exception.KettleException) KettleStepException(org.pentaho.di.core.exception.KettleStepException)

Example 48 with ResultFile

use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class SasInput defines the method processRow.

/**
 * Processes one incoming row: reads a filename from the configured accepting
 * field, opens the referenced SAS7BDAT file and streams every selected data
 * row it contains to the next steps, appended to the incoming row's fields.
 *
 * @param smi step metadata, cast to SasInputMeta
 * @param sdi step data, cast to SasInputData
 * @return false when the input row stream is exhausted, true otherwise
 * @throws KettleException when the filename field is missing, or when a
 *         subsequent file's layout differs from the first file read
 */
@Override
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (SasInputMeta) smi;
    data = (SasInputData) sdi;
    final Object[] fileRowData = getRow();
    if (fileRowData == null) {
        // No more work to do...
        // 
        setOutputDone();
        return false;
    }
    // First-row-only setup: resolve the filename field and the output layout.
    if (first) {
        // The output row meta data, what does it look like?
        // 
        data.outputRowMeta = new RowMeta();
        // See if the input row contains the filename field...
        // 
        int idx = getInputRowMeta().indexOfValue(meta.getAcceptingField());
        if (idx < 0) {
            throw new KettleException(BaseMessages.getString(PKG, "SASInput.Log.Error.UnableToFindFilenameField", meta.getAcceptingField()));
        }
        // Determine the output row layout: the input fields plus the fields
        // declared by this step's metadata.
        // 
        data.outputRowMeta = getInputRowMeta().clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
    }
    String rawFilename = getInputRowMeta().getString(fileRowData, meta.getAcceptingField(), null);
    final String filename = KettleVFS.getFilename(KettleVFS.getFileObject(rawFilename));
    data.helper = new SasInputHelper(filename);
    logBasic(BaseMessages.getString(PKG, "SASInput.Log.OpenedSASFile") + " : [" + data.helper + "]");
    // Capture the layout of the first file; every later file must match it.
    if (data.fileLayout == null) {
        data.fileLayout = data.helper.getRowMeta();
    } else {
        // Verify field count, names and types against the first file's layout.
        if (data.fileLayout.size() != data.helper.getRowMeta().size()) {
            throw new KettleException("All input files need to have the same number of fields. File '" + filename + "' has " + data.helper.getRowMeta().size() + " fields while the first file only had " + data.fileLayout.size());
        }
        for (int i = 0; i < data.fileLayout.size(); i++) {
            ValueMetaInterface first = data.fileLayout.getValueMeta(i);
            ValueMetaInterface second = data.helper.getRowMeta().getValueMeta(i);
            if (!first.getName().equalsIgnoreCase(second.getName())) {
                throw new KettleException("Field nr " + i + " in file '" + filename + "' is called '" + second.getName() + "' while it was called '" + first.getName() + "' in the first file");
            }
            if (first.getType() != second.getType()) {
                throw new KettleException("Field nr " + i + " in file '" + filename + "' is of data type '" + second.getTypeDesc() + "' while it was '" + first.getTypeDesc() + "' in the first file");
            }
        }
    }
    // Second first-row step: map each selected output field to its index in
    // the file layout (needs data.fileLayout, so it runs after the block above).
    if (first) {
        first = false;
        data.fieldIndexes = new ArrayList<Integer>();
        for (SasInputField field : meta.getOutputFields()) {
            int fieldIndex = data.fileLayout.indexOfValue(field.getName());
            if (fieldIndex < 0) {
                throw new KettleException("Selected field '" + field.getName() + "' couldn't be found in file '" + filename + "'");
            }
            data.fieldIndexes.add(fieldIndex);
        }
    }
    // Add this to the result file names...
    // 
    ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(filename), getTransMeta().getName(), getStepname());
    resultFile.setComment(BaseMessages.getString(PKG, "SASInput.ResultFile.Comment"));
    addResultFile(resultFile);
    SasReader sasReader = new SasReader(new File(filename));
    // The callback below pushes each file row to the next steps; it captures
    // fileRowData and filename from this method's scope.
    sasReader.read(new SasReaderCallback() {

        private boolean firstRead = true;

        @Override
        public void column(int index, String name, String label, SasColumnType type, int length) {
        }

        @Override
        public boolean readData() {
            return true;
        }

        @Override
        public boolean row(int rowNumber, Object[] rowData) {
            try {
                // NOTE(review): after the first callback, seeing rowNumber == 1
                // again stops reading — presumably guards against the reader
                // restarting from the top. TODO confirm against SasReader docs.
                if (firstRead) {
                    firstRead = false;
                } else {
                    if (rowNumber == 1) {
                        return false;
                    }
                }
                Object[] row = RowDataUtil.createResizedCopy(fileRowData, data.outputRowMeta.size());
                // Only pick those fields that we're interested in.
                // 
                int outputIndex = getInputRowMeta().size();
                for (int i = 0; i < data.fieldIndexes.size(); i++) {
                    int fieldIndex = data.fieldIndexes.get(i);
                    int type = data.fileLayout.getValueMeta(fieldIndex).getType();
                    switch(type) {
                        case ValueMetaInterface.TYPE_STRING:
                            row[outputIndex++] = rowData[fieldIndex];
                            break;
                        case ValueMetaInterface.TYPE_NUMBER:
                            // NaN is mapped to null so downstream steps see a missing value.
                            Double value = (Double) rowData[fieldIndex];
                            if (value.equals(Double.NaN)) {
                                value = null;
                            }
                            row[outputIndex++] = value;
                            break;
                        default:
                            throw new RuntimeException("Unhandled data type '" + ValueMetaFactory.getValueMetaName(type));
                    }
                }
                // Convert the data type of the new data to the requested data types
                // 
                convertData(data.fileLayout, row, data.outputRowMeta);
                // Pass along the row to further steps...
                // 
                putRow(data.outputRowMeta, row);
                return !isStopped();
            } catch (Exception e) {
                throw new RuntimeException("There was an error reading from SAS7BAT file '" + filename + "'", e);
            }
        }
    });
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) RowMeta(org.pentaho.di.core.row.RowMeta) ResultFile(org.pentaho.di.core.ResultFile) KettleException(org.pentaho.di.core.exception.KettleException) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) SasColumnType(org.eobjects.sassy.SasColumnType) SasReader(org.eobjects.sassy.SasReader) File(java.io.File) ResultFile(org.pentaho.di.core.ResultFile) SasReaderCallback(org.eobjects.sassy.SasReaderCallback)

Example 49 with ResultFile

use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class LoadFileInput defines the method addFileToResultFilesName.

/**
 * Registers the given file in the transformation's result file list when the
 * step is configured to do so; otherwise this is a no-op.
 *
 * @param file the file that was read by this step
 * @throws Exception if registering the result file fails
 */
private void addFileToResultFilesName(FileObject file) throws Exception {
    if (!meta.getAddResultFile()) {
        return;
    }
    ResultFile resultFile =
        new ResultFile(ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname());
    resultFile.setComment("File was read by a LoadFileInput step");
    addResultFile(resultFile);
}
Also used : ResultFile(org.pentaho.di.core.ResultFile)

Example 50 with ResultFile

use of org.pentaho.di.core.ResultFile in project pentaho-kettle by pentaho.

The class ExcelInput defines the method getRowFromWorkbooks.

/**
 * Produces a single Excel data row for the destination row sets, opening the
 * next file and advancing through sheets as needed.
 *
 * @return the next output row, or null when the current line is filtered out,
 *         skipped as empty, or on error (errors also stop the transformation)
 */
public Object[] getRowFromWorkbooks() {
    // This procedure outputs a single Excel data row on the destination
    // rowsets...
    Object[] retval = null;
    try {
        // First, see if a file has been opened?
        if (data.workbook == null) {
            // Open a new openFile..
            data.file = data.files.getFile(data.filenr);
            data.filename = KettleVFS.getFilename(data.file);
            // Populate the optional per-file "additional fields" only when a
            // target field name is configured for them.
            if (meta.getShortFileNameField() != null && meta.getShortFileNameField().length() > 0) {
                data.shortFilename = data.file.getName().getBaseName();
            }
            if (meta.getPathField() != null && meta.getPathField().length() > 0) {
                data.path = KettleVFS.getFilename(data.file.getParent());
            }
            if (meta.isHiddenField() != null && meta.isHiddenField().length() > 0) {
                data.hidden = data.file.isHidden();
            }
            if (meta.getExtensionField() != null && meta.getExtensionField().length() > 0) {
                data.extension = data.file.getName().getExtension();
            }
            if (meta.getLastModificationDateField() != null && meta.getLastModificationDateField().length() > 0) {
                data.lastModificationDateTime = new Date(data.file.getContent().getLastModifiedTime());
            }
            if (meta.getUriField() != null && meta.getUriField().length() > 0) {
                data.uriName = data.file.getName().getURI();
            }
            if (meta.getRootUriField() != null && meta.getRootUriField().length() > 0) {
                data.rootUriName = data.file.getName().getRootURI();
            }
            if (meta.getSizeField() != null && meta.getSizeField().length() > 0) {
                // FIX: replaced deprecated new Long(...) with Long.valueOf(...).
                data.size = Long.valueOf(data.file.getContent().getSize());
            }
            if (meta.isAddResultFile()) {
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), toString());
                resultFile.setComment(BaseMessages.getString(PKG, "ExcelInput.Log.FileReadByStep"));
                addResultFile(resultFile);
            }
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "ExcelInput.Log.OpeningFile", "" + data.filenr + " : " + data.filename));
            }
            // NOTE(review): fpis is a field and is not closed in this method —
            // presumably closed when jumping to the next file. TODO confirm.
            fpis = new FileInputStream(data.filename);
            data.workbook = WorkbookFactory.getWorkbook(meta.getSpreadSheetType(), fpis, meta.getEncoding());
            data.errorHandler.handleFile(data.file);
            // Start at the first sheet again...
            data.sheetnr = 0;
            // When reading all sheets, (re)build the per-sheet start positions.
            if (meta.readAllSheets()) {
                data.sheetNames = data.workbook.getSheetNames();
                data.startColumn = new int[data.sheetNames.length];
                data.startRow = new int[data.sheetNames.length];
                for (int i = 0; i < data.sheetNames.length; i++) {
                    data.startColumn[i] = data.defaultStartColumn;
                    data.startRow[i] = data.defaultStartRow;
                }
            }
        }
        boolean nextsheet = false;
        // What sheet were we handling?
        if (log.isDebug()) {
            // FIX: guard was log.isDebug() while the call was logDetailed();
            // made the call consistent with the guard.
            logDebug(BaseMessages.getString(PKG, "ExcelInput.Log.GetSheet", "" + data.filenr + "." + data.sheetnr));
        }
        String sheetName = data.sheetNames[data.sheetnr];
        KSheet sheet = data.workbook.getSheet(sheetName);
        if (sheet != null) {
            // at what row do we continue reading?
            if (data.rownr < 0) {
                data.rownr = data.startRow[data.sheetnr];
                // Add an extra row if we have a header row to skip...
                if (meta.startsWithHeader()) {
                    data.rownr++;
                }
            }
            // Start at the specified column
            data.colnr = data.startColumn[data.sheetnr];
            // Build a new row and fill in the data from the sheet...
            try {
                KCell[] line = sheet.getRow(data.rownr);
                // Already increase cursor 1 row
                int lineNr = ++data.rownr;
                // Excel starts counting at 0
                if (!data.filePlayList.isProcessingNeeded(data.file, lineNr, sheetName)) {
                    // placeholder, was already null
                    retval = null;
                } else {
                    if (log.isRowLevel()) {
                        logRowlevel(BaseMessages.getString(PKG, "ExcelInput.Log.GetLine", "" + lineNr, data.filenr + "." + data.sheetnr));
                    }
                    if (log.isRowLevel()) {
                        logRowlevel(BaseMessages.getString(PKG, "ExcelInput.Log.ReadLineWith", "" + line.length));
                    }
                    ExcelInputRow excelInputRow = new ExcelInputRow(sheet.getName(), lineNr, line);
                    Object[] r = fillRow(data.colnr, excelInputRow);
                    if (log.isRowLevel()) {
                        logRowlevel(BaseMessages.getString(PKG, "ExcelInput.Log.ConvertedLinToRow", "" + lineNr, data.outputRowMeta.getString(r)));
                    }
                    boolean isEmpty = isLineEmpty(line);
                    if (!isEmpty || !meta.ignoreEmptyRows()) {
                        // Put the row
                        retval = r;
                    } else {
                        if (data.rownr > sheet.getRows()) {
                            nextsheet = true;
                        }
                    }
                    if (isEmpty && meta.stopOnEmpty()) {
                        nextsheet = true;
                    }
                }
            } catch (ArrayIndexOutOfBoundsException e) {
                if (log.isRowLevel()) {
                    logRowlevel(BaseMessages.getString(PKG, "ExcelInput.Log.OutOfIndex"));
                }
                // We tried to read below the last line in the sheet.
                // Go to the next sheet...
                nextsheet = true;
            }
        } else {
            nextsheet = true;
        }
        if (nextsheet) {
            // Go to the next sheet
            data.sheetnr++;
            // Reset the start-row:
            data.rownr = -1;
            // no previous row yet, don't take it from the previous sheet!
            // (that would be plain wrong!)
            data.previousRow = null;
            // Perhaps it was the last sheet?
            if (data.sheetnr >= data.sheetNames.length) {
                jumpToNextFile();
            }
        }
    } catch (Exception e) {
        logError(BaseMessages.getString(PKG, "ExcelInput.Error.ProcessRowFromExcel", data.filename + "", e.toString()), e);
        setErrors(1);
        stopAll();
        return null;
    }
    return retval;
}
Also used : KSheet(org.pentaho.di.core.spreadsheet.KSheet) ResultFile(org.pentaho.di.core.ResultFile) KCell(org.pentaho.di.core.spreadsheet.KCell) Date(java.util.Date) FileInputStream(java.io.FileInputStream) KettleException(org.pentaho.di.core.exception.KettleException) KettleFileException(org.pentaho.di.core.exception.KettleFileException) IOException(java.io.IOException) FileObject(org.apache.commons.vfs2.FileObject)

Aggregations

ResultFile (org.pentaho.di.core.ResultFile)83 KettleException (org.pentaho.di.core.exception.KettleException)65 FileObject (org.apache.commons.vfs2.FileObject)32 IOException (java.io.IOException)29 KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException)29 KettleXMLException (org.pentaho.di.core.exception.KettleXMLException)28 Result (org.pentaho.di.core.Result)20 KettleFileException (org.pentaho.di.core.exception.KettleFileException)16 KettleStepException (org.pentaho.di.core.exception.KettleStepException)12 RowMetaAndData (org.pentaho.di.core.RowMetaAndData)11 File (java.io.File)10 OutputStream (java.io.OutputStream)10 Date (java.util.Date)9 ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString)6 FileInputStream (java.io.FileInputStream)5 KettleValueException (org.pentaho.di.core.exception.KettleValueException)5 ArrayList (java.util.ArrayList)4 Matcher (java.util.regex.Matcher)4 Pattern (java.util.regex.Pattern)4 KettleExtensionPoint (org.pentaho.di.core.extension.KettleExtensionPoint)4