
Example 36 with Type

use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho.

the class ParGzipCsvInput method processRow.

public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (ParGzipCsvInputMeta) smi;
    data = (ParGzipCsvInputData) sdi;
    if (first) {
        first = false;
        data.outputRowMeta = new RowMeta();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
        if (data.filenames == null) {
            // We're expecting the list of filenames from the previous step(s)...
            // 
            getFilenamesFromPreviousSteps();
        }
        // We only run in parallel if we have at least one file to process
        // AND if we have more than one step copy running...
        // 
        data.parallel = meta.isRunningInParallel() && data.totalNumberOfSteps > 1;
        // The conversion logic for when lazy conversion is turned off is simple:
        // Pretend it's a lazy conversion object anyway and get the native type during conversion.
        // 
        data.convertRowMeta = data.outputRowMeta.clone();
        for (ValueMetaInterface valueMeta : data.convertRowMeta.getValueMetaList()) {
            valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
        }
        // Calculate the indexes for the filename and row number fields
        // 
        data.filenameFieldIndex = -1;
        if (!Utils.isEmpty(meta.getFilenameField()) && meta.isIncludingFilename()) {
            data.filenameFieldIndex = meta.getInputFields().length;
        }
        data.rownumFieldIndex = -1;
        if (!Utils.isEmpty(meta.getRowNumField())) {
            data.rownumFieldIndex = meta.getInputFields().length;
            if (data.filenameFieldIndex >= 0) {
                data.rownumFieldIndex++;
            }
        }
        // Open the next file...
        // 
        boolean opened = false;
        while (data.filenr < data.filenames.length) {
            if (openNextFile()) {
                opened = true;
                break;
            }
        }
        if (!opened) {
            // last file, end here
            setOutputDone();
            return false;
        }
    }
    // get row, set busy!
    Object[] outputRowData = readOneRow(true);
    if (outputRowData == null) {
        if (skipToNextBlock()) {
            // If we need to open a new file, don't stop just because openNextFile() returns false.
            // It can also mean that the file is smaller than the block size
            // In that case, check the file number and retry until we get a valid file position to work with.
            // 
            boolean opened = false;
            while (data.filenr < data.filenames.length) {
                if (openNextFile()) {
                    opened = true;
                    break;
                }
            }
            if (opened) {
                // try again on the next loop in the next file...
                return true;
            } else {
                incrementLinesUpdated();
                // last file, end here
                setOutputDone();
                return false;
            }
        } else {
            // try again on the next loop in the next block...
            return true;
        }
    } else {
        // copy row to possible alternate rowset(s).
        putRow(data.outputRowMeta, outputRowData);
        if (checkFeedback(getLinesInput())) {
            if (log.isBasic()) {
                logBasic(BaseMessages.getString(PKG, "ParGzipCsvInput.Log.LineNumber", Long.toString(getLinesInput())));
            }
        }
    }
    return true;
}
Also used : RowMeta(org.pentaho.di.core.row.RowMeta) FileObject(org.apache.commons.vfs2.FileObject) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface)
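
In the loop above every output field is flagged as binary-string storage, so values read from the gzipped CSV stay lazily converted. Below is a minimal, hypothetical sketch (not part of the Kettle sources) of turning such a lazily stored value back into its native type, assuming the convertBinaryStringToNativeType() contract of ValueMetaInterface and using ValueMetaString from org.pentaho.di.core.row.value:

// Hypothetical illustration, not taken from the Kettle sources.
static Object toNativeValue(byte[] rawBytes) throws KettleValueException {
    ValueMetaInterface stringMeta = new ValueMetaString("field1");
    stringMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
    // the storage metadata describes how the raw bytes are encoded
    stringMeta.setStorageMetadata(new ValueMetaString("field1"));
    return stringMeta.convertBinaryStringToNativeType(rawBytes);
}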

Example 37 with Type

use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho.

the class RepositoryExporter method exportAllObjectsInternal.

private synchronized List<ExportFeedback> exportAllObjectsInternal(ProgressMonitorListener monitorOuter, String xmlFilename, RepositoryDirectoryInterface root, String exportType, boolean feedback) throws KettleException {
    this.feedbackList.clear();
    // deal with monitor
    ProgressMonitorDecorator monitor;
    if (monitorOuter == null) {
        monitor = new ProgressMonitorDecorator(new ProgressNullMonitorListener());
    } else {
        monitor = new ProgressMonitorDecorator(monitorOuter);
    }
    monitor.beginTask(BaseMessages.getString(PKG, "Repository.Exporter.Monitor.BeginTask"), 104);
    FileObject output = KettleVFS.getFileObject(xmlFilename);
    ExportFeedback feed = new ExportFeedback();
    feed.setItemName(BaseMessages.getString(PKG, "Repository.Exporter.Feedback.CreateExportFile", xmlFilename));
    feed.setSimpleString(true);
    this.feedbackList.add(feed);
    ExportWriter writer = null;
    try {
        // prepare export
        writer = new ExportWriter(output);
        monitor.worked(4);
        monitor.subTask(BaseMessages.getString(PKG, "Repository.Exporter.Monitor.ConnectToRepository"));
        root = ((null == root) ? repository.loadRepositoryDirectoryTree() : root);
        ExportType type = ExportType.valueOf(exportType.toUpperCase());
        switch(type) {
            case ALL:
                {
                    exportTransformations(monitor, root, writer, feedback);
                    monitor.worked(50);
                    exportJobs(monitor, root, writer, feedback);
                    monitor.worked(50);
                    break;
                }
            case TRANS:
                {
                    exportTransformations(monitor, root, writer, feedback);
                    monitor.worked(100);
                    break;
                }
            case JOBS:
                {
                    exportJobs(monitor, root, writer, feedback);
                    monitor.worked(100);
                    break;
                }
            default:
                {
                    // this will never happen
                    throw new KettleException("Unsupported export type: " + type);
                }
        }
        monitor.subTask(BaseMessages.getString(PKG, "Repository.Exporter.Monitor.SavingResultFile"));
    } finally {
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (Exception e) {
            log.logDebug(BaseMessages.getString(PKG, "Repository.Exporter.Exception.CloseExportFile", xmlFilename));
        }
    }
    if (monitor != null) {
        monitor.done();
    }
    return this.feedbackList;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) FileObject(org.apache.commons.vfs2.FileObject) ProgressNullMonitorListener(org.pentaho.di.core.ProgressNullMonitorListener) KettleException(org.pentaho.di.core.exception.KettleException) FileSystemException(org.apache.commons.vfs2.FileSystemException) IOException(java.io.IOException) UnsupportedEncodingException(java.io.UnsupportedEncodingException)
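
A short, hedged usage sketch (not part of the Kettle sources) of driving this export from calling code, assuming the public exportAllObjects(...) entry point of RepositoryExporter delegates to the internal method above; passing a null monitor results in a ProgressNullMonitorListener being substituted, as shown:

// Hypothetical usage; "repository" is an already connected Repository instance.
RepositoryExporter exporter = new RepositoryExporter(repository);
// export the whole directory tree (transformations and jobs) without a progress monitor
exporter.exportAllObjects(null, "/tmp/repo-export.xml", null, "all");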

Example 38 with Type

use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho.

the class ExcelWriterStepTest method testBase.

/**
 * <p>The base for testing if a field of a specific type is correctly handled.</p>
 *
 * @param vmi               {@link ValueMetaInterface}'s instance to be used
 * @param vObj              the {@link Object} to be used as the value
 * @param extension         the extension to be used
 * @param dotExtension      the extension preceded by a dot (used for the temp file and the template)
 * @param isStreaming       whether to use streaming
 * @param isTemplateEnabled whether to use a template
 */
private void testBase(ValueMetaInterface vmi, Object vObj, String extension, String dotExtension, boolean isStreaming, boolean isTemplateEnabled) throws Exception {
    Object[] vObjArr = { vObj };
    assertTrue(step.init(metaMock, dataMock));
    File tempFile = File.createTempFile(extension, dotExtension);
    tempFile.deleteOnExit();
    String path = tempFile.getAbsolutePath();
    if (isTemplateEnabled) {
        dataMock.realTemplateFileName = getClass().getResource("template_test" + dotExtension).getFile();
    }
    dataMock.fieldnrs = new int[] { 0 };
    dataMock.linkfieldnrs = new int[] { -1 };
    dataMock.commentfieldnrs = new int[] { -1 };
    dataMock.createNewFile = true;
    dataMock.realSheetname = SHEET_NAME;
    dataMock.inputRowMeta = mock(RowMetaInterface.class);
    doReturn(path).when(step).buildFilename(0);
    doReturn(isTemplateEnabled).when(metaMock).isTemplateEnabled();
    doReturn(isStreaming).when(metaMock).isStreamingData();
    doReturn(false).when(metaMock).isHeaderEnabled();
    doReturn(extension).when(metaMock).getExtension();
    ExcelWriterStepField field = new ExcelWriterStepField();
    doReturn(new ExcelWriterStepField[] { field }).when(metaMock).getOutputFields();
    doReturn(1).when(dataMock.inputRowMeta).size();
    doReturn(vmi).when(dataMock.inputRowMeta).getValueMeta(anyInt());
    step.prepareNextOutputFile();
    assertNull(dataMock.sheet.getRow(1));
    // Unfortunately HSSFSheet is final and cannot be mocked, so we'll skip some validations
    dataMock.posY = 1;
    if (null != dataMock.sheet && !(dataMock.sheet instanceof HSSFSheet)) {
        dataMock.sheet = spy(dataMock.sheet);
    }
    step.writeNextLine(vObjArr);
    if (null != dataMock.sheet && !(dataMock.sheet instanceof HSSFSheet)) {
        verify(step).writeField(eq(vObj), eq(vmi), eq(field), any(Row.class), eq(0), any(), eq(0), eq(Boolean.FALSE));
        verify(dataMock.sheet).createRow(anyInt());
        verify(dataMock.sheet).getRow(1);
    }
    assertNotNull(dataMock.sheet.getRow(1));
}
Also used : Matchers.anyObject(org.mockito.Matchers.anyObject) FileObject(org.apache.commons.vfs2.FileObject) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) HSSFSheet(org.apache.poi.hssf.usermodel.HSSFSheet) ValueMetaString(org.pentaho.di.core.row.value.ValueMetaString) Row(org.apache.poi.ss.usermodel.Row) File(java.io.File)
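
A hedged example of how such a helper is typically invoked from an individual test case; the field name, cell value and file format below are made up for illustration:

@Test
public void testStringFieldXlsxStreaming() throws Exception {
    // hypothetical invocation of the testBase() helper above
    testBase(new ValueMetaString("stringField"), "some text", "xlsx", ".xlsx", true, false);
}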

Example 39 with Type

use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho.

the class ScriptAddedFunctions method setVariable.

// Setting Variable
public static void setVariable(ScriptEngine actualContext, Bindings actualObject, Object[] ArgList, Object FunctionContext) {
    String sArg1 = "";
    String sArg2 = "";
    String sArg3 = "";
    if (ArgList.length == 3) {
        try {
            Object scmo = actualObject.get("_step_");
            Object scmO = scmo;
            if (scmO instanceof ScriptInterface) {
                ScriptInterface scm = (ScriptInterface) scmO;
                sArg1 = (String) ArgList[0];
                sArg2 = (String) ArgList[1];
                sArg3 = (String) ArgList[2];
                if ("s".equals(sArg3)) {
                    // System-wide properties
                    System.setProperty(sArg1, sArg2);
                    // Also set it all the way up to the root, otherwise we would pick up
                    // stale values
                    scm.setVariable(sArg1, sArg2);
                    VariableSpace parentSpace = scm.getParentVariableSpace();
                    while (parentSpace != null) {
                        parentSpace.setVariable(sArg1, sArg2);
                        parentSpace = parentSpace.getParentVariableSpace();
                    }
                } else if ("r".equals(sArg3)) {
                    // Up to the root... this should be the default.
                    scm.setVariable(sArg1, sArg2);
                    VariableSpace parentSpace = scm.getParentVariableSpace();
                    while (parentSpace != null) {
                        parentSpace.setVariable(sArg1, sArg2);
                        parentSpace = parentSpace.getParentVariableSpace();
                    }
                } else if ("p".equals(sArg3)) {
                    // Up to the parent
                    scm.setVariable(sArg1, sArg2);
                    VariableSpace parentSpace = scm.getParentVariableSpace();
                    if (parentSpace != null) {
                        parentSpace.setVariable(sArg1, sArg2);
                    }
                } else if ("g".equals(sArg3)) {
                    // Up to the grandparent
                    scm.setVariable(sArg1, sArg2);
                    VariableSpace parentSpace = scm.getParentVariableSpace();
                    if (parentSpace != null) {
                        parentSpace.setVariable(sArg1, sArg2);
                        VariableSpace grandParentSpace = parentSpace.getParentVariableSpace();
                        if (grandParentSpace != null) {
                            grandParentSpace.setVariable(sArg1, sArg2);
                        }
                    }
                } else {
                    throw new RuntimeException("The argument type of function call setVariable should either be \"s\", \"r\", \"p\", or \"g\".");
                }
            }
        // Else: Ignore for now... if we're executing via the Test Button
        } catch (Exception e) {
            throw new RuntimeException(e.toString());
        }
    } else {
        throw new RuntimeException("The function call setVariable requires 3 arguments.");
    }
}
Also used : VariableSpace(org.pentaho.di.core.variables.VariableSpace) FileObject(org.apache.commons.vfs2.FileObject) KettleFileException(org.pentaho.di.core.exception.KettleFileException) ScriptException(javax.script.ScriptException) IOException(java.io.IOException)
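
A minimal, hypothetical sketch of calling this helper directly; the variable name and value are made up, scriptEngine and bindings are assumed to come from the hosting Script step, and the last element of the argument array selects the scope ("s", "r", "p" or "g"):

// Hypothetical call, not taken from the Kettle sources.
// Sets MY_VAR=42 in the step and all parent variable spaces up to the root ("r" scope).
ScriptAddedFunctions.setVariable(scriptEngine, bindings, new Object[] { "MY_VAR", "42", "r" }, null);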

Example 40 with Type

use of org.apache.commons.vfs2.UserAuthenticationData.Type in project pentaho-kettle by pentaho.

the class SortRows method processRow.

@Override
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    // wait until the first row is available
    Object[] r = getRow();
    List<String> groupFields = null;
    if (first) {
        this.first = false;
        // do we have any row at the start of processing?
        if (r == null) {
            // seems that we don't
            this.setOutputDone();
            return false;
        }
        RowMetaInterface inputRowMeta = getInputRowMeta();
        // do we have group numbers?
        if (meta.isGroupSortEnabled()) {
            data.newBatch = true;
            // we set the exact list instead of null
            groupFields = meta.getGroupFields();
            data.groupnrs = new int[groupFields.size()];
            for (int i = 0; i < groupFields.size(); i++) {
                data.groupnrs[i] = inputRowMeta.indexOfValue(groupFields.get(i));
                if (data.groupnrs[i] < 0) {
                    logError(BaseMessages.getString(PKG, "SortRows.Error.PresortedFieldNotFound", groupFields.get(i)));
                    setErrors(1);
                    stopAll();
                    return false;
                }
            }
        }
        String[] fieldNames = meta.getFieldName();
        data.fieldnrs = new int[fieldNames.length];
        List<Integer> toConvert = new ArrayList<Integer>();
        // Metadata
        data.outputRowMeta = inputRowMeta.clone();
        meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
        data.comparator = new RowTemapFileComparator(data.outputRowMeta, data.fieldnrs);
        for (int i = 0; i < fieldNames.length; i++) {
            data.fieldnrs[i] = inputRowMeta.indexOfValue(fieldNames[i]);
            if (data.fieldnrs[i] < 0) {
                throw new KettleException(BaseMessages.getString(PKG, "SortRowsMeta.CheckResult.StepFieldNotInInputStream", meta.getFieldName()[i], getStepname()));
            }
            // do we need binary conversion for this type?
            if (inputRowMeta.getValueMeta(data.fieldnrs[i]).isStorageBinaryString()) {
                toConvert.add(data.fieldnrs[i]);
            }
        }
        data.convertKeysToNative = toConvert.isEmpty() ? null : new int[toConvert.size()];
        int i = 0;
        for (Integer in : toConvert) {
            data.convertKeysToNative[i] = in;
            i++;
        }
        data.rowComparator = new RowObjectArrayComparator(data.outputRowMeta, data.fieldnrs);
    }
    // it is not the first row and it is null
    if (r == null) {
        // flush result and set output done.
        this.preSortBeforeFlush();
        this.passBuffer();
        this.setOutputDone();
        return false;
    }
    // if Group Sort is not enabled then do the normal sort.
    if (!meta.isGroupSortEnabled()) {
        this.addBuffer(getInputRowMeta(), r);
    } else {
        // Otherwise do grouping sort
        if (data.newBatch) {
            data.newBatch = false;
            setPrevious(r);
            // this lets the sort logic initialize its state.
            this.addBuffer(getInputRowMeta(), r);
        } else {
            if (this.sameGroup(data.previous, r)) {
                // setPrevious( r ); // we don't need to set it every time
                // this performs SortRows' normal row collection functionality.
                this.addBuffer(getInputRowMeta(), r);
            } else {
                this.preSortBeforeFlush();
                // flush sorted block to next step:
                this.passBuffer();
                // new sorted block beginning
                setPrevious(r);
                data.newBatch = true;
                this.addBuffer(getInputRowMeta(), r);
            }
        }
    }
    if (checkFeedback(getLinesRead())) {
        if (log.isBasic()) {
            logBasic("Linenr " + getLinesRead());
        }
    }
    return true;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) ArrayList(java.util.ArrayList) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) FileObject(org.apache.commons.vfs2.FileObject)
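
A minimal sketch (not from the Kettle sources) of the indexOfValue() lookup that processRow() uses above to resolve sort and group field names into row indexes:

// Hypothetical illustration: resolve field names to their positions in the row metadata.
RowMetaInterface rowMeta = new RowMeta();
rowMeta.addValueMeta(new ValueMetaString("name"));
rowMeta.addValueMeta(new ValueMetaString("city"));
int idx = rowMeta.indexOfValue("city"); // 1; a negative result means the field is missing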

Aggregations

FileObject (org.apache.commons.vfs2.FileObject) 38
KettleException (org.pentaho.di.core.exception.KettleException) 22
IOException (java.io.IOException) 20
FileSystemException (org.apache.commons.vfs2.FileSystemException) 12
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface) 9
KettleFileException (org.pentaho.di.core.exception.KettleFileException) 8
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString) 8
InputStream (java.io.InputStream) 7
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog) 6
File (java.io.File) 5
InputStreamReader (java.io.InputStreamReader) 5
ArrayList (java.util.ArrayList) 5
KettleStepException (org.pentaho.di.core.exception.KettleStepException) 4
KettleValueException (org.pentaho.di.core.exception.KettleValueException) 4
RowMeta (org.pentaho.di.core.row.RowMeta) 4
UnsupportedEncodingException (java.io.UnsupportedEncodingException) 3
HashMap (java.util.HashMap) 3
Map (java.util.Map) 3
TableItem (org.eclipse.swt.widgets.TableItem) 3
MetaborgException (org.metaborg.core.MetaborgException) 3