Search in sources :

Example 6 with MetaverseAnalyzerException

use of org.pentaho.metaverse.api.MetaverseAnalyzerException in project pentaho-metaverse by pentaho.

The class SelectValuesStepAnalyzer, method getChangeRecords.

/**
 * Builds the set of {@link ComponentDerivationRecord}s describing how the Select Values step
 * modifies its fields, covering both the 'Select &amp; Alter' tab (renames, length and precision
 * changes) and the 'Meta-data' tab (renames plus type, storage type, conversion mask, locale,
 * time zone, encoding and symbol changes).
 *
 * @param selectValuesMeta the Select Values step metadata to analyze
 * @return the change records for all modified fields
 * @throws MetaverseAnalyzerException if the analyzer state is invalid
 */
@Override
public Set<ComponentDerivationRecord> getChangeRecords(SelectValuesMeta selectValuesMeta) throws MetaverseAnalyzerException {
    validateState(null, selectValuesMeta);
    Set<ComponentDerivationRecord> changeRecords = new HashSet<ComponentDerivationRecord>();
    ComponentDerivationRecord changeRecord;
    // get a list of fields to be deleted, in lower case for case-insensitive comparisons
    final List<String> fieldDeletesLowerCase = selectValuesMeta.getDeleteName() == null ? new ArrayList<>() : Arrays.stream(selectValuesMeta.getDeleteName()).map(String::toLowerCase).collect(Collectors.toList());
    // Changes from the 'Meta-data' tab
    final SelectMetadataChange[] metadataChanges = selectValuesMeta.getMeta();
    // Process the fields/tabs in the same order as the real step does
    if (!Const.isEmpty(selectValuesMeta.getSelectName())) {
        String[] fieldNames = selectValuesMeta.getSelectName();
        // rename fields from the 'Select & Alter' tab
        String[] fieldRenames = selectValuesMeta.getSelectRename();
        int[] fieldLength = selectValuesMeta.getSelectLength();
        int[] fieldPrecision = selectValuesMeta.getSelectPrecision();
        // NOTE(review): assumes the rename/length/precision arrays are parallel to fieldNames;
        // Kettle maintains that invariant, but it is not validated here
        for (int i = 0; i < fieldNames.length; i++) {
            final String inputFieldName = fieldNames[i];
            final String outputFieldName = fieldRenames[i];
            // if the inputFieldName is being removed or renamed through the 'Select & Alter' tab or the 'Meta-data' tab,
            // DO NOT create a change record
            // Get a list of rename field names from the 'Meta-data' tab where the 'Fieldname' matches
            // (case-insensitive) inputFieldName, if the list is not empty, we know that inputFieldName is being renamed
            // via the 'Meta-data' tab and therefore we do not want to create a change record here, because it will be
            // addressed below, where the 'Meta-data' tab is being analyzed
            final List<String> metaRenameFieldsLowerCase = metadataChanges == null ? new ArrayList<>() : Arrays.stream(metadataChanges).filter(change -> change.getName().equalsIgnoreCase(inputFieldName)).map(e -> e.getRename()).collect(Collectors.toList());
            if (StringUtils.isEmpty(outputFieldName) && (fieldDeletesLowerCase.contains(inputFieldName.toLowerCase()) || !CollectionUtils.isEmpty(metaRenameFieldsLowerCase))) {
                continue;
            }
            changeRecord = new ComponentDerivationRecord(inputFieldName, outputFieldName == null ? inputFieldName : outputFieldName, ChangeType.METADATA);
            final Set<String> metadataChangedFields = new HashSet<>();
            if (inputFieldName != null && outputFieldName != null && !inputFieldName.equals(outputFieldName)) {
                metadataChangedFields.add("name");
            }
            // Check for changes in field length
            if (fieldLength != null && fieldLength[i] != NOT_CHANGED) {
                metadataChangedFields.add("length");
            }
            // Check for changes in field precision
            if (fieldPrecision != null && fieldPrecision[i] != NOT_CHANGED) {
                metadataChangedFields.add("precision");
            }
            if (!metadataChangedFields.isEmpty()) {
                // Add all the changed metadata fields as a single operation
                changeRecord.addOperation(new Operation(DictionaryConst.PROPERTY_MODIFIED, StringUtils.join(metadataChangedFields, ",")));
            }
            changeRecords.add(changeRecord);
        }
    }
    if (!Const.isEmpty(selectValuesMeta.getMeta())) {
        String[] prevStepNames = parentTransMeta.getPrevStepNames(getStepName());
        if (metadataChanges != null) {
            for (SelectMetadataChange metadataChange : metadataChanges) {
                final String inputFieldName = metadataChange.getName();
                final String outputFieldName = metadataChange.getRename();
                // if the inputFieldName is being removed, DO NOT create a change record
                if (StringUtils.isEmpty(outputFieldName) && fieldDeletesLowerCase.contains(inputFieldName.toLowerCase())) {
                    continue;
                }
                changeRecord = new ComponentDerivationRecord(inputFieldName, outputFieldName == null ? inputFieldName : outputFieldName, ChangeType.METADATA);
                Set<String> metadataChangedFields = new HashSet<String>();
                // NOTE: We use equalsIgnoreCase instead of equals because that's how Select Values currently works
                if (inputFieldName != null && outputFieldName != null && !inputFieldName.equalsIgnoreCase(outputFieldName)) {
                    metadataChangedFields.add("name");
                }
                // Get the ValueMetaInterface for the input field, to determine if any of its metadata has changed
                if (prevFields == null) {
                    prevFields = getInputFields(selectValuesMeta);
                    if (prevFields == null) {
                        log.warn(Messages.getString("WARNING.CannotDetermineFieldType", inputFieldName));
                        continue;
                    }
                }
                RowMetaInterface rowMetaInterface = prevFields.get(prevStepNames[0]);
                ValueMetaInterface inputFieldValueMeta = null;
                if (rowMetaInterface == null) {
                    log.warn(Messages.getString("WARNING.CannotDetermineFieldType", inputFieldName));
                    continue;
                }
                inputFieldValueMeta = rowMetaInterface.searchValueMeta(inputFieldName);
                if (inputFieldValueMeta == null) {
                    log.warn(Messages.getString("WARNING.CannotDetermineFieldType", inputFieldName));
                    continue;
                }
                // Check for changes in field type
                if (inputFieldValueMeta.getType() != metadataChange.getType()) {
                    metadataChangedFields.add("type");
                }
                // Check for changes in field length
                if (metadataChange.getLength() != NOT_CHANGED) {
                    metadataChangedFields.add("length");
                }
                // Check for changes in field precision
                if (metadataChange.getPrecision() != NOT_CHANGED) {
                    metadataChangedFields.add("precision");
                }
                // Check for changes in storage type (binary to string, e.g.)
                if ((metadataChange.getStorageType() != -1) && (inputFieldValueMeta.getStorageType() != metadataChange.getStorageType())) {
                    metadataChangedFields.add("storagetype");
                }
                // Check for changes in conversion mask
                if ((metadataChange.getConversionMask() != null) && (inputFieldValueMeta.getConversionMask() == null || !inputFieldValueMeta.getConversionMask().equals(metadataChange.getConversionMask()))) {
                    metadataChangedFields.add("conversionmask");
                }
                // Check for changes in date format leniency
                if (inputFieldValueMeta.isDateFormatLenient() != metadataChange.isDateFormatLenient()) {
                    metadataChangedFields.add("dateformatlenient");
                }
                // Check for changes in date format locale
                if ((metadataChange.getDateFormatLocale() != null) && (inputFieldValueMeta.getDateFormatLocale() == null || !inputFieldValueMeta.getDateFormatLocale().toString().equals(metadataChange.getDateFormatLocale()))) {
                    metadataChangedFields.add("datelocale");
                }
                // Check for changes in date format time zone
                if ((metadataChange.getDateFormatTimeZone() != null) && (inputFieldValueMeta.getDateFormatTimeZone() == null || !inputFieldValueMeta.getDateFormatTimeZone().toString().equals(metadataChange.getDateFormatTimeZone()))) {
                    metadataChangedFields.add("datetimezone");
                }
                // Check for changes in lenient string-to-number conversion
                if (inputFieldValueMeta.isLenientStringToNumber() != metadataChange.isLenientStringToNumber()) {
                    metadataChangedFields.add("lenientnumberconversion");
                }
                // Check for changes in encoding
                // BUG FIX: this check previously compared the string encoding against
                // metadataChange.getDateFormatTimeZone() (copy-paste from the time-zone check above);
                // the 'Meta-data' tab encoding is exposed via getEncoding()
                if ((metadataChange.getEncoding() != null) && (inputFieldValueMeta.getStringEncoding() == null || !inputFieldValueMeta.getStringEncoding().equals(metadataChange.getEncoding()))) {
                    metadataChangedFields.add("encoding");
                }
                // Check for changes in decimal symbol
                if ((metadataChange.getDecimalSymbol() != null) && (inputFieldValueMeta.getDecimalSymbol() == null || !inputFieldValueMeta.getDecimalSymbol().equals(metadataChange.getDecimalSymbol()))) {
                    metadataChangedFields.add("decimalsymbol");
                }
                // Check for changes in grouping symbol
                if ((metadataChange.getGroupingSymbol() != null) && (inputFieldValueMeta.getGroupingSymbol() == null || !inputFieldValueMeta.getGroupingSymbol().equals(metadataChange.getGroupingSymbol()))) {
                    metadataChangedFields.add("groupsymbol");
                }
                // Check for changes in currency symbol
                if ((metadataChange.getCurrencySymbol() != null) && (inputFieldValueMeta.getCurrencySymbol() == null || !inputFieldValueMeta.getCurrencySymbol().equals(metadataChange.getCurrencySymbol()))) {
                    metadataChangedFields.add("currencysymbol");
                }
                if (!metadataChangedFields.isEmpty()) {
                    // Add all the changed metadata fields as a single operation
                    changeRecord.addOperation(new Operation(DictionaryConst.PROPERTY_MODIFIED, StringUtils.join(metadataChangedFields, ",")));
                }
                changeRecords.add(changeRecord);
            }
        }
    }
    return changeRecords;
}
Also used : StringUtils(org.apache.commons.lang.StringUtils) Arrays(java.util.Arrays) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) LoggerFactory(org.slf4j.LoggerFactory) StepField(org.pentaho.metaverse.api.StepField) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) TransMeta(org.pentaho.di.trans.TransMeta) Const(org.pentaho.di.core.Const) CollectionUtils(org.apache.commons.collections.CollectionUtils) StepAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.step.StepAnalyzer) SelectValuesMeta(org.pentaho.di.trans.steps.selectvalues.SelectValuesMeta) Logger(org.slf4j.Logger) SelectMetadataChange(org.pentaho.di.trans.steps.selectvalues.SelectMetadataChange) ComponentDerivationRecord(org.pentaho.metaverse.api.analyzer.kettle.ComponentDerivationRecord) Set(java.util.Set) Collectors(java.util.stream.Collectors) IClonableStepAnalyzer(org.pentaho.metaverse.api.analyzer.kettle.step.IClonableStepAnalyzer) ChangeType(org.pentaho.metaverse.api.ChangeType) DictionaryConst(org.pentaho.dictionary.DictionaryConst) List(java.util.List) Messages(org.pentaho.metaverse.messages.Messages) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) BaseStepMeta(org.pentaho.di.trans.step.BaseStepMeta) Operation(org.pentaho.metaverse.api.model.Operation) ArrayUtils(org.apache.commons.lang.ArrayUtils) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) Operation(org.pentaho.metaverse.api.model.Operation) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface) ComponentDerivationRecord(org.pentaho.metaverse.api.analyzer.kettle.ComponentDerivationRecord) SelectMetadataChange(org.pentaho.di.trans.steps.selectvalues.SelectMetadataChange) HashSet(java.util.HashSet)

Example 7 with MetaverseAnalyzerException

use of org.pentaho.metaverse.api.MetaverseAnalyzerException in project pentaho-metaverse by pentaho.

The class JobJobEntryAnalyzer, method customAnalyze.

/**
 * Analyzes a Job job entry by resolving the sub-job it executes (from a file name, a
 * repository directory + name, or a repository object reference), linking the root node to
 * the sub-job node with an "executes" edge, and optionally pulling the sub-job's lineage
 * into the current graph.
 *
 * @param entry    the job entry being analyzed
 * @param rootNode the metaverse node representing the job entry
 * @throws MetaverseAnalyzerException if the sub-job cannot be located or loaded
 */
@Override
protected void customAnalyze(JobEntryJob entry, IMetaverseNode rootNode) throws MetaverseAnalyzerException {
    JobMeta subJobMeta = null;
    JobMeta parentJobMeta = entry.getParentJob().getJobMeta();
    // For some reason the JobMeta's variables have been reset by now, so re-activate them
    parentJobMeta.activateParameters();
    Repository repo = parentJobMeta.getRepository();
    String jobPath = null;
    // Load failures are captured here rather than thrown immediately so the path property
    // can still be set on the root node before the exception is propagated (see below)
    MetaverseAnalyzerException exception = null;
    switch(entry.getSpecificationMethod()) {
        case FILENAME:
            // Sub-job referenced by (variable-substituted, normalized) file path
            try {
                jobPath = parentJobMeta.environmentSubstitute(entry.getFilename());
                String normalized = KettleAnalyzerUtil.normalizeFilePath(jobPath);
                subJobMeta = getSubJobMeta(normalized);
                jobPath = normalized;
            } catch (Exception e) {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobNotFoundInParentJob", jobPath, parentJobMeta.toString()), e);
            }
            break;
        case REPOSITORY_BY_NAME:
            // Sub-job referenced by repository directory + job name
            if (repo != null) {
                String dir = parentJobMeta.environmentSubstitute(entry.getDirectory());
                String file = parentJobMeta.environmentSubstitute(entry.getJobName());
                try {
                    RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
                    subJobMeta = repo.loadJob(file, rdi, null, null);
                    // Repository jobs may have no filename; fall back to the meta's string form
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobNotFoundInParentJob", file, parentJobMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForJobSubJob", parentJobMeta.toString()));
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            // Sub-job referenced by repository object id
            if (repo != null) {
                try {
                    subJobMeta = repo.loadJob(entry.getJobObjectId(), null);
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    // NOTE(review): this key is "ERROR.SubJobsNotFoundInParentJob" (plural) while the
                    // other branches use "ERROR.SubJobNotFoundInParentJob" — confirm the plural key
                    // actually exists in the message bundle
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobsNotFoundInParentJob", (entry.getJobObjectId() == null ? "N/A" : entry.getJobObjectId().toString()), parentJobMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForJobSubJob", parentJobMeta.toString()));
            }
            break;
    }
    // Record whatever path was resolved (possibly null) even on failure, then rethrow
    rootNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    if (exception != null) {
        throw exception;
    }
    subJobMeta.copyVariablesFrom(parentJobMeta);
    subJobMeta.setFilename(jobPath);
    // Create the sub-job node under the parent's namespace and link it as "executes"
    IComponentDescriptor ds = new MetaverseComponentDescriptor(subJobMeta.getName(), DictionaryConst.NODE_TYPE_JOB, descriptor.getNamespace().getParentNamespace());
    IMetaverseNode jobNode = createNodeFromDescriptor(ds);
    jobNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
    jobNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    jobNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    metaverseBuilder.addLink(rootNode, DictionaryConst.LINK_EXECUTES, jobNode);
    // pull in the sub-job lineage only if the consolidateSubGraphs flag is set to true
    if (MetaverseConfig.consolidateSubGraphs()) {
        final IDocument subTransDocument = KettleAnalyzerUtil.buildDocument(getMetaverseBuilder(), subJobMeta, jobPath, getDocumentDescriptor().getNamespace());
        if (subTransDocument != null) {
            final IComponentDescriptor subtransDocumentDescriptor = new MetaverseComponentDescriptor(subTransDocument.getStringID(), DictionaryConst.NODE_TYPE_TRANS, getDocumentDescriptor().getNamespace(), getDescriptor().getContext());
            // analyze the sub-job
            getDocumentAnalyzer().analyze(subtransDocumentDescriptor, subJobMeta, jobNode, jobPath);
        }
    }
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) Repository(org.pentaho.di.repository.Repository) IComponentDescriptor(org.pentaho.metaverse.api.IComponentDescriptor) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) KettleException(org.pentaho.di.core.exception.KettleException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) FileNotFoundException(java.io.FileNotFoundException) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) IDocument(org.pentaho.metaverse.api.IDocument) MetaverseComponentDescriptor(org.pentaho.metaverse.api.MetaverseComponentDescriptor)

Example 8 with MetaverseAnalyzerException

use of org.pentaho.metaverse.api.MetaverseAnalyzerException in project pentaho-metaverse by pentaho.

The class JobExecutorStepAnalyzer, method customAnalyze.

/**
 * Analyzes a Job Executor step by resolving the sub-job it runs (from a file name, a
 * repository directory + name, or a repository object reference), linking the step node to
 * the sub-job node with an "executes" edge, recording the job path and result-target-step
 * properties, and optionally pulling the sub-job's lineage into the current graph.
 *
 * @param meta the Job Executor step metadata
 * @param node the metaverse node representing this step
 * @throws MetaverseAnalyzerException if the sub-job cannot be located or loaded
 */
@Override
protected void customAnalyze(JobExecutorMeta meta, IMetaverseNode node) throws MetaverseAnalyzerException {
    String jobPath = meta.getFileName();
    JobMeta subJobMeta = null;
    Repository repo = parentTransMeta.getRepository();
    // Load failures are captured here rather than thrown immediately so the path property
    // can still be recorded on the root node before the exception is propagated (see below)
    MetaverseAnalyzerException exception = null;
    switch(meta.getSpecificationMethod()) {
        case FILENAME:
            // Sub-job referenced by (variable-substituted, normalized) file path
            jobPath = parentTransMeta.environmentSubstitute(meta.getFileName());
            try {
                String normalized = KettleAnalyzerUtil.normalizeFilePath(jobPath);
                subJobMeta = getSubJobMeta(parentTransMeta, normalized);
                jobPath = normalized;
            } catch (Exception e) {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobNotFoundInParentTrans", jobPath, parentTransMeta.toString()), e);
            }
            break;
        case REPOSITORY_BY_NAME:
            // Sub-job referenced by repository directory + job name
            if (repo != null) {
                String dir = parentTransMeta.environmentSubstitute(meta.getDirectoryPath());
                String file = parentTransMeta.environmentSubstitute(meta.getJobName());
                try {
                    RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
                    subJobMeta = repo.loadJob(file, rdi, null, null);
                    // Repository jobs may have no filename; fall back to the meta's string form
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobNotFoundInParentTrans", file, parentTransMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForTransSubJob", parentTransMeta.toString()));
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            // Sub-job referenced by repository object id
            if (repo != null) {
                try {
                    subJobMeta = repo.loadJob(meta.getJobObjectId(), null);
                    String filename = subJobMeta.getFilename() == null ? subJobMeta.toString() : subJobMeta.getFilename();
                    jobPath = filename + "." + subJobMeta.getDefaultExtension();
                } catch (KettleException e) {
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubJobNotFoundInParentTrans", (meta.getJobObjectId() == null ? "N/A" : meta.getJobObjectId().toString()), parentTransMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForTransSubJob", parentTransMeta.toString()));
            }
            break;
    }
    // Record whatever path was resolved even on failure, then rethrow.
    // NOTE(review): rootNode is the inherited StepAnalyzer field; it should refer to the same
    // node as the 'node' parameter here — confirm
    rootNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    if (exception != null) {
        throw exception;
    }
    subJobMeta.copyVariablesFrom(parentTransMeta);
    subJobMeta.setFilename(jobPath);
    // Create the sub-job node under the parent's namespace and link it as "executes"
    IComponentDescriptor ds = new MetaverseComponentDescriptor(subJobMeta.getName(), DictionaryConst.NODE_TYPE_JOB, descriptor.getNamespace().getParentNamespace());
    IMetaverseNode jobNode = createNodeFromDescriptor(ds);
    jobNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
    jobNode.setProperty(DictionaryConst.PROPERTY_PATH, jobPath);
    jobNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    metaverseBuilder.addLink(node, DictionaryConst.LINK_EXECUTES, jobNode);
    final IDocument subTransDocument = KettleAnalyzerUtil.buildDocument(getMetaverseBuilder(), subJobMeta, jobPath, getDocumentDescriptor().getNamespace());
    node.setProperty(JOB_TO_EXECUTE, jobPath);
    if (StringUtils.isNotEmpty(meta.getExecutionResultTargetStep())) {
        node.setProperty(EXECUTION_RESULTS_TARGET, meta.getExecutionResultTargetStep());
    }
    if (StringUtils.isNotEmpty(meta.getResultFilesTargetStep())) {
        node.setProperty(RESULT_FILES_TARGET, meta.getResultFilesTargetStep());
    }
    // pull in the sub-job lineage only if the consolidateSubGraphs flag is set to true
    if (MetaverseConfig.consolidateSubGraphs()) {
        // BUG FIX: guard against a null document before dereferencing it, consistent with the
        // sibling analyzers; previously subTransDocument.getStringID() could throw a
        // NullPointerException when buildDocument returned null
        if (subTransDocument != null) {
            final IComponentDescriptor subtransDocumentDescriptor = new MetaverseComponentDescriptor(subTransDocument.getStringID(), DictionaryConst.NODE_TYPE_TRANS, getDocumentDescriptor().getNamespace(), getDescriptor().getContext());
            // analyze the sub-job
            final JobAnalyzer jobAnalyzer = new JobAnalyzer();
            jobAnalyzer.setJobEntryAnalyzerProvider(PentahoSystem.get(IJobEntryAnalyzerProvider.class));
            jobAnalyzer.setMetaverseBuilder(getMetaverseBuilder());
            jobAnalyzer.analyze(subtransDocumentDescriptor, subJobMeta, jobNode, jobPath);
            connectToSubJobOutputFields(meta, subJobMeta, jobNode, descriptor);
        }
    }
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) KettleException(org.pentaho.di.core.exception.KettleException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) FileNotFoundException(java.io.FileNotFoundException) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) MetaverseComponentDescriptor(org.pentaho.metaverse.api.MetaverseComponentDescriptor) Repository(org.pentaho.di.repository.Repository) IComponentDescriptor(org.pentaho.metaverse.api.IComponentDescriptor) JobAnalyzer(org.pentaho.metaverse.analyzer.kettle.JobAnalyzer) IJobEntryAnalyzerProvider(org.pentaho.metaverse.api.analyzer.kettle.jobentry.IJobEntryAnalyzerProvider) IDocument(org.pentaho.metaverse.api.IDocument)

Example 9 with MetaverseAnalyzerException

use of org.pentaho.metaverse.api.MetaverseAnalyzerException in project pentaho-metaverse by pentaho.

The class TransJobEntryAnalyzer, method customAnalyze.

/**
 * Analyzes a Transformation job entry by resolving the sub-transformation it executes (from
 * a file name, a repository directory + name, or a repository object reference), linking the
 * root node to the transformation node with an "executes" edge, and optionally pulling the
 * sub-transformation's lineage into the current graph.
 *
 * @param entry    the job entry being analyzed
 * @param rootNode the metaverse node representing the job entry
 * @throws MetaverseAnalyzerException if the sub-transformation cannot be located or loaded
 */
@Override
protected void customAnalyze(JobEntryTrans entry, IMetaverseNode rootNode) throws MetaverseAnalyzerException {
    TransMeta subTransMeta = null;
    JobMeta parentJobMeta = entry.getParentJob().getJobMeta();
    // For some reason the JobMeta's variables have been reset by now, so re-activate them
    parentJobMeta.activateParameters();
    Repository repo = parentJobMeta.getRepository();
    String transPath = null;
    // Load failures are captured here rather than thrown immediately so the path property
    // can still be set on the root node before the exception is propagated (see below)
    MetaverseAnalyzerException exception = null;
    switch(entry.getSpecificationMethod()) {
        case FILENAME:
            // Sub-transformation referenced by (variable-substituted, normalized) file path
            try {
                transPath = parentJobMeta.environmentSubstitute(entry.getFilename());
                String normalized = KettleAnalyzerUtil.normalizeFilePath(transPath);
                subTransMeta = getSubTransMeta(normalized);
                // NOTE(review): copies variables FROM the sub-transformation INTO the entry —
                // confirm this direction is intended (the other branches do not do this)
                entry.copyVariablesFrom(subTransMeta);
                transPath = normalized;
            } catch (Exception e) {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubTransNotFoundInParentJob", transPath, parentJobMeta.toString()), e);
            }
            break;
        case REPOSITORY_BY_NAME:
            // Sub-transformation referenced by repository directory + transformation name
            if (repo != null) {
                String dir = parentJobMeta.environmentSubstitute(entry.getDirectory());
                String file = parentJobMeta.environmentSubstitute(entry.getTransname());
                try {
                    RepositoryDirectoryInterface rdi = repo.findDirectory(dir);
                    subTransMeta = repo.loadTransformation(file, rdi, null, true, null);
                    transPath = subTransMeta.getPathAndName() + "." + subTransMeta.getDefaultExtension();
                } catch (KettleException e) {
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubTransNotFoundInParentJob", file, parentJobMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForJobSubTrans", parentJobMeta.toString()));
            }
            break;
        case REPOSITORY_BY_REFERENCE:
            // Sub-transformation referenced by repository object id
            if (repo != null) {
                try {
                    subTransMeta = repo.loadTransformation(entry.getTransObjectId(), null);
                    transPath = subTransMeta.getPathAndName() + "." + subTransMeta.getDefaultExtension();
                } catch (KettleException e) {
                    exception = new MetaverseAnalyzerException(Messages.getString("ERROR.SubTransNotFoundInParentJob", (entry.getTransObjectId() == null ? "N/A" : entry.getTransObjectId().toString()), parentJobMeta.toString()), e);
                }
            } else {
                exception = new MetaverseAnalyzerException(Messages.getString("ERROR.MissingConnectionForJobSubTrans", parentJobMeta.toString()));
            }
            break;
    }
    // Record whatever path was resolved (possibly null) even on failure, then rethrow
    rootNode.setProperty(DictionaryConst.PROPERTY_PATH, transPath);
    if (exception != null) {
        throw exception;
    }
    subTransMeta.copyVariablesFrom(parentJobMeta);
    subTransMeta.setFilename(transPath);
    // Create the sub-transformation node under the parent's namespace and link it as "executes"
    IComponentDescriptor ds = new MetaverseComponentDescriptor(subTransMeta.getName(), DictionaryConst.NODE_TYPE_TRANS, descriptor.getNamespace().getParentNamespace());
    IMetaverseNode transformationNode = createNodeFromDescriptor(ds);
    transformationNode.setProperty(DictionaryConst.PROPERTY_NAMESPACE, ds.getNamespaceId());
    transformationNode.setProperty(DictionaryConst.PROPERTY_PATH, transPath);
    transformationNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DOCUMENT);
    metaverseBuilder.addLink(rootNode, DictionaryConst.LINK_EXECUTES, transformationNode);
    // pull in the sub-job lineage only if the consolidateSubGraphs flag is set to true
    if (MetaverseConfig.consolidateSubGraphs()) {
        final IDocument subTransDocument = KettleAnalyzerUtil.buildDocument(getMetaverseBuilder(), subTransMeta, transPath, getDocumentDescriptor().getNamespace());
        if (subTransDocument != null) {
            final IComponentDescriptor subtransDocumentDescriptor = new MetaverseComponentDescriptor(subTransDocument.getStringID(), DictionaryConst.NODE_TYPE_TRANS, getDocumentDescriptor().getNamespace(), getDescriptor().getContext());
            // analyze the sub-transformation
            final TransformationAnalyzer transformationAnalyzer = new TransformationAnalyzer();
            transformationAnalyzer.setStepAnalyzerProvider(PentahoSystem.get(IStepAnalyzerProvider.class));
            transformationAnalyzer.setMetaverseBuilder(getMetaverseBuilder());
            transformationAnalyzer.analyze(subtransDocumentDescriptor, subTransMeta, transformationNode, transPath);
        }
    }
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) KettleException(org.pentaho.di.core.exception.KettleException) JobMeta(org.pentaho.di.job.JobMeta) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode) TransMeta(org.pentaho.di.trans.TransMeta) KettleException(org.pentaho.di.core.exception.KettleException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleMissingPluginsException(org.pentaho.di.core.exception.KettleMissingPluginsException) FileNotFoundException(java.io.FileNotFoundException) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) MetaverseComponentDescriptor(org.pentaho.metaverse.api.MetaverseComponentDescriptor) Repository(org.pentaho.di.repository.Repository) IComponentDescriptor(org.pentaho.metaverse.api.IComponentDescriptor) TransformationAnalyzer(org.pentaho.metaverse.analyzer.kettle.TransformationAnalyzer) IStepAnalyzerProvider(org.pentaho.metaverse.api.analyzer.kettle.step.IStepAnalyzerProvider) IDocument(org.pentaho.metaverse.api.IDocument)

Example 10 with MetaverseAnalyzerException

use of org.pentaho.metaverse.api.MetaverseAnalyzerException in project pentaho-metaverse by pentaho.

The class DatabaseConnectionAnalyzer, method analyze.

/**
 * Builds a metaverse "datasource" node for the supplied database connection, copying its
 * access type, database name, connection name, database type, port, host name, user name
 * and shared flag onto the node (with variable substitution applied where appropriate).
 *
 * @param descriptor the descriptor used to create the node
 * @param dbMeta     the database connection metadata to analyze
 * @return the populated datasource node
 * @throws MetaverseAnalyzerException if dbMeta, the object factory, or the builder is null
 * @see org.pentaho.metaverse.api.IAnalyzer#analyze(IComponentDescriptor, java.lang.Object)
 */
@Override
public IMetaverseNode analyze(IComponentDescriptor descriptor, DatabaseMeta dbMeta) throws MetaverseAnalyzerException {
    if (dbMeta == null) {
        throw new MetaverseAnalyzerException(Messages.getString("ERROR.DatabaseMeta.IsNull"));
    }
    if (metaverseObjectFactory == null) {
        throw new MetaverseAnalyzerException(Messages.getString("ERROR.MetaverseObjectFactory.IsNull"));
    }
    if (metaverseBuilder == null) {
        throw new MetaverseAnalyzerException(Messages.getString("ERROR.MetaverseBuilder.IsNull"));
    }
    final IMetaverseNode datasourceNode = createNodeFromDescriptor(descriptor);
    datasourceNode.setType(DictionaryConst.NODE_TYPE_DATASOURCE);
    datasourceNode.setProperty("accessType", dbMeta.getAccessType());
    // kept in a local because it also decides which logical-id generator to use below
    final String accessTypeDescription = dbMeta.environmentSubstitute(dbMeta.getAccessTypeDesc());
    datasourceNode.setProperty("accessTypeDesc", accessTypeDescription);
    datasourceNode.setProperty("databaseName", dbMeta.environmentSubstitute(dbMeta.getDatabaseName()));
    datasourceNode.setProperty("name", dbMeta.environmentSubstitute(dbMeta.getName()));
    final DatabaseInterface databaseInterface = dbMeta.getDatabaseInterface();
    datasourceNode.setProperty("databaseType", databaseInterface == null ? "Unknown" : Const.NVL(databaseInterface.getPluginName(), "Unknown"));
    datasourceNode.setProperty(DictionaryConst.PROPERTY_PORT, dbMeta.environmentSubstitute(dbMeta.getDatabasePortNumberString()));
    datasourceNode.setProperty(DictionaryConst.PROPERTY_HOST_NAME, dbMeta.environmentSubstitute(dbMeta.getHostname()));
    datasourceNode.setProperty(DictionaryConst.PROPERTY_USER_NAME, dbMeta.environmentSubstitute(dbMeta.getUsername()));
    datasourceNode.setProperty("shared", dbMeta.isShared());
    // JNDI connections use the JNDI-specific logical-id generator; everything else uses the default
    if ("JNDI".equals(accessTypeDescription)) {
        datasourceNode.setLogicalIdGenerator(DictionaryConst.LOGICAL_ID_GENERATOR_DB_JNDI);
    } else {
        datasourceNode.setLogicalIdGenerator(getLogicalIdGenerator());
    }
    return datasourceNode;
}
Also used : DatabaseInterface(org.pentaho.di.core.database.DatabaseInterface) MetaverseAnalyzerException(org.pentaho.metaverse.api.MetaverseAnalyzerException) IMetaverseNode(org.pentaho.metaverse.api.IMetaverseNode)

Aggregations

MetaverseAnalyzerException (org.pentaho.metaverse.api.MetaverseAnalyzerException)23 IMetaverseNode (org.pentaho.metaverse.api.IMetaverseNode)13 IComponentDescriptor (org.pentaho.metaverse.api.IComponentDescriptor)9 MetaverseComponentDescriptor (org.pentaho.metaverse.api.MetaverseComponentDescriptor)9 KettleException (org.pentaho.di.core.exception.KettleException)7 KettleXMLException (org.pentaho.di.core.exception.KettleXMLException)6 TransMeta (org.pentaho.di.trans.TransMeta)6 JobMeta (org.pentaho.di.job.JobMeta)5 INamespace (org.pentaho.metaverse.api.INamespace)5 Namespace (org.pentaho.metaverse.api.Namespace)5 FileNotFoundException (java.io.FileNotFoundException)4 KettleMissingPluginsException (org.pentaho.di.core.exception.KettleMissingPluginsException)4 RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface)4 Repository (org.pentaho.di.repository.Repository)4 RepositoryDirectoryInterface (org.pentaho.di.repository.RepositoryDirectoryInterface)4 BaseStepMeta (org.pentaho.di.trans.step.BaseStepMeta)4 HashSet (java.util.HashSet)3 ProgressNullMonitorListener (org.pentaho.di.core.ProgressNullMonitorListener)3 KettleStepException (org.pentaho.di.core.exception.KettleStepException)3 IDocument (org.pentaho.metaverse.api.IDocument)3