Use of org.pentaho.di.core.parameters.UnknownParamException in the project pentaho-metaverse by pentaho: the class TransMetaJsonSerializerTest, method testSerializeParams.
@Test
public void testSerializeParams() throws Exception {
  final String[] params = new String[] { "param1", "param2", "invalid" };
  when( transMeta.listParameters() ).thenReturn( params );
  // Stub the two well-formed parameters identically.
  for ( final String validParam : new String[] { "param1", "param2" } ) {
    when( transMeta.getParameterDescription( validParam ) ).thenReturn( "paramDescription" );
    when( transMeta.getParameterDefault( validParam ) ).thenReturn( "defaultValue" );
  }
  // The "invalid" parameter drives the exception-handling branch for coverage.
  when( transMeta.getParameterDescription( "invalid" ) ).thenThrow( new UnknownParamException() );
  serializer.serializeParameters( transMeta, json );
  // The parameters array must be opened, and only the valid parameters written.
  verify( json ).writeArrayFieldStart( "parameters" );
  verify( json, times( params.length - 1 ) ).writeObject( any( IParamInfo.class ) );
}
Use of org.pentaho.di.core.parameters.UnknownParamException in the project pentaho-metaverse by pentaho: the class TransformationAnalyzer, method analyze.
/**
 * Analyzes a transformation document: decorates the supplied node with the transformation's
 * standard properties and declared parameters, analyzes each step (delegating to registered
 * step analyzers, annotation-driven analyzers, or a generic fallback), models the hops between
 * steps as typed links, adds the node to the metaverse, and finally runs post-analysis on any
 * clonable step analyzers.
 *
 * @param documentDescriptor descriptor for the document being analyzed
 * @param meta               the transformation metadata; expected to be a {@link TransMeta}
 * @param node               the metaverse node representing the transformation
 * @param documentPath       the filesystem/repository path of the document
 * @return the decorated transformation node
 * @throws MetaverseAnalyzerException if a parameter listed by the meta turns out to be unknown
 */
@Override
public synchronized IMetaverseNode analyze(final IComponentDescriptor documentDescriptor, final AbstractMeta meta, final IMetaverseNode node, final String documentPath) throws MetaverseAnalyzerException {
  final TransMeta transMeta = (TransMeta) meta;
  // Instantiating a Trans and calling setInternalKettleVariables initializes the meta's
  // internal variables before analysis begins.
  Trans t = new Trans(transMeta);
  t.setInternalKettleVariables(transMeta);
  // Pull out the standard fields; each property is only set when a value is present.
  String description = transMeta.getDescription();
  if (description != null) {
    node.setProperty(DictionaryConst.PROPERTY_DESCRIPTION, description);
  }
  String extendedDescription = transMeta.getExtendedDescription();
  if (extendedDescription != null) {
    node.setProperty("extendedDescription", extendedDescription);
  }
  // Timestamps are stored as string-encoded epoch milliseconds.
  Date createdDate = transMeta.getCreatedDate();
  if (createdDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED, Long.toString(createdDate.getTime()));
  }
  String createdUser = transMeta.getCreatedUser();
  if (createdUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_CREATED_BY, createdUser);
  }
  Date lastModifiedDate = transMeta.getModifiedDate();
  if (lastModifiedDate != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED, Long.toString(lastModifiedDate.getTime()));
  }
  String lastModifiedUser = transMeta.getModifiedUser();
  if (lastModifiedUser != null) {
    node.setProperty(DictionaryConst.PROPERTY_LAST_MODIFIED_BY, lastModifiedUser);
  }
  String version = transMeta.getTransversion();
  if (version != null) {
    node.setProperty(DictionaryConst.PROPERTY_ARTIFACT_VERSION, version);
  }
  // A message key starting with "!" means the localized status string was not found; skip it.
  String status = Messages.getString("INFO.JobOrTrans.Status_" + transMeta.getTransstatus());
  if (status != null && !status.startsWith("!")) {
    node.setProperty(DictionaryConst.PROPERTY_STATUS, status);
  }
  node.setProperty(DictionaryConst.PROPERTY_PATH, documentPath);
  String[] parameters = transMeta.listParameters();
  if (parameters != null) {
    for (String parameter : parameters) {
      try {
        // Determine parameter properties and add them to a map, then the map to the list
        String defaultParameterValue = transMeta.getParameterDefault(parameter);
        String parameterValue = transMeta.getParameterValue(parameter);
        String parameterDescription = transMeta.getParameterDescription(parameter);
        PropertiesHolder paramProperties = new PropertiesHolder();
        paramProperties.setProperty("defaultValue", defaultParameterValue);
        paramProperties.setProperty("value", parameterValue);
        paramProperties.setProperty("description", parameterDescription);
        node.setProperty("parameter_" + parameter, paramProperties.toString());
      } catch (UnknownParamException upe) {
        // This shouldn't happen as we're using the list provided by the meta
        throw new MetaverseAnalyzerException(upe);
      }
    }
  }
  // Holders are collected so clonable analyzers can be post-processed after all steps run.
  final List<AnalyzerHolder> analyzerHolders = new ArrayList<>();
  // handle the steps
  for (int stepNr = 0; stepNr < transMeta.nrSteps(); stepNr++) {
    StepMeta stepMeta = transMeta.getStep(stepNr);
    try {
      if (stepMeta != null) {
        if (stepMeta.getParentTransMeta() == null) {
          stepMeta.setParentTransMeta(transMeta);
        }
        IMetaverseNode stepNode = null;
        IComponentDescriptor stepDescriptor = new MetaverseComponentDescriptor(stepMeta.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP, node, documentDescriptor.getContext());
        Set<IStepAnalyzer> stepAnalyzers = getStepAnalyzers(stepMeta);
        final BaseStepMeta baseStepMeta = getBaseStepMetaFromStepMeta(stepMeta);
        if (stepAnalyzers != null && !stepAnalyzers.isEmpty()) {
          for (IStepAnalyzer stepAnalyzer : stepAnalyzers) {
            // Clone the analyzer so per-document state does not change while the
            // transformation is being analyzed.
            if (stepAnalyzer instanceof IClonableStepAnalyzer) {
              stepAnalyzer = ((IClonableStepAnalyzer) stepAnalyzer).cloneAnalyzer();
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentAnalyzer(this);
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentDescriptor(documentDescriptor);
              ((IClonableStepAnalyzer) stepAnalyzer).setDocumentPath(documentPath);
            } else {
              log.debug(Messages.getString("WARNING.CannotCloneAnalyzer"), stepAnalyzer);
            }
            stepAnalyzer.setMetaverseBuilder(metaverseBuilder);
            stepNode = (IMetaverseNode) stepAnalyzer.analyze(stepDescriptor, baseStepMeta);
            analyzerHolders.add(new AnalyzerHolder(stepAnalyzer, baseStepMeta, stepNode));
          }
        } else if ((new AnnotatedClassFields(baseStepMeta)).hasMetaverseAnnotations()) {
          // No registered analyzer, but the step meta carries metaverse annotations.
          AnnotationDrivenStepMetaAnalyzer annotationDrivenStepMetaAnalyzer = new AnnotationDrivenStepMetaAnalyzer(baseStepMeta);
          annotationDrivenStepMetaAnalyzer.setMetaverseBuilder(metaverseBuilder);
          annotationDrivenStepMetaAnalyzer.setDocumentAnalyzer(this);
          annotationDrivenStepMetaAnalyzer.setDocumentDescriptor(documentDescriptor);
          annotationDrivenStepMetaAnalyzer.setDocumentPath(documentPath);
          stepNode = annotationDrivenStepMetaAnalyzer.analyze(stepDescriptor, baseStepMeta);
          analyzerHolders.add(new AnalyzerHolder(annotationDrivenStepMetaAnalyzer, baseStepMeta, stepNode));
        } else {
          // Fall back to the generic analyzer when nothing more specific applies.
          GenericStepMetaAnalyzer defaultStepAnalyzer = new GenericStepMetaAnalyzer();
          defaultStepAnalyzer.setMetaverseBuilder(metaverseBuilder);
          stepNode = defaultStepAnalyzer.analyze(stepDescriptor, getBaseStepMetaFromStepMeta(stepMeta));
        }
        if (stepNode != null) {
          metaverseBuilder.addLink(node, DictionaryConst.LINK_CONTAINS, stepNode);
        }
      }
    } catch (Throwable mae) {
      // Don't throw an exception, just log and carry on
      log.warn(Messages.getString("ERROR.ErrorDuringAnalysis", stepMeta.getName(), Const.NVL(mae.getLocalizedMessage(), "Unspecified")));
      log.debug(Messages.getString("ERROR.ErrorDuringAnalysisStackTrace"), mae);
    }
  }
  // Model the hops between steps
  int numHops = transMeta.nrTransHops();
  for (int i = 0; i < numHops; i++) {
    TransHopMeta hop = transMeta.getTransHop(i);
    StepMeta fromStep = hop.getFromStep();
    StepMeta toStep = hop.getToStep();
    INamespace childNs = new Namespace(node.getLogicalId());
    // process legitimate hops
    if (fromStep != null && toStep != null) {
      IMetaverseNode fromStepNode = metaverseObjectFactory.createNodeObject(childNs, fromStep.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP);
      IMetaverseNode toStepNode = metaverseObjectFactory.createNodeObject(childNs, toStep.getName(), DictionaryConst.NODE_TYPE_TRANS_STEP);
      // Create and decorate the link between the steps
      IMetaverseLink link = metaverseObjectFactory.createLinkObject();
      link.setFromNode(fromStepNode);
      link.setLabel(DictionaryConst.LINK_HOPSTO);
      link.setToNode(toStepNode);
      // Is this hop enabled?
      link.setProperty(DictionaryConst.PROPERTY_ENABLED, hop.isEnabled());
      // Add metadata about the type of stream (target, error, info) it is. Default to "target".
      String linkType = "target";
      if (fromStep.isSendingErrorRowsToStep(toStep)) {
        linkType = "error";
      } else {
        String[] infoStepnames = toStep.getStepMetaInterface().getStepIOMeta().getInfoStepnames();
        // If the "from" step is the source of an info stream to the "to" step, it's an "info" hop
        if (Const.indexOfString(fromStep.getName(), infoStepnames) >= 0) {
          linkType = "info";
        }
      }
      link.setProperty(DictionaryConst.PROPERTY_TYPE, linkType);
      metaverseBuilder.addLink(link);
    }
  }
  metaverseBuilder.addNode(node);
  addParentLink(documentDescriptor, node);
  // perform any necessary post processing - currently only supported on IClonableStepAnalyzers
  for (final AnalyzerHolder analyzerHolder : analyzerHolders) {
    if (analyzerHolder.getAnalyzer() instanceof IClonableStepAnalyzer) {
      final IClonableStepAnalyzer clonableAnalyzer = (IClonableStepAnalyzer) analyzerHolder.getAnalyzer();
      clonableAnalyzer.postAnalyze(analyzerHolder.getMeta());
    }
  }
  return node;
}
Use of org.pentaho.di.core.parameters.UnknownParamException in the project pentaho-metaverse by pentaho: the class JobRuntimeExtensionPoint, method populateExecutionProfile.
/**
 * Fills in the given execution profile with artifact, engine, and runtime details
 * (variables, parameters, and arguments) taken from the supplied job.
 *
 * @param executionProfile the profile to populate
 * @param job              the job whose runtime state is recorded
 */
protected void populateExecutionProfile(IExecutionProfile executionProfile, Job job) {
  JobMeta jobMeta = job.getJobMeta();
  String filename = getFilename(job);
  String filePath = null;
  if (job.getRep() == null) {
    // File-based job: normalize the path, falling back to the raw filename on failure.
    try {
      filePath = KettleAnalyzerUtil.normalizeFilePath(filename);
    } catch (Exception e) {
      log.warn("Couldn't normalize file path: " + filename, e);
      filePath = filename;
    }
  } else {
    // Repository-based job: keep the repository path as-is.
    filePath = filename;
  }
  // Set artifact information (path, type, description, etc.)
  executionProfile.setPath(filePath);
  executionProfile.setName(jobMeta.getName());
  executionProfile.setType(DictionaryConst.NODE_TYPE_JOB);
  executionProfile.setDescription(jobMeta.getDescription());
  // Set execution engine information
  executionProfile.setExecutionEngine(getExecutionEngineInfo());
  IExecutionData executionData = executionProfile.getExecutionData();
  // Store execution information (client, server, user, etc.)
  executionData.setEndTime(new Timestamp(new Date().getTime()));
  KettleClientEnvironment.ClientType clientType = KettleClientEnvironment.getInstance().getClient();
  executionData.setClientExecutor(clientType == null ? "DI Server" : clientType.name());
  executionData.setExecutorUser(job.getExecutingUser());
  executionData.setExecutorServer(job.getExecutingServer());
  Result result = job.getResult();
  if (result != null) {
    executionData.setFailureCount(result.getNrErrors());
  }
  // Store variables
  List<String> vars = jobMeta.getUsedVariables();
  Map<Object, Object> variableMap = executionData.getVariables();
  for (String var : vars) {
    String value = job.getVariable(var);
    // Guard both name and value, consistent with the transformation variant.
    if (var != null && value != null) {
      variableMap.put(var, value);
    }
  }
  // Store parameters
  String[] params = job.listParameters();
  List<IParamInfo<String>> paramList = executionData.getParameters();
  if (params != null) {
    for (String param : params) {
      try {
        ParamInfo paramInfo = new ParamInfo(param, job.getParameterDescription(param), job.getParameterDefault(param));
        paramList.add(paramInfo);
      } catch (UnknownParamException e) {
        // Log instead of printStackTrace, consistent with the transformation variant.
        log.error("Couldn't find job parameter: " + param, e);
      }
    }
  }
  // Store arguments
  String[] args = job.getArguments();
  List<Object> argList = executionData.getArguments();
  if (args != null) {
    argList.addAll(Arrays.asList(args));
  }
}
Use of org.pentaho.di.core.parameters.UnknownParamException in the project pentaho-metaverse by pentaho: the class TransformationRuntimeExtensionPoint, method populateExecutionProfile.
/**
 * Populates the supplied execution profile with artifact, engine, and runtime details
 * (variables, parameters, and arguments) gathered from the given transformation.
 *
 * @param executionProfile the profile to populate
 * @param trans            the transformation whose runtime state is recorded
 */
protected void populateExecutionProfile(IExecutionProfile executionProfile, Trans trans) {
  final TransMeta transMeta = trans.getTransMeta();
  String filename = trans.getFilename();
  if (filename == null) {
    filename = transMeta.getPathAndName();
  }
  // Repository-based transformations keep the raw name; file-based ones are normalized.
  String filePath;
  if (trans.getRepository() != null) {
    filePath = filename;
  } else {
    try {
      filePath = KettleAnalyzerUtil.normalizeFilePath(filename);
    } catch (Exception e) {
      log.warn("Couldn't normalize file path: " + filename, e);
      filePath = filename;
    }
  }
  // Artifact information (path, type, description, etc.)
  executionProfile.setPath(filePath);
  executionProfile.setName(transMeta.getName());
  executionProfile.setType(DictionaryConst.NODE_TYPE_TRANS);
  executionProfile.setDescription(transMeta.getDescription());
  // Execution engine information
  executionProfile.setExecutionEngine(getExecutionEngineInfo());
  final IExecutionData executionData = executionProfile.getExecutionData();
  // Execution information (client, server, user, etc.)
  executionData.setEndTime(new Timestamp(new Date().getTime()));
  final KettleClientEnvironment.ClientType clientType = KettleClientEnvironment.getInstance().getClient();
  executionData.setClientExecutor(clientType != null ? clientType.name() : "DI Server");
  executionData.setExecutorUser(trans.getExecutingUser());
  executionData.setExecutorServer(trans.getExecutingServer());
  final Result result = trans.getResult();
  if (result != null) {
    executionData.setFailureCount(result.getNrErrors());
  }
  // Variables used by the transformation, with their runtime values
  final Map<Object, Object> variableMap = executionData.getVariables();
  for (final String variableName : transMeta.getUsedVariables()) {
    final String variableValue = trans.getVariable(variableName);
    if (variableName != null && variableValue != null) {
      variableMap.put(variableName, variableValue);
    }
  }
  // Declared parameters, with description and default
  final String[] parameterNames = trans.listParameters();
  final List<IParamInfo<String>> paramList = executionData.getParameters();
  if (parameterNames != null) {
    for (final String parameterName : parameterNames) {
      try {
        paramList.add(new ParamInfo(parameterName, trans.getParameterDescription(parameterName), trans.getParameterDefault(parameterName)));
      } catch (UnknownParamException e) {
        log.error("Couldn't find transformation parameter: " + parameterName, e);
      }
    }
  }
  // Command-line arguments, when present
  final String[] cliArguments = trans.getArguments();
  final List<Object> argList = executionData.getArguments();
  if (cliArguments != null) {
    argList.addAll(Arrays.asList(cliArguments));
  }
}
Use of org.pentaho.di.core.parameters.UnknownParamException in the project pentaho-kettle by pentaho: the class KitchenCommandExecutor, method execute.
/**
 * Entry point for a Kitchen run: loads a job from a repository or the local filesystem
 * (per the supplied params), optionally exports it or lists its parameters, then executes
 * it and returns the execution result.
 *
 * @param params    parsed command-line options (repository, file, listing flags, etc.)
 * @param arguments positional command-line arguments passed on to the job
 * @return the job's execution result (also recorded via setResult)
 * @throws Throwable if job execution itself fails; load errors are reported and mapped
 *                   to exit codes instead of being thrown
 */
public Result execute(Params params, String[] arguments) throws Throwable {
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Starting"));
logDebug("Kitchen.Log.AllocateNewJob");
Job job = null;
// In case we use a repository...
Repository repository = null;
try {
// Lazily create a default metastore if none was provided.
if (getMetaStore() == null) {
setMetaStore(createDefaultMetastore());
}
// Read kettle job specified on command-line?
if (!Utils.isEmpty(params.getRepoName()) || !Utils.isEmpty(params.getLocalFile())) {
logDebug("Kitchen.Log.ParsingCommandLine");
// Repository path, unless repository connections are explicitly blocked.
if (!Utils.isEmpty(params.getRepoName()) && !isEnabled(params.getBlockRepoConns())) {
/**
* if set, _trust_user_ needs to be considered. See pur-plugin's:
*
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/PurRepositoryConnector.java#L97-L101
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/WebServiceManager.java#L130-L133
*/
if (isEnabled(params.getTrustRepoUser())) {
// NOTE: this system property is cleared again in the finally block below.
System.setProperty("pentaho.repository.client.attemptTrust", YES);
}
// In case we use a repository...
// some commands are to load a Trans from the repo; others are merely to print some repo-related information
RepositoryMeta repositoryMeta = loadRepositoryConnection(params.getRepoName(), "Kitchen.Log.LoadingRep", "Kitchen.Error.NoRepDefinied", "Kitchen.Log.FindingRep");
if (repositoryMeta == null) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.CanNotConnectRep"));
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode());
}
logDebug("Kitchen.Log.CheckUserPass");
repository = establishRepositoryConnection(repositoryMeta, params.getRepoUsername(), params.getRepoPassword(), RepositoryOperation.EXECUTE_JOB);
// If so, nothing else is needed ( other than executing the actual requested operation )
if (isEnabled(params.getListRepoFiles()) || isEnabled(params.getListRepoDirs())) {
executeRepositoryBasedCommand(repository, params.getInputDir(), params.getListRepoFiles(), params.getListRepoDirs());
return exitWithStatus(CommandExecutorCodes.Kitchen.SUCCESS.getCode());
}
job = loadJobFromRepository(repository, params.getInputDir(), params.getInputFile());
}
// Try to load if from file
if (job == null) {
// Try to load the job from file, even if it failed to load from the repository
job = loadJobFromFilesystem(params.getLocalInitialDir(), params.getLocalFile(), params.getBase64Zip());
}
} else if (isEnabled(params.getListRepos())) {
// list the repositories placed at repositories.xml
printRepositories(loadRepositoryInfo("Kitchen.Log.ListRep", "Kitchen.Error.NoRepDefinied"));
}
} catch (KettleException e) {
// Load failure: report it and fall through to the job == null handling below.
job = null;
if (repository != null) {
repository.disconnect();
}
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.StopProcess", e.getMessage()));
}
if (job == null) {
// Only report "can not load job" when the user actually asked to run one
// (not for the pure listing modes).
if (!isEnabled(params.getListRepoFiles()) && !isEnabled(params.getListRepoDirs()) && !isEnabled(params.getListRepos())) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.canNotLoadJob"));
}
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode(), job);
}
if (!Utils.isEmpty(params.getExportRepo())) {
try {
// Export the resources linked to the currently loaded file...
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(params.getExportRepo(), job.getJobMeta(), job, repository, getMetaStore());
String launchFile = topLevelResource.getResourceName();
String message = ResourceUtil.getExplanation(params.getExportRepo(), launchFile, job.getJobMeta());
System.out.println();
System.out.println(message);
// Setting the list parameters option will make kitchen exit below in the parameters section
(params).setListFileParams(YES);
} catch (Exception e) {
System.out.println(Const.getStackTracker(e));
return exitWithStatus(CommandExecutorCodes.Kitchen.UNEXPECTED_ERROR.getCode());
}
}
Date start = Calendar.getInstance().getTime();
try {
// Set the command line arguments on the job ...
job.setArguments(arguments);
job.initializeVariablesFrom(null);
job.setLogLevel(getLog().getLogLevel());
job.getJobMeta().setInternalKettleVariables(job);
job.setRepository(repository);
job.getJobMeta().setRepository(repository);
job.getJobMeta().setMetaStore(getMetaStore());
// Map the command line named parameters to the actual named parameters. Skip for
// the moment any extra command line parameter not known in the job.
String[] jobParams = job.getJobMeta().listParameters();
for (String param : jobParams) {
try {
String value = params.getNamedParams().getParameterValue(param);
if (value != null) {
job.getJobMeta().setParameterValue(param, value);
}
} catch (UnknownParamException e) {
/* no-op: extra command-line parameters unknown to the job are deliberately skipped */
}
}
job.copyParametersFrom(job.getJobMeta());
// Put the parameters over the already defined variable space. Parameters get priority.
job.activateParameters();
// Set custom options in the job extension map as Strings
for (String optionName : params.getCustomNamedParams().listParameters()) {
try {
String optionValue = params.getCustomNamedParams().getParameterValue(optionName);
if (optionName != null && optionValue != null) {
job.getExtensionDataMap().put(optionName, optionValue);
}
} catch (UnknownParamException e) {
/* no-op: unknown custom options are deliberately ignored */
}
}
// List the parameters defined in this job, then simply exit...
if (isEnabled(params.getListFileParams())) {
printJobParameters(job);
// same as the other list options
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode());
}
// Execute the selected job and block until it completes.
job.start();
job.waitUntilFinished();
// get the execution result
setResult(job.getResult());
} finally {
if (repository != null) {
repository.disconnect();
}
if (isEnabled(params.getTrustRepoUser())) {
// we set it, now we sanitize it
System.clearProperty("pentaho.repository.client.attemptTrust");
}
}
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Finished"));
int returnCode = getReturnCode();
Date stop = Calendar.getInstance().getTime();
calculateAndPrintElapsedTime(start, stop, "Kitchen.Log.StartStop", "Kitchen.Log.ProcessEndAfter", "Kitchen.Log.ProcessEndAfterLong", "Kitchen.Log.ProcessEndAfterLonger", "Kitchen.Log.ProcessEndAfterLongest");
getResult().setElapsedTimeMillis(stop.getTime() - start.getTime());
return exitWithStatus(returnCode);
}
Aggregations