use of org.pentaho.di.core.parameters.NamedParamsDefault in project pentaho-kettle by pentaho.
the class TransMeta method realClone.
/**
 * Perform a real clone of the transformation meta-data object, including cloning all lists and copying all values. If
 * the doClear parameter is true, the clone will be cleared of ALL values before the copy. If false, only the copied
 * fields will be cleared.
 *
 * @param doClear
 *          Whether to clear all of the clone's data before copying from the source object
 * @return a real clone of the calling object
 */
public Object realClone(boolean doClear) {
  try {
    TransMeta transMeta = (TransMeta) super.clone();
    if (doClear) {
      transMeta.clear();
    } else {
      // Clear out the things we're replacing below
      transMeta.databases = new ArrayList<>();
      transMeta.steps = new ArrayList<>();
      transMeta.hops = new ArrayList<>();
      transMeta.notes = new ArrayList<>();
      transMeta.dependencies = new ArrayList<>();
      transMeta.partitionSchemas = new ArrayList<>();
      transMeta.slaveServers = new ArrayList<>();
      transMeta.clusterSchemas = new ArrayList<>();
      transMeta.namedParams = new NamedParamsDefault();
      transMeta.stepChangeListeners = new ArrayList<>();
    }
    for (DatabaseMeta db : databases) {
      transMeta.addDatabase((DatabaseMeta) db.clone());
    }
    for (StepMeta step : steps) {
      transMeta.addStep((StepMeta) step.clone());
    }
    // PDI-15799: Step references still point to the original steps. Re-point them to the clones.
    for (StepMeta step : transMeta.getSteps()) {
      final StepMetaInterface stepMetaInterface = step.getStepMetaInterface();
      if (stepMetaInterface != null) {
        final StepIOMetaInterface stepIOMeta = stepMetaInterface.getStepIOMeta();
        if (stepIOMeta != null) {
          for (StreamInterface stream : stepIOMeta.getInfoStreams()) {
            String streamStepName = stream.getStepname();
            if (streamStepName != null) {
              StepMeta streamStepMeta = transMeta.findStep(streamStepName);
              stream.setStepMeta(streamStepMeta);
            }
          }
        }
      }
    }
    for (TransHopMeta hop : hops) {
      transMeta.addTransHop((TransHopMeta) hop.clone());
    }
    for (NotePadMeta note : notes) {
      transMeta.addNote((NotePadMeta) note.clone());
    }
    for (TransDependency dep : dependencies) {
      transMeta.addDependency((TransDependency) dep.clone());
    }
    for (SlaveServer slave : slaveServers) {
      transMeta.getSlaveServers().add((SlaveServer) slave.clone());
    }
    for (ClusterSchema schema : clusterSchemas) {
      transMeta.getClusterSchemas().add(schema.clone());
    }
    for (PartitionSchema schema : partitionSchemas) {
      transMeta.getPartitionSchemas().add((PartitionSchema) schema.clone());
    }
    for (String key : listParameters()) {
      transMeta.addParameterDefinition(key, getParameterDefault(key), getParameterDescription(key));
    }
    return transMeta;
  } catch (Exception e) {
    e.printStackTrace();
    return null;
  }
}
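A usage sketch (names are illustrative, not from the source): realClone(false) produces a deep copy whose collections are independent of the original, so the clone can be mutated freely.

// Minimal sketch, assuming a TransMeta populated elsewhere.
TransMeta original = loadTransMetaSomehow(); // hypothetical helper
TransMeta copy = (TransMeta) original.realClone(false);
copy.setName(original.getName() + " (copy)");
// The clone owns its own lists, so the original's steps are untouched.
assert original.getSteps() != copy.getSteps();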
use of org.pentaho.di.core.parameters.NamedParamsDefault in project pentaho-kettle by pentaho.
the class JobEntryTrans method execute.
/**
 * Execute this job entry and return the result. In this case it means just setting the result boolean in the Result
 * class.
 *
 * @param result The result of the previous execution
 * @param nr the job entry number
 * @return The Result of the execution.
 */
@Override
public Result execute(Result result, int nr) throws KettleException {
  result.setEntryNr(nr);
  LogChannelFileWriter logChannelFileWriter = null;
  LogLevel transLogLevel = parentJob.getLogLevel();
  // Set the embedded NamedCluster metastore provider key so that it can be passed to VFS
  if (parentJobMeta.getNamedClusterEmbedManager() != null) {
    parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
  }
  String realLogFilename = "";
  if (setLogfile) {
    transLogLevel = logFileLevel;
    realLogFilename = environmentSubstitute(getLogFilename());
    // if we do not have one, we must fail
    if (Utils.isEmpty(realLogFilename)) {
      logError(BaseMessages.getString(PKG, "JobTrans.Exception.LogFilenameMissing"));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    // create parent folder?
    if (!FileUtil.createParentFolder(PKG, realLogFilename, createParentFolder, this.getLogChannel(), this)) {
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
    try {
      logChannelFileWriter = new LogChannelFileWriter(this.getLogChannelId(), KettleVFS.getFileObject(realLogFilename, this), setAppendLogfile);
      logChannelFileWriter.startLogging();
    } catch (KettleException e) {
      logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableOpenAppender", realLogFilename, e.toString()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
      result.setResult(false);
      return result;
    }
  }
  //
  switch (specificationMethod) {
    case FILENAME:
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTrans", environmentSubstitute(getFilename())));
      }
      break;
    case REPOSITORY_BY_NAME:
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransInDirec", environmentSubstitute(getFilename()), environmentSubstitute(directory)));
      }
      break;
    case REPOSITORY_BY_REFERENCE:
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobTrans.Log.OpeningTransByReference", transObjectId));
      }
      break;
    default:
      break;
  }
  // Load the transformation only once for the complete loop!
  // Throws an exception if it was not possible to load the transformation. For example, the XML file doesn't exist or
  // the repository is down.
  // Log the stack trace and return an error condition from this job entry.
  //
  TransMeta transMeta = null;
  try {
    transMeta = getTransMeta(rep, metaStore, this);
  } catch (KettleException e) {
    logError(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToRunJob", parentJobMeta.getName(), getName(), StringUtils.trim(e.getMessage())), e);
    result.setNrErrors(1);
    result.setResult(false);
    return result;
  }
  int iteration = 0;
  String[] args1 = arguments;
  if (args1 == null || args1.length == 0) {
    // No arguments set, look at the parent job.
    args1 = parentJob.getArguments();
  }
  // initializeVariablesFrom(parentJob);
  //
  // For the moment only do variable translation at the start of a job, not
  // for every input row (if that would be switched on). This is for safety,
  // the real argument setting is later on.
  //
  String[] args = null;
  if (args1 != null) {
    args = new String[args1.length];
    for (int idx = 0; idx < args1.length; idx++) {
      args[idx] = environmentSubstitute(args1[idx]);
    }
  }
  RowMetaAndData resultRow = null;
  boolean first = true;
  List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>(result.getRows());
  while ((first && !execPerRow) || (execPerRow && rows != null && iteration < rows.size() && result.getNrErrors() == 0) && !parentJob.isStopped()) {
    //
    if (execPerRow) {
      result.getRows().clear();
    }
    if (rows != null && execPerRow) {
      resultRow = rows.get(iteration);
    } else {
      resultRow = null;
    }
    NamedParams namedParam = new NamedParamsDefault();
    if (parameters != null) {
      for (int idx = 0; idx < parameters.length; idx++) {
        if (!Utils.isEmpty(parameters[idx])) {
          // We have a parameter
          //
          namedParam.addParameterDefinition(parameters[idx], "", "Job entry runtime");
          if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
            // There is no field name specified.
            //
            String value = Const.NVL(environmentSubstitute(parameterValues[idx]), "");
            namedParam.setParameterValue(parameters[idx], value);
          } else {
            // something filled in, in the field column...
            //
            String value = "";
            if (resultRow != null) {
              value = resultRow.getString(parameterFieldNames[idx], "");
            }
            namedParam.setParameterValue(parameters[idx], value);
          }
        }
      }
    }
    first = false;
    Result previousResult = result;
    try {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobTrans.StartingTrans", getFilename(), getName(), getDescription()));
      }
      if (clearResultRows) {
        previousResult.setRows(new ArrayList<RowMetaAndData>());
      }
      if (clearResultFiles) {
        previousResult.getResultFiles().clear();
      }
      /*
       * Set one or more "result" rows on the transformation...
       */
      if (execPerRow) {
        if (argFromPrevious) {
          // Copy the input row to the (command line) arguments
          args = null;
          if (resultRow != null) {
            args = new String[resultRow.size()];
            for (int i = 0; i < resultRow.size(); i++) {
              args[i] = resultRow.getString(i, null);
            }
          }
        } else {
          // Just pass a single row
          List<RowMetaAndData> newList = new ArrayList<RowMetaAndData>();
          newList.add(resultRow);
          // This previous result rows list can be either empty or not.
          // Depending on the checkbox "clear result rows"
          // In this case, it would execute the transformation with one extra row each time
          // Can't figure out a real use-case for it, but hey, who am I to decide that, right?
          // :-)
          //
          previousResult.getRows().addAll(newList);
        }
        if (paramsFromPrevious) {
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      } else {
        if (argFromPrevious) {
          // Only put the first Row on the arguments
          args = null;
          if (resultRow != null) {
            args = new String[resultRow.size()];
            for (int i = 0; i < resultRow.size(); i++) {
              args[i] = resultRow.getString(i, null);
            }
          }
        }
        if (paramsFromPrevious) {
          // Copy the input to the parameters
          if (parameters != null) {
            for (int idx = 0; idx < parameters.length; idx++) {
              if (!Utils.isEmpty(parameters[idx])) {
                // We have a parameter
                if (Utils.isEmpty(Const.trim(parameterFieldNames[idx]))) {
                  namedParam.setParameterValue(parameters[idx], Const.NVL(environmentSubstitute(parameterValues[idx]), ""));
                } else {
                  String fieldValue = "";
                  if (resultRow != null) {
                    fieldValue = resultRow.getString(parameterFieldNames[idx], "");
                  }
                  // Get the value from the input stream
                  namedParam.setParameterValue(parameters[idx], Const.NVL(fieldValue, ""));
                }
              }
            }
          }
        }
      }
      // Handle the parameters...
      //
      transMeta.clearParameters();
      String[] parameterNames = transMeta.listParameters();
      prepareFieldNamesParameters(parameters, parameterFieldNames, parameterValues, namedParam, this);
      StepWithMappingMeta.activateParams(transMeta, transMeta, this, parameterNames, parameters, parameterValues, isPassingAllParameters());
      boolean doFallback = true;
      SlaveServer remoteSlaveServer = null;
      TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration();
      if (!Utils.isEmpty(runConfiguration)) {
        runConfiguration = environmentSubstitute(runConfiguration);
        log.logBasic(BaseMessages.getString(PKG, "JobTrans.RunConfig.Message"), runConfiguration);
        executionConfiguration.setRunConfiguration(runConfiguration);
        try {
          ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.SpoonTransBeforeStart.id, new Object[] { executionConfiguration, parentJob.getJobMeta(), transMeta, rep });
          List<Object> items = Arrays.asList(runConfiguration, false);
          try {
            ExtensionPointHandler.callExtensionPoint(log, KettleExtensionPoint.RunConfigurationSelection.id, items);
            if (waitingToFinish && (Boolean) items.get(IS_PENTAHO)) {
              String jobName = parentJob.getJobMeta().getName();
              String name = transMeta.getName();
              logBasic(BaseMessages.getString(PKG, "JobTrans.Log.InvalidRunConfigurationCombination", jobName, name, jobName));
            }
          } catch (Exception ignored) {
            // Ignored
          }
          if (!executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely() && !executionConfiguration.isExecutingClustered()) {
            result.setResult(true);
            return result;
          }
          clustering = executionConfiguration.isExecutingClustered();
          remoteSlaveServer = executionConfiguration.getRemoteServer();
          doFallback = false;
        } catch (KettleException e) {
          log.logError(e.getMessage(), getName());
          result.setNrErrors(1);
          result.setResult(false);
          return result;
        }
      }
      if (doFallback) {
        //
        if (!Utils.isEmpty(remoteSlaveServerName)) {
          String realRemoteSlaveServerName = environmentSubstitute(remoteSlaveServerName);
          remoteSlaveServer = parentJob.getJobMeta().findSlaveServer(realRemoteSlaveServerName);
          if (remoteSlaveServer == null) {
            throw new KettleException(BaseMessages.getString(PKG, "JobTrans.Exception.UnableToFindRemoteSlaveServer", realRemoteSlaveServerName));
          }
        }
      }
      //
      if (clustering) {
        executionConfiguration.setClusterPosting(true);
        executionConfiguration.setClusterPreparing(true);
        executionConfiguration.setClusterStarting(true);
        executionConfiguration.setClusterShowingTransformation(false);
        executionConfiguration.setSafeModeEnabled(false);
        executionConfiguration.setRepository(rep);
        executionConfiguration.setLogLevel(transLogLevel);
        executionConfiguration.setPreviousResult(previousResult);
        // Also pass the variables from the transformation into the execution configuration
        // That way it can go over the HTTP connection to the slave server.
        //
        executionConfiguration.setVariables(transMeta);
        // Also set the arguments...
        //
        executionConfiguration.setArgumentStrings(args);
        if (parentJob.getJobMeta().isBatchIdPassed()) {
          executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
        }
        TransSplitter transSplitter = null;
        long errors = 0;
        try {
          transSplitter = Trans.executeClustered(transMeta, executionConfiguration);
          // Monitor the running transformations, wait until they are done.
          // Also kill them all if anything goes bad
          // Also clean up afterwards...
          //
          errors += Trans.monitorClusteredTransformation(log, transSplitter, parentJob);
        } catch (Exception e) {
          logError("Error during clustered execution. Cleaning up clustered execution.", e);
          // In case something goes wrong, make sure to clean up afterwards!
          //
          errors++;
          if (transSplitter != null) {
            Trans.cleanupCluster(log, transSplitter);
          } else {
            // Try to clean anyway...
            //
            SlaveServer master = null;
            for (StepMeta stepMeta : transMeta.getSteps()) {
              if (stepMeta.isClustered()) {
                for (SlaveServer slaveServer : stepMeta.getClusterSchema().getSlaveServers()) {
                  if (slaveServer.isMaster()) {
                    master = slaveServer;
                    break;
                  }
                }
              }
            }
            if (master != null) {
              master.deAllocateServerSockets(transMeta.getName(), null);
            }
          }
        }
        result.clear();
        if (transSplitter != null) {
          Result clusterResult = Trans.getClusteredTransformationResult(log, transSplitter, parentJob, executionConfiguration.isLogRemoteExecutionLocally());
          result.add(clusterResult);
        }
        result.setNrErrors(result.getNrErrors() + errors);
      } else if (remoteSlaveServer != null) {
        // Execute this transformation remotely
        //
        // Make sure we can parameterize the slave server connection
        //
        remoteSlaveServer.shareVariablesWith(this);
        // Remote execution...
        //
        executionConfiguration.setPreviousResult(previousResult.clone());
        executionConfiguration.setArgumentStrings(args);
        executionConfiguration.setVariables(this);
        executionConfiguration.setRemoteServer(remoteSlaveServer);
        executionConfiguration.setLogLevel(transLogLevel);
        executionConfiguration.setRepository(rep);
        executionConfiguration.setLogFileName(realLogFilename);
        executionConfiguration.setSetAppendLogfile(setAppendLogfile);
        executionConfiguration.setSetLogfile(setLogfile);
        Map<String, String> params = executionConfiguration.getParams();
        for (String param : transMeta.listParameters()) {
          String value = Const.NVL(transMeta.getParameterValue(param), Const.NVL(transMeta.getParameterDefault(param), transMeta.getVariable(param)));
          params.put(param, value);
        }
        if (parentJob.getJobMeta().isBatchIdPassed()) {
          executionConfiguration.setPassedBatchId(parentJob.getPassedBatchId());
        }
        // Send the XML over to the slave server
        // Also start the transformation over there...
        //
        String carteObjectId = Trans.sendToSlaveServer(transMeta, executionConfiguration, rep, metaStore);
        // Now start the monitoring...
        //
        SlaveServerTransStatus transStatus = null;
        while (!parentJob.isStopped() && waitingToFinish) {
          try {
            transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0);
            if (!transStatus.isRunning()) {
              // The transformation is finished, get the result...
              //
              // get the status with the result (we don't do it above because of changes for PDI-15781)
              transStatus = remoteSlaveServer.getTransStatus(transMeta.getName(), carteObjectId, 0, !isSuppressResultData());
              Result remoteResult = transStatus.getResult();
              result.clear();
              result.add(remoteResult);
              //
              if (remoteResult.isStopped()) {
                //
                result.setNrErrors(result.getNrErrors() + 1);
              }
              // Make sure to clean up : write a log record etc, close any left-over sockets etc.
              //
              remoteSlaveServer.cleanupTransformation(transMeta.getName(), carteObjectId);
              break;
            }
          } catch (Exception e1) {
            logError(BaseMessages.getString(PKG, "JobTrans.Error.UnableContactSlaveServer", "" + remoteSlaveServer, transMeta.getName()), e1);
            result.setNrErrors(result.getNrErrors() + 1L);
            // Stop looking too, chances are too low the server will come back on-line
            break;
          }
          // sleep for 2 seconds
          try {
            Thread.sleep(2000);
          } catch (InterruptedException e) {
            // Ignore
          }
        }
        if (parentJob.isStopped()) {
          //
          if (transStatus == null || transStatus.isRunning()) {
            // Try a remote abort ...
            //
            remoteSlaveServer.stopTransformation(transMeta.getName(), transStatus.getId());
            // And a cleanup...
            //
            remoteSlaveServer.cleanupTransformation(transMeta.getName(), transStatus.getId());
            // Set an error state!
            //
            result.setNrErrors(result.getNrErrors() + 1L);
          }
        }
      } else {
        // Execute this transformation on the local machine
        //
        // Create the transformation from meta-data
        //
        // trans = new Trans( transMeta, this );
        final TransMeta meta = transMeta;
        trans = new TransSupplier(transMeta, log, () -> new Trans(meta)).get();
        trans.setParent(this);
        // Pass the socket repository as early as possible...
        //
        trans.setSocketRepository(parentJob.getSocketRepository());
        if (parentJob.getJobMeta().isBatchIdPassed()) {
          trans.setPassedBatchId(parentJob.getPassedBatchId());
        }
        // set the parent job on the transformation, variables are taken from here...
        //
        trans.setParentJob(parentJob);
        trans.setParentVariableSpace(parentJob);
        trans.setLogLevel(transLogLevel);
        trans.setPreviousResult(previousResult);
        trans.setArguments(arguments);
        // Mappings need the repository to load from
        //
        trans.setRepository(rep);
        // inject the metaStore
        trans.setMetaStore(metaStore);
        // set gathering metrics state
        trans.setGatheringMetrics(parentJob.isGatheringMetrics());
        // First get the root job
        //
        Job rootJob = parentJob;
        while (rootJob.getParentJob() != null) {
          rootJob = rootJob.getParentJob();
        }
        // Get the start and end-date from the root job...
        //
        trans.setJobStartDate(rootJob.getStartDate());
        trans.setJobEndDate(rootJob.getEndDate());
        //
        for (DelegationListener delegationListener : parentJob.getDelegationListeners()) {
          // TODO: copy some settings in the job execution configuration, not strictly needed
          // but the execution configuration information is useful in case of a job re-start
          //
          delegationListener.transformationDelegationStarted(trans, new TransExecutionConfiguration());
        }
        try {
          // Start execution...
          //
          trans.execute(args);
          // TODO is it possible to implement Observer pattern to avoid Thread.sleep here?
          while (!trans.isFinished() && trans.getErrors() == 0) {
            if (parentJob.isStopped()) {
              trans.stopAll();
              break;
            } else {
              try {
                Thread.sleep(0, 500);
              } catch (InterruptedException e) {
                // Ignore errors
              }
            }
          }
          trans.waitUntilFinished();
          if (parentJob.isStopped() || trans.getErrors() != 0) {
            trans.stopAll();
            result.setNrErrors(1);
          }
          updateResult(result);
          if (setLogfile) {
            ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, KettleVFS.getFileObject(realLogFilename, this), parentJob.getJobname(), toString());
            result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
          }
        } catch (KettleException e) {
          logError(BaseMessages.getString(PKG, "JobTrans.Error.UnablePrepareExec"), e);
          result.setNrErrors(1);
        }
      }
    } catch (Exception e) {
      logError(BaseMessages.getString(PKG, "JobTrans.ErrorUnableOpenTrans", e.getMessage()));
      logError(Const.getStackTracker(e));
      result.setNrErrors(1);
    }
    iteration++;
  }
  if (setLogfile) {
    if (logChannelFileWriter != null) {
      logChannelFileWriter.stopLogging();
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_LOG, logChannelFileWriter.getLogFile(), parentJob.getJobname(), getName());
      result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      //
      if (logChannelFileWriter.getException() != null) {
        logError("Unable to open log file [" + getLogFilename() + "] : ");
        logError(Const.getStackTracker(logChannelFileWriter.getException()));
        result.setNrErrors(1);
        result.setResult(false);
        return result;
      }
    }
  }
  if (result.getNrErrors() == 0) {
    result.setResult(true);
  } else {
    result.setResult(false);
  }
  return result;
}
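One detail worth noting from the remote branch above: before sendToSlaveServer ships the transformation, every declared parameter is resolved with a three-level fallback. A minimal standalone sketch of that precedence, using an illustrative parameter name:

// Resolution order: explicit value -> declared default -> variable of the same name.
String paramName = "INPUT_DIR"; // illustrative, not from the source
String resolved = Const.NVL(transMeta.getParameterValue(paramName),
  Const.NVL(transMeta.getParameterDefault(paramName), transMeta.getVariable(paramName)));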
use of org.pentaho.di.core.parameters.NamedParamsDefault in project pentaho-kettle by pentaho.
the class Pan method main.
public static void main(String[] a) throws Exception {
  try {
    KettleClientEnvironment.getInstance().setClient(KettleClientEnvironment.ClientType.PAN);
    KettleEnvironment.init();
    Locale.setDefault(LanguageChoice.getInstance().getDefaultLocale());
    List<String> args = new ArrayList<>();
    for (int i = 0; i < a.length; i++) {
      if (a[i].length() > 0) {
        args.add(a[i]);
      }
    }
    // The options:
    StringBuilder optionRepname, optionUsername, optionTrustUser, optionPassword, optionTransname, optionDirname;
    StringBuilder optionFilename, optionLoglevel, optionLogfile, optionLogfileOld, optionListdir;
    StringBuilder optionListtrans, optionListrep, optionExprep, optionNorep, optionSafemode;
    StringBuilder optionVersion, optionJarFilename, optionListParam, optionMetrics, initialDir;
    StringBuilder optionResultSetStepName, optionResultSetCopyNumber;
    StringBuilder optionBase64Zip, optionUuid;
    NamedParams optionParams = new NamedParamsDefault();
    CommandLineOption maxLogLinesOption = new CommandLineOption("maxloglines", BaseMessages.getString(PKG, "Pan.CmdLine.MaxLogLines"), new StringBuilder());
    CommandLineOption maxLogTimeoutOption = new CommandLineOption("maxlogtimeout", BaseMessages.getString(PKG, "Pan.CmdLine.MaxLogTimeout"), new StringBuilder());
    CommandLineOption[] options = new CommandLineOption[] {
      new CommandLineOption("rep", BaseMessages.getString(PKG, "Pan.ComdLine.RepName"), optionRepname = new StringBuilder()),
      new CommandLineOption("user", BaseMessages.getString(PKG, "Pan.ComdLine.RepUsername"), optionUsername = new StringBuilder()),
      new CommandLineOption("trustuser", BaseMessages.getString(PKG, "Pan.ComdLine.RepUsername"), optionTrustUser = new StringBuilder()),
      new CommandLineOption("pass", BaseMessages.getString(PKG, "Pan.ComdLine.RepPassword"), optionPassword = new StringBuilder()),
      new CommandLineOption("trans", BaseMessages.getString(PKG, "Pan.ComdLine.TransName"), optionTransname = new StringBuilder()),
      new CommandLineOption("dir", BaseMessages.getString(PKG, "Pan.ComdLine.RepDir"), optionDirname = new StringBuilder()),
      new CommandLineOption("file", BaseMessages.getString(PKG, "Pan.ComdLine.XMLTransFile"), optionFilename = new StringBuilder()),
      new CommandLineOption("level", BaseMessages.getString(PKG, "Pan.ComdLine.LogLevel"), optionLoglevel = new StringBuilder()),
      new CommandLineOption("logfile", BaseMessages.getString(PKG, "Pan.ComdLine.LogFile"), optionLogfile = new StringBuilder()),
      new CommandLineOption("log", BaseMessages.getString(PKG, "Pan.ComdLine.LogOldFile"), optionLogfileOld = new StringBuilder(), false, true),
      new CommandLineOption("listdir", BaseMessages.getString(PKG, "Pan.ComdLine.ListDirRep"), optionListdir = new StringBuilder(), true, false),
      new CommandLineOption("listtrans", BaseMessages.getString(PKG, "Pan.ComdLine.ListTransDir"), optionListtrans = new StringBuilder(), true, false),
      new CommandLineOption("listrep", BaseMessages.getString(PKG, "Pan.ComdLine.ListReps"), optionListrep = new StringBuilder(), true, false),
      new CommandLineOption("exprep", BaseMessages.getString(PKG, "Pan.ComdLine.ExpObjectsXML"), optionExprep = new StringBuilder(), true, false),
      new CommandLineOption("norep", BaseMessages.getString(PKG, "Pan.ComdLine.NoRep"), optionNorep = new StringBuilder(), true, false),
      new CommandLineOption("safemode", BaseMessages.getString(PKG, "Pan.ComdLine.SafeMode"), optionSafemode = new StringBuilder(), true, false),
      new CommandLineOption("version", BaseMessages.getString(PKG, "Pan.ComdLine.Version"), optionVersion = new StringBuilder(), true, false),
      new CommandLineOption("jarfile", BaseMessages.getString(PKG, "Pan.ComdLine.JarFile"), optionJarFilename = new StringBuilder(), false, true),
      new CommandLineOption("param", BaseMessages.getString(PKG, "Pan.ComdLine.Param"), optionParams, false),
      new CommandLineOption("listparam", BaseMessages.getString(PKG, "Pan.ComdLine.ListParam"), optionListParam = new StringBuilder(), true, false),
      new CommandLineOption("initialDir", null, initialDir = new StringBuilder(), false, true),
      new CommandLineOption("stepname", "ResultSetStepName", optionResultSetStepName = new StringBuilder(), false, true),
      new CommandLineOption("copynum", "ResultSetCopyNumber", optionResultSetCopyNumber = new StringBuilder(), false, true),
      new CommandLineOption("zip", "Base64Zip", optionBase64Zip = new StringBuilder(), false, true),
      new CommandLineOption("uuid", "UUID", optionUuid = new StringBuilder(), false, true),
      new CommandLineOption("metrics", BaseMessages.getString(PKG, "Pan.ComdLine.Metrics"), optionMetrics = new StringBuilder(), true, false),
      maxLogLinesOption, maxLogTimeoutOption };
    if (args.size() == 2) {
      // 2 internal hidden arguments (flag and value)
      CommandLineOption.printUsage(options);
      exitJVM(CommandExecutorCodes.Pan.CMD_LINE_PRINT.getCode());
    }
    // Parse the options...
    if (!CommandLineOption.parseArguments(args, options, log)) {
      log.logError(BaseMessages.getString(PKG, "Pan.Error.CommandLineError"));
      exitJVM(CommandExecutorCodes.Pan.ERROR_LOADING_STEPS_PLUGINS.getCode());
    }
    Kitchen.configureLogging(maxLogLinesOption, maxLogTimeoutOption);
    String kettleRepname = Const.getEnvironmentVariable(Const.KETTLE_REPOSITORY, null);
    String kettleUsername = Const.getEnvironmentVariable(Const.KETTLE_USER, null);
    String kettlePassword = Const.getEnvironmentVariable(Const.KETTLE_PASSWORD, null);
    if (kettleRepname != null && kettleRepname.length() > 0) {
      optionRepname = new StringBuilder(kettleRepname);
    }
    if (kettleUsername != null && kettleUsername.length() > 0) {
      optionUsername = new StringBuilder(kettleUsername);
    }
    if (kettlePassword != null && kettlePassword.length() > 0) {
      optionPassword = new StringBuilder(kettlePassword);
    }
    if (Utils.isEmpty(optionLogfile) && !Utils.isEmpty(optionLogfileOld)) {
      // if the old style of logging name is filled in, and the new one is not,
      // overwrite the new by the old
      optionLogfile = optionLogfileOld;
    }
    if (!Utils.isEmpty(optionLogfile)) {
      // PDI-18724: this throws an exception if the given log file is not accessible
      fileLoggingEventListener = new FileLoggingEventListener(optionLogfile.toString(), true);
      KettleLogStore.getAppender().addLoggingEventListener(fileLoggingEventListener);
    } else {
      fileLoggingEventListener = null;
    }
    if (!Utils.isEmpty(optionLoglevel)) {
      log.setLogLevel(LogLevel.getLogLevelForCode(optionLoglevel.toString()));
      log.logMinimal(BaseMessages.getString(PKG, "Pan.Log.Loglevel", log.getLogLevel().getDescription()));
    }
    //
    if (log.isDebug()) {
      System.out.println("Arguments:");
      for (int i = 0; i < options.length; i++) {
        System.out.println(Const.rightPad(options[i].getOption(), 12) + " : " + options[i].getArgument());
      }
      System.out.println("");
    }
    if (getCommandExecutor() == null) {
      // init
      setCommandExecutor(new PanCommandExecutor(PKG, log));
    }
    if (!Utils.isEmpty(optionVersion)) {
      getCommandExecutor().printVersion();
      if (a.length == 1) {
        exitJVM(CommandExecutorCodes.Pan.KETTLE_VERSION_PRINT.getCode());
      }
    }
    Params.Builder builder = optionUuid.length() > 0 ? new Params.Builder(optionUuid.toString()) : new Params.Builder();
    Params transParams = builder
      .blockRepoConns(optionNorep.toString())
      .repoName(optionRepname.toString())
      .repoUsername(optionUsername.toString())
      .trustRepoUser(optionTrustUser.toString())
      .repoPassword(optionPassword.toString())
      .inputDir(optionDirname.toString())
      .inputFile(optionTransname.toString())
      .listRepoFiles(optionListtrans.toString())
      .listRepoDirs(optionListdir.toString())
      .exportRepo(optionExprep.toString())
      .localFile(optionFilename.toString())
      .localJarFile(optionJarFilename.toString())
      .localInitialDir(initialDir.toString())
      .listRepos(optionListrep.toString())
      .safeMode(optionSafemode.toString())
      .metrics(optionMetrics.toString())
      .listFileParams(optionListParam.toString())
      .logLevel("")
      .maxLogLines("")
      .maxLogTimeout("")
      .logFile("")
      .oldLogFile("")
      .version("")
      .resultSetStepName(optionResultSetStepName.toString())
      .resultSetCopyNumber(optionResultSetCopyNumber.toString())
      .base64Zip(optionBase64Zip.toString())
      .namedParams(optionParams)
      .build();
    Result result = getCommandExecutor().execute(transParams, args.toArray(new String[args.size()]));
    exitJVM(result.getExitStatus());
  } catch (Throwable t) {
    t.printStackTrace();
    exitJVM(CommandExecutorCodes.Pan.UNEXPECTED_ERROR.getCode());
  }
}
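A hedged sketch of invoking Pan with the flags registered above (the file path and parameter name are illustrative; note that main ends by calling exitJVM, which terminates the process):

// Roughly equivalent to the command line: pan.sh -file=/tmp/example.ktr -level=Basic -param:MY_PARAM=value
Pan.main(new String[] { "-file=/tmp/example.ktr", "-level=Basic", "-param:MY_PARAM=value" });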
use of org.pentaho.di.core.parameters.NamedParamsDefault in project pentaho-kettle by pentaho.
the class PanTest method testConfigureParameters.
@Test
public void testConfigureParameters() throws Exception {
  TransMeta transMeta = new TransMeta();
  transMeta.addParameterDefinition(TEST_PARAM_NAME, DEFAULT_PARAM_VALUE, "This tests a default parameter");
  assertEquals("Default parameter was not set correctly on TransMeta", DEFAULT_PARAM_VALUE, transMeta.getParameterDefault(TEST_PARAM_NAME));
  assertEquals("Parameter value should be blank in TransMeta", "", transMeta.getParameterValue(TEST_PARAM_NAME));
  Trans trans = new Trans(transMeta);
  assertEquals("Default parameter was not set correctly on Trans", DEFAULT_PARAM_VALUE, trans.getParameterDefault(TEST_PARAM_NAME));
  assertEquals("Parameter value should be blank in Trans", "", trans.getParameterValue(TEST_PARAM_NAME));
  NamedParams params = new NamedParamsDefault();
  params.addParameterDefinition(TEST_PARAM_NAME, NOT_DEFAULT_PARAM_VALUE, "This tests a non-default parameter");
  params.setParameterValue(TEST_PARAM_NAME, NOT_DEFAULT_PARAM_VALUE);
  Pan.configureParameters(trans, params, transMeta);
  assertEquals("Parameter was not set correctly in Trans", NOT_DEFAULT_PARAM_VALUE, trans.getParameterValue(TEST_PARAM_NAME));
  assertEquals("Parameter was not set correctly in TransMeta", NOT_DEFAULT_PARAM_VALUE, transMeta.getParameterValue(TEST_PARAM_NAME));
}
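The same pattern can seed parameters before a standalone run; a minimal sketch under the assumptions of this test (the parameter name and value are illustrative):

NamedParams cliParams = new NamedParamsDefault();
cliParams.addParameterDefinition("MY_PARAM", "", "supplied at runtime"); // illustrative name
cliParams.setParameterValue("MY_PARAM", "override");
Pan.configureParameters(trans, cliParams, transMeta);
// Both the runtime object and its metadata now report the override.
assertEquals("override", trans.getParameterValue("MY_PARAM"));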
use of org.pentaho.di.core.parameters.NamedParamsDefault in project pentaho-kettle by pentaho.
the class AbstractMetaTest method testGetSetParameterValue.
@Test
public void testGetSetParameterValue() throws Exception {
  assertNull(meta.getParameterValue("var1"));
  assertNull(meta.getParameterDefault("var1"));
  assertNull(meta.getParameterDescription("var1"));
  meta.setParameterValue("var1", "y");
  // Values for new parameters must be added by addParameterDefinition
  assertNull(meta.getParameterValue("var1"));
  assertNull(meta.getParameterDefault("var1"));
  assertNull(meta.getParameterDescription("var1"));
  meta.addParameterDefinition("var2", "z", "My Description");
  assertEquals("", meta.getParameterValue("var2"));
  assertEquals("z", meta.getParameterDefault("var2"));
  assertEquals("My Description", meta.getParameterDescription("var2"));
  meta.setParameterValue("var2", "y");
  assertEquals("y", meta.getParameterValue("var2"));
  assertEquals("z", meta.getParameterDefault("var2"));
  String[] params = meta.listParameters();
  assertNotNull(params);
  // clearParameters() just clears their values, not their presence
  meta.clearParameters();
  assertEquals("", meta.getParameterValue("var2"));
  // eraseParameters() clears the list of parameters
  meta.eraseParameters();
  assertNull(meta.getParameterValue("var1"));
  NamedParams newParams = new NamedParamsDefault();
  newParams.addParameterDefinition("var3", "default", "description");
  newParams.setParameterValue("var3", "a");
  newParams.addParameterDefinition("emptyVar", "", "emptyDesc");
  newParams.setParameterValue("emptyVar", "");
  meta.copyParametersFrom(newParams);
  meta.activateParameters();
  assertEquals("default", meta.getParameterDefault("var3"));
  assertEquals("", meta.getParameterDefault("emptyVar"));
}
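A possible follow-up assertion, assuming (as AbstractMeta appears to do) that activateParameters() publishes each parameter's value, or its default when the value is empty, into the variable space:

// Sketch only: relies on activateParameters() copying values into variables.
assertEquals("a", meta.getVariable("var3"));
assertEquals("", meta.getVariable("emptyVar"));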