use of org.pentaho.di.resource.TopLevelResource in project pentaho-kettle by pentaho.
the class KitchenCommandExecutor method execute.
public int execute(String repoName, String noRepo, String username, String trustUser, String password, String dirName, String filename, String jobName, String listJobs, String listDirs, String exportRepo, String initialDir, String listRepos, String listParams, NamedParams params, NamedParams customParams, String[] arguments) throws Throwable {
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Starting"));
Date start = Calendar.getInstance().getTime();
logDebug("Kitchen.Log.AllocateNewJob");
Job job = null;
// In case we use a repository...
Repository repository = null;
try {
if (getMetaStore() == null) {
setMetaStore(createDefaultMetastore());
}
// Read kettle job specified on command-line?
if (!Utils.isEmpty(repoName) || !Utils.isEmpty(filename)) {
logDebug("Kitchen.Log.ParsingCommandLine");
if (!Utils.isEmpty(repoName) && !YES.equalsIgnoreCase(noRepo)) {
/**
* if set, _trust_user_ needs to be considered. See pur-plugin's:
*
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/PurRepositoryConnector.java#L97-L101
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/WebServiceManager.java#L130-L133
*/
if (YES.equalsIgnoreCase(trustUser)) {
System.setProperty("pentaho.repository.client.attemptTrust", YES);
}
// In case we use a repository...
// some commands are to load a Job from the repo; others are merely to print some repo-related information
RepositoryMeta repositoryMeta = loadRepositoryConnection(repoName, "Kitchen.Log.LoadingRep", "Kitchen.Error.NoRepDefinied", "Kitchen.Log.FindingRep");
repository = establishRepositoryConnection(repositoryMeta, username, password, RepositoryOperation.EXECUTE_JOB);
job = executeRepositoryBasedCommand(repository, repositoryMeta, dirName, jobName, listJobs, listDirs);
}
// Try to load it from a file anyway.
if (!Utils.isEmpty(filename) && job == null) {
// Try to load the job from file, even if it failed to load from the repository
job = executeFilesystemBasedCommand(initialDir, filename);
}
} else if (YES.equalsIgnoreCase(listRepos)) {
// list the repositories placed at repositories.xml
printRepositories(loadRepositoryInfo("Kitchen.Log.ListRep", "Kitchen.Error.NoRepDefinied"));
}
} catch (KettleException e) {
job = null;
if (repository != null) {
repository.disconnect();
}
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.StopProcess", e.getMessage()));
}
if (job == null) {
if (!YES.equalsIgnoreCase(listJobs) && !YES.equalsIgnoreCase(listDirs) && !YES.equalsIgnoreCase(listRepos)) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.canNotLoadJob"));
}
return CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode();
}
if (!Utils.isEmpty(exportRepo)) {
try {
// Export the resources linked to the currently loaded file...
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(exportRepo, job.getJobMeta(), job, repository, getMetaStore());
String launchFile = topLevelResource.getResourceName();
String message = ResourceUtil.getExplanation(exportRepo, launchFile, job.getJobMeta());
System.out.println();
System.out.println(message);
// Setting the list parameters option will make kitchen exit below in the parameters section
listParams = YES;
} catch (Exception e) {
System.out.println(Const.getStackTracker(e));
return CommandExecutorCodes.Kitchen.UNEXPECTED_ERROR.getCode();
}
}
Result result = null;
int returnCode = CommandExecutorCodes.Kitchen.SUCCESS.getCode();
try {
// Set the command line arguments on the job ...
job.setArguments(arguments);
job.initializeVariablesFrom(null);
job.setLogLevel(getLog().getLogLevel());
job.getJobMeta().setInternalKettleVariables(job);
job.setRepository(repository);
job.getJobMeta().setRepository(repository);
job.getJobMeta().setMetaStore(getMetaStore());
// Map the command line named parameters to the actual named parameters. Skip for
// the moment any extra command line parameter not known in the job.
String[] jobParams = job.getJobMeta().listParameters();
for (String param : jobParams) {
String value = params.getParameterValue(param);
if (value != null) {
job.getJobMeta().setParameterValue(param, value);
}
}
job.copyParametersFrom(job.getJobMeta());
// Put the parameters over the already defined variable space. Parameters get priority.
job.activateParameters();
// Set custom options in the job extension map as Strings
for (String optionName : customParams.listParameters()) {
String optionValue = customParams.getParameterValue(optionName);
if (optionName != null && optionValue != null) {
job.getExtensionDataMap().put(optionName, optionValue);
}
}
// List the parameters defined in this job, then simply exit...
if (YES.equalsIgnoreCase(listParams)) {
printJobParameters(job);
// same as the other list options
return CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode();
}
job.start();
job.waitUntilFinished();
// Execute the selected job.
result = job.getResult();
} finally {
if (repository != null) {
repository.disconnect();
}
if (YES.equalsIgnoreCase(trustUser)) {
// we set this property above; clear it now
System.clearProperty("pentaho.repository.client.attemptTrust");
}
}
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Finished"));
if (result != null && result.getNrErrors() != 0) {
getLog().logError(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.FinishedWithErrors"));
returnCode = CommandExecutorCodes.Kitchen.ERRORS_DURING_PROCESSING.getCode();
}
Date stop = Calendar.getInstance().getTime();
calculateAndPrintElapsedTime(start, stop, "Kitchen.Log.StartStop", "Kitchen.Log.ProcessEndAfter", "Kitchen.Log.ProcessEndAfterLong", "Kitchen.Log.ProcessEndAfterLonger", "Kitchen.Log.ProcessEndAfterLongest");
return returnCode;
}
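The loop above maps command-line named parameters onto the job's own parameters before activateParameters() is called. Below is a minimal, hypothetical sketch of how such a NamedParams object could be assembled with NamedParamsDefault; the parameter name "INPUT_DIR" and its values are placeholders, not real Kitchen options.

import org.pentaho.di.core.parameters.NamedParams;
import org.pentaho.di.core.parameters.NamedParamsDefault;

public class NamedParamsSketch {
  public static void main(String[] args) throws Exception {
    NamedParams params = new NamedParamsDefault();
    // Declare the parameter (key, default value, description) ...
    params.addParameterDefinition("INPUT_DIR", "/tmp", "Directory to read input files from");
    // ... then assign the value supplied on the command line.
    params.setParameterValue("INPUT_DIR", "/data/incoming");
    System.out.println(params.getParameterValue("INPUT_DIR")); // prints /data/incoming
  }
}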
use of org.pentaho.di.resource.TopLevelResource in project pentaho-kettle by pentaho.
the class KitchenCommandExecutor method execute.
public Result execute(Params params, String[] arguments) throws Throwable {
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Starting"));
logDebug("Kitchen.Log.AllocateNewJob");
Job job = null;
// In case we use a repository...
Repository repository = null;
try {
if (getMetaStore() == null) {
setMetaStore(createDefaultMetastore());
}
// Read kettle job specified on command-line?
if (!Utils.isEmpty(params.getRepoName()) || !Utils.isEmpty(params.getLocalFile())) {
logDebug("Kitchen.Log.ParsingCommandLine");
if (!Utils.isEmpty(params.getRepoName()) && !isEnabled(params.getBlockRepoConns())) {
/**
* if set, _trust_user_ needs to be considered. See pur-plugin's:
*
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/PurRepositoryConnector.java#L97-L101
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/WebServiceManager.java#L130-L133
*/
if (isEnabled(params.getTrustRepoUser())) {
System.setProperty("pentaho.repository.client.attemptTrust", YES);
}
// In case we use a repository...
// some commands are to load a Job from the repo; others are merely to print some repo-related information
RepositoryMeta repositoryMeta = loadRepositoryConnection(params.getRepoName(), "Kitchen.Log.LoadingRep", "Kitchen.Error.NoRepDefinied", "Kitchen.Log.FindingRep");
if (repositoryMeta == null) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.CanNotConnectRep"));
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode());
}
logDebug("Kitchen.Log.CheckUserPass");
repository = establishRepositoryConnection(repositoryMeta, params.getRepoUsername(), params.getRepoPassword(), RepositoryOperation.EXECUTE_JOB);
// If only a listing was requested, nothing else is needed (other than executing that operation)
if (isEnabled(params.getListRepoFiles()) || isEnabled(params.getListRepoDirs())) {
executeRepositoryBasedCommand(repository, params.getInputDir(), params.getListRepoFiles(), params.getListRepoDirs());
return exitWithStatus(CommandExecutorCodes.Kitchen.SUCCESS.getCode());
}
job = loadJobFromRepository(repository, params.getInputDir(), params.getInputFile());
}
// Try to load it from a file
if (job == null) {
// Try to load the job from file, even if it failed to load from the repository
job = loadJobFromFilesystem(params.getLocalInitialDir(), params.getLocalFile(), params.getBase64Zip());
}
} else if (isEnabled(params.getListRepos())) {
// list the repositories placed at repositories.xml
printRepositories(loadRepositoryInfo("Kitchen.Log.ListRep", "Kitchen.Error.NoRepDefinied"));
}
} catch (KettleException e) {
job = null;
if (repository != null) {
repository.disconnect();
}
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.StopProcess", e.getMessage()));
}
if (job == null) {
if (!isEnabled(params.getListRepoFiles()) && !isEnabled(params.getListRepoDirs()) && !isEnabled(params.getListRepos())) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Kitchen.Error.canNotLoadJob"));
}
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode(), job);
}
if (!Utils.isEmpty(params.getExportRepo())) {
try {
// Export the resources linked to the currently loaded file...
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(params.getExportRepo(), job.getJobMeta(), job, repository, getMetaStore());
String launchFile = topLevelResource.getResourceName();
String message = ResourceUtil.getExplanation(params.getExportRepo(), launchFile, job.getJobMeta());
System.out.println();
System.out.println(message);
// Setting the list parameters option will make kitchen exit below in the parameters section
params.setListFileParams(YES);
} catch (Exception e) {
System.out.println(Const.getStackTracker(e));
return exitWithStatus(CommandExecutorCodes.Kitchen.UNEXPECTED_ERROR.getCode());
}
}
Date start = Calendar.getInstance().getTime();
try {
// Set the command line arguments on the job ...
job.setArguments(arguments);
job.initializeVariablesFrom(null);
job.setLogLevel(getLog().getLogLevel());
job.getJobMeta().setInternalKettleVariables(job);
job.setRepository(repository);
job.getJobMeta().setRepository(repository);
job.getJobMeta().setMetaStore(getMetaStore());
// Map the command line named parameters to the actual named parameters. Skip for
// the moment any extra command line parameter not known in the job.
String[] jobParams = job.getJobMeta().listParameters();
for (String param : jobParams) {
try {
String value = params.getNamedParams().getParameterValue(param);
if (value != null) {
job.getJobMeta().setParameterValue(param, value);
}
} catch (UnknownParamException e) {
/* no-op */
}
}
job.copyParametersFrom(job.getJobMeta());
// Put the parameters over the already defined variable space. Parameters get priority.
job.activateParameters();
// Set custom options in the job extension map as Strings
for (String optionName : params.getCustomNamedParams().listParameters()) {
try {
String optionValue = params.getCustomNamedParams().getParameterValue(optionName);
if (optionName != null && optionValue != null) {
job.getExtensionDataMap().put(optionName, optionValue);
}
} catch (UnknownParamException e) {
/* no-op */
}
}
// List the parameters defined in this job, then simply exit...
if (isEnabled(params.getListFileParams())) {
printJobParameters(job);
// same as the other list options
return exitWithStatus(CommandExecutorCodes.Kitchen.COULD_NOT_LOAD_JOB.getCode());
}
// Execute the selected job.
job.start();
job.waitUntilFinished();
// get the execution result
setResult(job.getResult());
} finally {
if (repository != null) {
repository.disconnect();
}
if (isEnabled(params.getTrustRepoUser())) {
// we set this property above; clear it now
System.clearProperty("pentaho.repository.client.attemptTrust");
}
}
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Kitchen.Log.Finished"));
int returnCode = getReturnCode();
Date stop = Calendar.getInstance().getTime();
calculateAndPrintElapsedTime(start, stop, "Kitchen.Log.StartStop", "Kitchen.Log.ProcessEndAfter", "Kitchen.Log.ProcessEndAfterLong", "Kitchen.Log.ProcessEndAfterLonger", "Kitchen.Log.ProcessEndAfterLongest");
getResult().setElapsedTimeMillis(stop.getTime() - start.getTime());
return exitWithStatus(returnCode);
}
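Both execute variants use ResourceUtil.serializeResourceExportInterface to bundle the loaded job and its dependent resources into a single ZIP, then print the explanation returned by ResourceUtil.getExplanation. A minimal, hypothetical headless sketch of the same pattern follows, assuming the job is loaded from a local .kjb file and that passing a null repository and metastore is acceptable for the resources involved; file names are placeholders.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.resource.ResourceUtil;
import org.pentaho.di.resource.TopLevelResource;

public class ExportJobSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();                            // initialize plugins, VFS, logging
    JobMeta jobMeta = new JobMeta("my_job.kjb", null);   // null = no repository
    // Serialize the job plus everything it references into a single ZIP file.
    // Repository and metastore are null here; a real metastore may be needed
    // if the job uses metastore-backed resources.
    TopLevelResource top = ResourceUtil.serializeResourceExportInterface(
        "/tmp/my_job_export.zip", jobMeta, jobMeta, null, null);
    // The "launch file" is the entry point inside the ZIP (e.g. the exported .kjb).
    System.out.println(ResourceUtil.getExplanation(
        "/tmp/my_job_export.zip", top.getResourceName(), jobMeta));
  }
}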
use of org.pentaho.di.resource.TopLevelResource in project pentaho-kettle by pentaho.
the class Spoon method exportAllFileRepository.
/**
* Export this job or transformation, including all dependent resources, to a single ZIP file containing a file
* repository.
*/
public void exportAllFileRepository() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if (resourceExportInterface == null) {
resourceExportInterface = getActiveJob();
}
if (resourceExportInterface == null) {
// nothing to do here, prevent an NPE
return;
}
//
try {
String zipFilename = null;
while (Utils.isEmpty(zipFilename)) {
FileDialog dialog = new FileDialog(shell, SWT.SAVE);
dialog.setText(BaseMessages.getString(PKG, "Spoon.ExportResourceSelectZipFile"));
dialog.setFilterExtensions(new String[] { "*.zip;*.ZIP", "*" });
dialog.setFilterNames(new String[] { BaseMessages.getString(PKG, "System.FileType.ZIPFiles"), BaseMessages.getString(PKG, "System.FileType.AllFiles") });
setFilterPath(dialog);
if (dialog.open() != null) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject(zipFilename);
if (zipFileObject.exists()) {
MessageBox box = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL);
box.setMessage(BaseMessages.getString(PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename));
box.setText(BaseMessages.getString(PKG, "Spoon.ExportResourceZipFileExists.Title"));
int answer = box.open();
if (answer == SWT.CANCEL) {
return;
}
if (answer == SWT.NO) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore);
String message = ResourceUtil.getExplanation(zipFilename, topLevelResource.getResourceName(), resourceExportInterface);
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
//
EnterTextDialog enterTextDialog = new EnterTextDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ResourceSerialized"), BaseMessages.getString(PKG, "Spoon.Dialog.ResourceSerializedSuccesfully"), message);
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch (Exception e) {
new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Error"), BaseMessages.getString(PKG, "Spoon.ErrorExportingFile"), e);
}
}
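The commented-out block above hints at registering the exported ZIP as a read-only file repository. Here is a hypothetical standalone sketch of that idea, using the same RepositoriesMeta and KettleFileRepositoryMeta calls; the repository name and ZIP path are placeholders and error handling is omitted.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta;

public class RegisterZipRepositorySketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    String zipFilename = "/tmp/my_export.zip";
    RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
    repositoriesMeta.readData(); // loads the user's repositories.xml
    KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
        KettleFileRepositoryMeta.REPOSITORY_TYPE_ID,
        "Export my_export", "Export to file : " + zipFilename,
        "zip://" + zipFilename + "!");
    fileRepositoryMeta.setReadOnly(true); // a ZIP file is read-only
    repositoriesMeta.addRepository(fileRepositoryMeta);
    repositoriesMeta.writeData(); // persist the new entry back to repositories.xml
  }
}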
use of org.pentaho.di.resource.TopLevelResource in project pentaho-kettle by pentaho.
the class Spoon method exportAllXMLFile.
/**
* Export this job or transformation, including all dependent resources, to a single zip file.
*/
public void exportAllXMLFile() {
ResourceExportInterface resourceExportInterface = getActiveTransformation();
if (resourceExportInterface == null) {
resourceExportInterface = getActiveJob();
}
if (resourceExportInterface == null) {
// nothing to do here, prevent an NPE
return;
}
//
try {
String zipFilename = null;
while (Utils.isEmpty(zipFilename)) {
FileDialog dialog = new FileDialog(shell, SWT.SAVE);
dialog.setText(BaseMessages.getString(PKG, "Spoon.ExportResourceSelectZipFile"));
dialog.setFilterExtensions(new String[] { "*.zip;*.ZIP", "*" });
dialog.setFilterNames(new String[] { BaseMessages.getString(PKG, "System.FileType.ZIPFiles"), BaseMessages.getString(PKG, "System.FileType.AllFiles") });
setFilterPath(dialog);
if (dialog.open() != null) {
lastDirOpened = dialog.getFilterPath();
zipFilename = dialog.getFilterPath() + Const.FILE_SEPARATOR + dialog.getFileName();
FileObject zipFileObject = KettleVFS.getFileObject(zipFilename);
if (zipFileObject.exists()) {
MessageBox box = new MessageBox(shell, SWT.YES | SWT.NO | SWT.CANCEL);
box.setMessage(BaseMessages.getString(PKG, "Spoon.ExportResourceZipFileExists.Message", zipFilename));
box.setText(BaseMessages.getString(PKG, "Spoon.ExportResourceZipFileExists.Title"));
int answer = box.open();
if (answer == SWT.CANCEL) {
return;
}
if (answer == SWT.NO) {
zipFilename = null;
}
}
} else {
return;
}
}
// Export the resources linked to the currently loaded file...
//
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(zipFilename, resourceExportInterface, (VariableSpace) resourceExportInterface, rep, metaStore);
String message = ResourceUtil.getExplanation(zipFilename, topLevelResource.getResourceName(), resourceExportInterface);
/*
* // Add the ZIP file as a repository to the repository list... // RepositoriesMeta repositoriesMeta = new
* RepositoriesMeta(); repositoriesMeta.readData();
*
* KettleFileRepositoryMeta fileRepositoryMeta = new KettleFileRepositoryMeta(
* KettleFileRepositoryMeta.REPOSITORY_TYPE_ID, "Export " + baseFileName, "Export to file : " + zipFilename,
* "zip://" + zipFilename + "!"); fileRepositoryMeta.setReadOnly(true); // A ZIP file is read-only int nr = 2;
* String baseName = fileRepositoryMeta.getName(); while
* (repositoriesMeta.findRepository(fileRepositoryMeta.getName()) != null) { fileRepositoryMeta.setName(baseName +
* " " + nr); nr++; }
*
* repositoriesMeta.addRepository(fileRepositoryMeta); repositoriesMeta.writeData();
*/
// Show some information concerning all this work...
EnterTextDialog enterTextDialog = new EnterTextDialog(shell, BaseMessages.getString(PKG, "Spoon.Dialog.ResourceSerialized"), BaseMessages.getString(PKG, "Spoon.Dialog.ResourceSerializedSuccesfully"), message);
enterTextDialog.setReadOnly();
enterTextDialog.open();
} catch (Exception e) {
new ErrorDialog(shell, BaseMessages.getString(PKG, "Spoon.Error"), BaseMessages.getString(PKG, "Spoon.ErrorExportingFile"), e);
}
}
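Once exported, the launch file reported by TopLevelResource.getResourceName() sits inside the ZIP and can be reached through Kettle VFS with a zip:...! URL, as the commented-out repository snippet above suggests. A small, hypothetical sketch; the archive and entry names are placeholders.

import org.apache.commons.vfs2.FileObject;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.job.JobMeta;

public class OpenExportedJobSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    // "my_job.kjb" is assumed to be the resource name reported by TopLevelResource.
    FileObject launchFile = KettleVFS.getFileObject("zip:file:///tmp/my_export.zip!/my_job.kjb");
    // Load the exported job straight from the archive via its VFS URI.
    JobMeta jobMeta = new JobMeta(launchFile.getName().getURI(), null);
    System.out.println("Loaded exported job: " + jobMeta.getName());
  }
}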
use of org.pentaho.di.resource.TopLevelResource in project pentaho-kettle by pentaho.
the class Job method sendToSlaveServer.
/**
* Send to slave server.
*
* @param jobMeta
* the job meta
* @param executionConfiguration
* the execution configuration
* @param repository
* the repository
* @param metaStore
* the metaStore
* @return the Carte object id assigned to the job on the remote server
* @throws KettleException
* if the job could not be exported, posted, or started on the remote server
*/
public static String sendToSlaveServer(JobMeta jobMeta, JobExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore) throws KettleException {
String carteObjectId;
SlaveServer slaveServer = executionConfiguration.getRemoteServer();
if (slaveServer == null) {
throw new KettleException(BaseMessages.getString(PKG, "Job.Log.NoSlaveServerSpecified"));
}
if (Utils.isEmpty(jobMeta.getName())) {
throw new KettleException(BaseMessages.getString(PKG, "Job.Log.UniqueJobName"));
}
// Align logging levels between execution configuration and remote server
slaveServer.getLogChannel().setLogLevel(executionConfiguration.getLogLevel());
try {
//
for (String var : Const.INTERNAL_TRANS_VARIABLES) {
executionConfiguration.getVariables().put(var, jobMeta.getVariable(var));
}
for (String var : Const.INTERNAL_JOB_VARIABLES) {
executionConfiguration.getVariables().put(var, jobMeta.getVariable(var));
}
if (executionConfiguration.isPassingExport()) {
// First export the job...
//
FileObject tempFile = KettleVFS.createTempFile("jobExport", ".zip", System.getProperty("java.io.tmpdir"), jobMeta);
TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(tempFile.getName().toString(), jobMeta, jobMeta, repository, metaStore, executionConfiguration.getXML(), CONFIGURATION_IN_EXPORT_FILENAME);
// Send the zip file over to the slave server...
String result = slaveServer.sendExport(topLevelResource.getArchiveName(), RegisterPackageServlet.TYPE_JOB, topLevelResource.getBaseResourceName());
WebResult webResult = WebResult.fromXMLString(result);
if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
throw new KettleException("There was an error passing the exported job to the remote server: " + Const.CR + webResult.getMessage());
}
carteObjectId = webResult.getId();
} else {
String xml = new JobConfiguration(jobMeta, executionConfiguration).getXML();
String reply = slaveServer.sendXML(xml, RegisterJobServlet.CONTEXT_PATH + "/?xml=Y");
WebResult webResult = WebResult.fromXMLString(reply);
if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
throw new KettleException("There was an error posting the job on the remote server: " + Const.CR + webResult.getMessage());
}
carteObjectId = webResult.getId();
}
// Start the job
//
String reply = slaveServer.execService(StartJobServlet.CONTEXT_PATH + "/?name=" + URLEncoder.encode(jobMeta.getName(), "UTF-8") + "&xml=Y&id=" + carteObjectId);
WebResult webResult = WebResult.fromXMLString(reply);
if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
throw new KettleException("There was an error starting the job on the remote server: " + Const.CR + webResult.getMessage());
}
return carteObjectId;
} catch (KettleException ke) {
throw ke;
} catch (Exception e) {
throw new KettleException(e);
}
}
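A hypothetical sketch of calling sendToSlaveServer from client code: load a job from a file, point a JobExecutionConfiguration at a Carte slave server, and enable the passing-export path so the job and its resources are shipped as the ZIP built by serializeResourceExportInterface. Host, port, credentials, and the file name are placeholders.

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;

public class RemoteJobSketch {
  public static void main(String[] args) throws Exception {
    KettleEnvironment.init();
    JobMeta jobMeta = new JobMeta("my_job.kjb", null);
    JobExecutionConfiguration config = new JobExecutionConfiguration();
    config.setRemoteServer(new SlaveServer("carte1", "localhost", "8080", "cluster", "cluster"));
    config.setPassingExport(true); // export job + resources as a ZIP and send it to the slave
    // Repository and metastore are omitted (null) because the job was loaded from a file.
    String carteObjectId = Job.sendToSlaveServer(jobMeta, config, null, null);
    System.out.println("Started remote job, Carte object id: " + carteObjectId);
  }
}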