Use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
The class TableOutputMetaTest, method testReadRep.
/**
 * @see
 * <a href="http://jira.pentaho.com/browse/BACKLOG-377">http://jira.pentaho.com/browse/BACKLOG-377</a>
 * @throws KettleException
 */
@Test
public void testReadRep() throws KettleException {
// check variable
String commitSize = "${test}";
Repository rep = new MemoryRepository();
rep.saveStepAttribute(null, null, "commit", commitSize);
TableOutputMeta tableOutputMeta = new TableOutputMeta();
tableOutputMeta.readRep(rep, metaStore, null, databases);
assertEquals(commitSize, tableOutputMeta.getCommitSize());
// check integer size
int commitSizeInt = 1;
Repository rep2 = new MemoryRepository();
rep2.saveStepAttribute(null, null, "commit", commitSizeInt);
TableOutputMeta tableOutputMeta2 = new TableOutputMeta();
tableOutputMeta2.readRep(rep2, metaStore, null, databases);
assertEquals(String.valueOf(commitSizeInt), tableOutputMeta2.getCommitSize());
}
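The test above references the fixture fields metaStore and databases without showing them. A minimal sketch of what such a fixture could look like, following the standard StepMetaInterface.readRep(Repository, IMetaStore, ObjectId, List&lt;DatabaseMeta&gt;) signature; the in-memory metastore and empty database list are illustrative choices, not necessarily what TableOutputMetaTest actually declares.

import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.stores.memory.MemoryMetaStore;

// Illustrative fixture fields assumed by testReadRep (not shown in the snippet above).
// An attribute-only test like this tolerates an empty database list and an in-memory metastore.
private final IMetaStore metaStore = new MemoryMetaStore();
private final List<DatabaseMeta> databases = new ArrayList<>();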
Use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
The class PanCommandExecutor, method execute.
public Result execute(final Params params, String[] arguments) throws Throwable {
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Pan.Log.StartingToRun"));
logDebug("Pan.Log.AllocatteNewTrans");
Trans trans = null;
// In case we use a repository...
Repository repository = null;
try {
if (getMetaStore() == null) {
setMetaStore(createDefaultMetastore());
}
logDebug("Pan.Log.StartingToLookOptions");
// Read kettle transformation specified
if (!Utils.isEmpty(params.getRepoName()) || !Utils.isEmpty(params.getLocalFile()) || !Utils.isEmpty(params.getLocalJarFile())) {
logDebug("Pan.Log.ParsingCommandline");
if (!Utils.isEmpty(params.getRepoName()) && !isEnabled(params.getBlockRepoConns())) {
/**
* if set, _trust_user_ needs to be considered. See pur-plugin's:
*
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/PurRepositoryConnector.java#L97-L101
* @link https://github.com/pentaho/pentaho-kettle/blob/8.0.0.0-R/plugins/pur/core/src/main/java/org/pentaho/di/repository/pur/WebServiceManager.java#L130-L133
*/
if (isEnabled(params.getTrustRepoUser())) {
System.setProperty("pentaho.repository.client.attemptTrust", YES);
}
// In case we use a repository...
// some commands are to load a Trans from the repo; others are merely to output some repo-related information
RepositoryMeta repositoryMeta = loadRepositoryConnection(params.getRepoName(), "Pan.Log.LoadingAvailableRep", "Pan.Error.NoRepsDefined", "Pan.Log.FindingRep");
if (repositoryMeta == null) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.CanNotConnectRep"));
return exitWithStatus(CommandExecutorCodes.Pan.COULD_NOT_LOAD_TRANS.getCode());
}
logDebug("Pan.Log.CheckSuppliedUserPass");
repository = establishRepositoryConnection(repositoryMeta, params.getRepoUsername(), params.getRepoPassword(), RepositoryOperation.EXECUTE_TRANSFORMATION);
// If so, nothing else is needed ( other than executing the actual requested operation )
if (isEnabled(params.getListRepoFiles()) || isEnabled(params.getListRepoDirs()) || !Utils.isEmpty(params.getExportRepo())) {
executeRepositoryBasedCommand(repository, params.getInputDir(), params.getListRepoFiles(), params.getListRepoDirs(), params.getExportRepo());
return exitWithStatus(CommandExecutorCodes.Pan.SUCCESS.getCode());
}
trans = loadTransFromRepository(repository, params.getInputDir(), params.getInputFile());
}
// You could implement some fail-over mechanism this way.
if (trans == null) {
trans = loadTransFromFilesystem(params.getLocalInitialDir(), params.getLocalFile(), params.getLocalJarFile(), params.getBase64Zip());
}
}
if (isEnabled(params.getListRepos())) {
// list the repositories placed at repositories.xml
printRepositories(loadRepositoryInfo("Pan.Log.LoadingAvailableRep", "Pan.Error.NoRepsDefined"));
}
} catch (Exception e) {
trans = null;
System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.ProcessStopError", e.getMessage()));
e.printStackTrace();
if (repository != null) {
repository.disconnect();
}
return exitWithStatus(CommandExecutorCodes.Pan.ERRORS_DURING_PROCESSING.getCode(), trans);
}
if (trans == null) {
if (!isEnabled(params.getListRepoFiles()) && !isEnabled(params.getListRepoDirs()) && !isEnabled(params.getListRepos()) && Utils.isEmpty(params.getExportRepo())) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.CanNotLoadTrans"));
return exitWithStatus(CommandExecutorCodes.Pan.COULD_NOT_LOAD_TRANS.getCode());
} else {
return exitWithStatus(CommandExecutorCodes.Pan.SUCCESS.getCode());
}
}
// capture execution start time
Date start = Calendar.getInstance().getTime();
try {
trans.setLogLevel(getLog().getLogLevel());
configureParameters(trans, params.getNamedParams(), trans.getTransMeta());
// run in safe mode if requested
trans.setSafeModeEnabled(isEnabled(params.getSafeMode()));
// enable kettle metric gathering if requested
trans.setGatheringMetrics(isEnabled(params.getMetrics()));
// List the parameters defined in this transformation, and then simply exit
if (isEnabled(params.getListFileParams())) {
printTransformationParameters(trans);
// same as the other list options
return exitWithStatus(CommandExecutorCodes.Pan.COULD_NOT_LOAD_TRANS.getCode());
}
final List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
// allocate & run the required sub-threads
try {
trans.prepareExecution(arguments);
if (!StringUtils.isEmpty(params.getResultSetStepName())) {
int copyNr = NumberUtils.isNumber(params.getResultSetCopyNumber()) ? Integer.parseInt(params.getResultSetCopyNumber()) : 0;
logDebug("Collecting result-set for step '" + params.getResultSetStepName() + "' and copy number " + copyNr);
StepInterface step = null;
if ((step = trans.findRunThread(params.getResultSetStepName())) != null && step.getCopy() == copyNr) {
step.addRowListener(new RowAdapter() {
@Override
public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] data) throws KettleStepException {
rows.add(new RowMetaAndData(rowMeta, data));
}
});
}
}
trans.startThreads();
} catch (KettleException ke) {
logDebug(ke.getLocalizedMessage());
System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Error.UnablePrepareInitTrans"));
return exitWithStatus(CommandExecutorCodes.Pan.UNABLE_TO_PREP_INIT_TRANS.getCode(), trans);
}
// Give the transformation up to 10 seconds to finish execution
waitUntilFinished(trans, 100);
if (trans.isRunning()) {
getLog().logError(BaseMessages.getString(getPkgClazz(), "Pan.Log.NotStopping"));
}
getLog().logMinimal(BaseMessages.getString(getPkgClazz(), "Pan.Log.Finished"));
// capture execution stop time
Date stop = Calendar.getInstance().getTime();
trans.setResultRows(rows);
// get the execution result
setResult(trans.getResult());
int completionTimeSeconds = calculateAndPrintElapsedTime(start, stop, "Pan.Log.StartStop", "Pan.Log.ProcessingEndAfter", "Pan.Log.ProcessingEndAfterLong", "Pan.Log.ProcessingEndAfterLonger", "Pan.Log.ProcessingEndAfterLongest");
getResult().setElapsedTimeMillis(stop.getTime() - start.getTime());
if (getResult().getNrErrors() == 0) {
trans.printStats(completionTimeSeconds);
return exitWithStatus(CommandExecutorCodes.Pan.SUCCESS.getCode());
} else {
String transJVMExitCode = trans.getVariable(Const.KETTLE_TRANS_PAN_JVM_EXIT_CODE);
// If the trans has a return code to return to the OS, then we exit with that
if (!Utils.isEmpty(transJVMExitCode)) {
try {
return exitWithStatus(Integer.parseInt(transJVMExitCode));
} catch (NumberFormatException nfe) {
getLog().logError(BaseMessages.getString(getPkgClazz(), "Pan.Error.TransJVMExitCodeInvalid", Const.KETTLE_TRANS_PAN_JVM_EXIT_CODE, transJVMExitCode));
getLog().logError(BaseMessages.getString(getPkgClazz(), "Pan.Log.JVMExitCode", "1"));
return exitWithStatus(CommandExecutorCodes.Pan.ERRORS_DURING_PROCESSING.getCode());
}
} else {
// the trans does not have a return code.
return exitWithStatus(CommandExecutorCodes.Pan.ERRORS_DURING_PROCESSING.getCode());
}
}
} catch (KettleException ke) {
System.out.println(BaseMessages.getString(getPkgClazz(), "Pan.Log.ErrorOccurred", "" + ke.getMessage()));
getLog().logError(BaseMessages.getString(getPkgClazz(), "Pan.Log.UnexpectedErrorOccurred", "" + ke.getMessage()));
return exitWithStatus(CommandExecutorCodes.Pan.UNEXPECTED_ERROR.getCode(), trans);
} finally {
if (repository != null) {
repository.disconnect();
}
if (isEnabled(params.getTrustRepoUser())) {
// we set it, now we sanitize it
System.clearProperty("pentaho.repository.client.attemptTrust");
}
}
}
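The block comment inside execute() points at the pur plugin as the consumer of the pentaho.repository.client.attemptTrust system property, which execute() sets before connecting and clears in the finally block. A minimal sketch of that consumer side, assuming only that the YES constant used above resolves to "Y"; the helper name below is illustrative and is not the plugin's actual code (the real check lives in the PurRepositoryConnector and WebServiceManager classes linked from the comment).

// Illustrative counterpart to the System.setProperty / System.clearProperty calls in execute().
static final String ATTEMPT_TRUST_PROPERTY = "pentaho.repository.client.attemptTrust";

static boolean attemptTrustRequested() {
  // execute() sets the property before establishing the repository connection and clears it
  // once the run finishes, so a simple value check is enough on the connector side.
  return "Y".equalsIgnoreCase(System.getProperty(ATTEMPT_TRUST_PROPERTY));
}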
Use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
The class PanCommandExecutorTest, method testMetastoreFromRepoAddedIn.
@Test
public void testMetastoreFromRepoAddedIn() throws Exception {
// mock Trans loading from repo
TransMeta t = new TransMeta(getClass().getResource(SAMPLE_KTR).getPath());
when(repository.loadTransformation(anyString(), anyObject(), anyObject(), anyBoolean(), anyString())).thenReturn(t);
// test
Trans trans = mockedPanCommandExecutor.loadTransFromRepository(repository, "", SAMPLE_KTR);
assertNotNull(trans);
assertNotNull(trans.getMetaStore());
assertTrue(trans.getMetaStore() instanceof DelegatingMetaStore);
assertNotNull(((DelegatingMetaStore) trans.getMetaStore()).getMetaStoreList());
assertEquals(2, ((DelegatingMetaStore) trans.getMetaStore()).getMetaStoreList().size());
assertTrue(((DelegatingMetaStore) trans.getMetaStore()).getMetaStoreList().stream().anyMatch(m -> {
try {
return REPO_METASTORE_NAME.equals(m.getName());
} catch (Exception e) {
return false;
}
}));
}
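The assertions above rely on the repository mock exposing a metastore whose name matches REPO_METASTORE_NAME, but that wiring is not part of the snippet. A hedged sketch of what the setup could look like with Mockito; the constant's value and the exact stubbing are assumptions, while Repository.getMetaStore() and IMetaStore.getName() come from the Kettle and metastore APIs.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Before;
import org.pentaho.di.repository.Repository;
import org.pentaho.metastore.api.IMetaStore;

// Illustrative value; the real constant is defined elsewhere in the test class.
private static final String REPO_METASTORE_NAME = "RepositoryMetaStore";
private Repository repository;

@Before
public void setUp() throws Exception {
  repository = mock(Repository.class);
  IMetaStore repoMetaStore = mock(IMetaStore.class);
  when(repoMetaStore.getName()).thenReturn(REPO_METASTORE_NAME);
  // loadTransFromRepository() is expected to add the repository's metastore to the
  // transformation's DelegatingMetaStore, which is what the assertions above check.
  when(repository.getMetaStore()).thenReturn(repoMetaStore);
}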
Use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
The class JobEntryTransTest, method testGetTransMeta.
@Test
public void testGetTransMeta() throws KettleException {
String param1 = "param1";
String param2 = "param2";
String param3 = "param3";
String parentValue1 = "parentValue1";
String parentValue2 = "parentValue2";
String childValue3 = "childValue3";
JobEntryTrans jobEntryTrans = spy(getJobEntryTrans());
JobMeta parentJobMeta = spy(new JobMeta());
when(parentJobMeta.getNamedClusterEmbedManager()).thenReturn(mock(NamedClusterEmbedManager.class));
jobEntryTrans.setParentJobMeta(parentJobMeta);
Repository rep = Mockito.mock(Repository.class);
RepositoryDirectory repositoryDirectory = Mockito.mock(RepositoryDirectory.class);
RepositoryDirectoryInterface repositoryDirectoryInterface = Mockito.mock(RepositoryDirectoryInterface.class);
Mockito.doReturn(repositoryDirectoryInterface).when(rep).loadRepositoryDirectoryTree();
Mockito.doReturn(repositoryDirectory).when(repositoryDirectoryInterface).findDirectory("/home/admin");
TransMeta meta = new TransMeta();
meta.setVariable(param2, "childValue2 should be override");
meta.setVariable(param3, childValue3);
Mockito.doReturn(meta).when(rep).loadTransformation(Mockito.eq("test.ktr"), Mockito.anyObject(), Mockito.anyObject(), Mockito.anyBoolean(), Mockito.anyObject());
VariableSpace parentSpace = new Variables();
parentSpace.setVariable(param1, parentValue1);
parentSpace.setVariable(param2, parentValue2);
jobEntryTrans.setFileName("/home/admin/test.ktr");
Mockito.doNothing().when(jobEntryTrans).logBasic(Mockito.anyString());
jobEntryTrans.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
TransMeta transMeta;
jobEntryTrans.setPassingAllParameters(false);
transMeta = jobEntryTrans.getTransMeta(rep, null, parentSpace);
Assert.assertEquals(null, transMeta.getVariable(param1));
Assert.assertEquals(parentValue2, transMeta.getVariable(param2));
Assert.assertEquals(childValue3, transMeta.getVariable(param3));
jobEntryTrans.setPassingAllParameters(true);
transMeta = jobEntryTrans.getTransMeta(rep, null, parentSpace);
Assert.assertEquals(parentValue1, transMeta.getVariable(param1));
Assert.assertEquals(parentValue2, transMeta.getVariable(param2));
Assert.assertEquals(childValue3, transMeta.getVariable(param3));
}
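getJobEntryTrans() is referenced but not shown. In the simplest case it only needs to return a named entry for the spy to wrap, something like the sketch below; the entry name is illustrative.

// Illustrative helper assumed by testGetTransMeta; JobEntryTrans(String) just names the entry.
private JobEntryTrans getJobEntryTrans() {
  return new JobEntryTrans("testGetTransMeta");
}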
Use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
The class JobEntryTransTest, method testChooseSpecMethodByRepositoryConnectionStatus.
/**
* BACKLOG-179 - Exporting/Importing Jobs breaks Transformation specification when using "Specify by reference"
*
* This test checks that a different {@link ObjectLocationSpecificationMethod} is chosen when connected to a
* {@link Repository} than when disconnected.
*
* <b>Important!</b> This test must be rewritten whenever the import logic changes.
*
* @throws KettleXMLException
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
@Test
public void testChooseSpecMethodByRepositoryConnectionStatus() throws KettleXMLException, ParserConfigurationException, SAXException, IOException {
Repository rep = mock(Repository.class);
when(rep.isConnected()).thenReturn(true);
// 000
// not connected, no jobname, no method
testJobEntry(null, false, null, ObjectLocationSpecificationMethod.FILENAME);
// 001
// not connected, no jobname, REPOSITORY_BY_REFERENCE method
testJobEntry(null, false, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE);
// not connected, no jobname, REPOSITORY_BY_NAME method
testJobEntry(null, false, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// not connected, no jobname, FILENAME method
testJobEntry(null, false, ObjectLocationSpecificationMethod.FILENAME, ObjectLocationSpecificationMethod.FILENAME);
// 010
// not connected, jobname, no method
testJobEntry(null, true, null, ObjectLocationSpecificationMethod.FILENAME);
// 011
// not connected, jobname, REPOSITORY_BY_REFERENCE method
testJobEntry(null, true, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE);
// not connected, jobname, REPOSITORY_BY_NAME method
testJobEntry(null, true, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// not connected, jobname, FILENAME method
testJobEntry(null, true, ObjectLocationSpecificationMethod.FILENAME, ObjectLocationSpecificationMethod.FILENAME);
// 100
// connected, no jobname, no method
testJobEntry(rep, false, null, ObjectLocationSpecificationMethod.FILENAME);
// 101
// connected, no jobname, REPOSITORY_BY_REFERENCE method
testJobEntry(rep, false, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE);
// connected, no jobname, REPOSITORY_BY_NAME method
testJobEntry(rep, false, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// connected, no jobname, FILENAME method
testJobEntry(rep, false, ObjectLocationSpecificationMethod.FILENAME, ObjectLocationSpecificationMethod.FILENAME);
// 110
// connected, jobname, no method
testJobEntry(rep, true, null, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// 111
// connected, jobname, REPOSITORY_BY_REFERENCE method
testJobEntry(rep, true, ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// connected, jobname, REPOSITORY_BY_NAME method
testJobEntry(rep, true, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
// connected, jobname, FILENAME method
testJobEntry(rep, true, ObjectLocationSpecificationMethod.FILENAME, ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
}
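testJobEntry(rep, withTransName, method, expectedMethod) does the actual work above but is not shown. A hedged sketch of how it could be implemented: build the &lt;entry&gt; XML a saved job would contain, feed it to JobEntryTrans.loadXML with or without a connected Repository, and assert which specification method was chosen. The tag values and the null metastore are assumptions for illustration; the filename/transname/specification_method tags and the loadXML signature follow my reading of JobEntryTrans.

import java.util.ArrayList;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.xml.XMLHandler;
import org.w3c.dom.Node;

// Illustrative implementation of the helper used throughout the test above.
private void testJobEntry( Repository rep, boolean withTransName, ObjectLocationSpecificationMethod method,
    ObjectLocationSpecificationMethod expectedMethod ) throws KettleXMLException {
  // Assemble the job-entry XML as it would appear inside a saved job file.
  StringBuilder xml = new StringBuilder( "<entry>" );
  xml.append( XMLHandler.addTagValue( "filename", "/home/admin/test.ktr" ) );
  if ( withTransName ) {
    xml.append( XMLHandler.addTagValue( "transname", "test" ) );
  }
  if ( method != null ) {
    xml.append( XMLHandler.addTagValue( "specification_method", method.getCode() ) );
  }
  xml.append( "</entry>" );
  Node entryNode = XMLHandler.getSubNode( XMLHandler.loadXMLString( xml.toString() ), "entry" );

  JobEntryTrans jobEntryTrans = new JobEntryTrans();
  jobEntryTrans.loadXML( entryNode, new ArrayList<DatabaseMeta>(), new ArrayList<SlaveServer>(), rep, null );
  assertEquals( expectedMethod, jobEntryTrans.getSpecificationMethod() );
}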