use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
the class JobEntryJobTest method testConnectedLoad_RepByName_MultipleParameters.
/**
* When connected to the repository and {@link JobEntryJob} references a child job by name using multiple parameters,
* keep {@link ObjectLocationSpecificationMethod} as {@code REPOSITORY_BY_NAME}.
* Load the job from the repository using the specified job name and directory.
*/
@Test
public void testConnectedLoad_RepByName_MultipleParameters() throws Exception {
  Repository myrepo = mock(Repository.class);
  doReturn(true).when(myrepo).isConnected();
  doReturn(rdi).when(myrepo).loadRepositoryDirectoryTree();
  doReturn(null).when(myrepo).getJobEntryAttributeString(any(ObjectId.class), anyString());
  doReturn("rep_name").when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "specification_method");
  doReturn("${jobname}").when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "name");
  doReturn("${repositorypath}").when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "dir_path");

  JobEntryJob jej = getJej();
  jej.loadRep(myrepo, store, JOB_ENTRY_JOB_OBJECT_ID, databases, servers);
  jej.getJobMeta(myrepo, store, space);

  assertEquals(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, jej.getSpecificationMethod());
  verify(myrepo, times(1)).loadJob("job.kjb", directory, null, null);
}
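The space passed to getJobMeta() above comes from the test fixture and is not shown in this excerpt. Since the stubbed "name" and "dir_path" attributes are ${jobname} and ${repositorypath}, while the verified call is loadJob("job.kjb", directory, null, null), the fixture presumably resolves those variables before the repository lookup. A minimal sketch of such a setup follows; the variable values are illustrative guesses, not the real fixture.

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

// Hypothetical variable setup: the names come from the stubbed "${jobname}" and
// "${repositorypath}" attributes, the values are chosen only so that the verified
// loadJob("job.kjb", directory, null, null) call would match.
VariableSpace space = new Variables();
space.setVariable("jobname", "job.kjb");
space.setVariable("repositorypath", "/home/admin/jobs");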
use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
the class JobEntryJobTest method testConnectedLoad_RepByName.
/**
* When connected to the repository and {@link JobEntryJob} references a child job by name,
* keep {@link ObjectLocationSpecificationMethod} as {@code REPOSITORY_BY_NAME}.
* Load the job from the repository using the specified job name and directory.
*/
@Test
public void testConnectedLoad_RepByName() throws Exception {
  Repository myrepo = mock(Repository.class);
  doReturn(true).when(myrepo).isConnected();
  doReturn(rdi).when(myrepo).loadRepositoryDirectoryTree();
  doReturn(null).when(myrepo).getJobEntryAttributeString(any(ObjectId.class), anyString());
  doReturn("rep_name").when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "specification_method");
  doReturn(JOB_ENTRY_FILE_NAME).when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "name");
  doReturn(JOB_ENTRY_FILE_DIRECTORY).when(myrepo).getJobEntryAttributeString(JOB_ENTRY_JOB_OBJECT_ID, "dir_path");

  JobEntryJob jej = getJej();
  jej.loadRep(myrepo, store, JOB_ENTRY_JOB_OBJECT_ID, databases, servers);
  jej.getJobMeta(myrepo, store, space);

  assertEquals(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME, jej.getSpecificationMethod());
  verify(myrepo, times(1)).loadJob(JOB_ENTRY_FILE_NAME, directory, null, null);
}
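Both tests above lean on identifiers defined elsewhere in JobEntryJobTest (JOB_ENTRY_JOB_OBJECT_ID, JOB_ENTRY_FILE_NAME, JOB_ENTRY_FILE_DIRECTORY, rdi, directory, getJej(), store, databases, servers). A rough sketch of what that fixture could look like is shown here; every value and the getJej() body are assumptions for illustration, not the actual test code.

import org.junit.Before;
import org.pentaho.di.job.entries.job.JobEntryJob;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.StringObjectId;

// Hypothetical fixture sketch (Mockito static imports as in the tests above).
private static final ObjectId JOB_ENTRY_JOB_OBJECT_ID = new StringObjectId("jobEntryJobObjectId");
private static final String JOB_ENTRY_FILE_NAME = "job.kjb";
private static final String JOB_ENTRY_FILE_DIRECTORY = "/public/jobs";

private final RepositoryDirectoryInterface directory = mock(RepositoryDirectoryInterface.class);
private final RepositoryDirectoryInterface rdi = mock(RepositoryDirectoryInterface.class);

@Before
public void setUp() throws Exception {
  // Presumably the mocked directory tree resolves the requested path to "directory",
  // which is why loadJob() is verified with that exact instance.
  doReturn(directory).when(rdi).findDirectory(anyString());
}

private JobEntryJob getJej() {
  // In the real test this would also wire in the parent job, log channel, etc.
  return new JobEntryJob();
}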
use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
the class JobExecutionConfigurationTest method testConnectRepository.
@Test
public void testConnectRepository() throws KettleException {
  JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration();
  final RepositoriesMeta repositoriesMeta = mock(RepositoriesMeta.class);
  final RepositoryMeta repositoryMeta = mock(RepositoryMeta.class);
  final Repository repository = mock(Repository.class);
  final String mockRepo = "mockRepo";
  final boolean[] connectionSuccess = { false };

  Repository initialRepo = mock(Repository.class);
  jobExecutionConfiguration.setRepository(initialRepo);
  KettleLogStore.init();

  // Create mock repository plugin
  MockRepositoryPlugin mockRepositoryPlugin = mock(MockRepositoryPlugin.class);
  when(mockRepositoryPlugin.getIds()).thenReturn(new String[] { "mockRepo" });
  when(mockRepositoryPlugin.matches("mockRepo")).thenReturn(true);
  when(mockRepositoryPlugin.getName()).thenReturn("mock-repository");
  when(mockRepositoryPlugin.getClassMap()).thenAnswer(new Answer<Map<Class<?>, String>>() {
    @Override
    public Map<Class<?>, String> answer(InvocationOnMock invocation) throws Throwable {
      Map<Class<?>, String> dbMap = new HashMap<Class<?>, String>();
      dbMap.put(Repository.class, repositoryMeta.getClass().getName());
      return dbMap;
    }
  });
  PluginRegistry.getInstance().registerPlugin(RepositoryPluginType.class, mockRepositoryPlugin);

  // Define valid connection criteria
  when(repositoriesMeta.findRepository(anyString())).thenAnswer(new Answer<RepositoryMeta>() {
    @Override
    public RepositoryMeta answer(InvocationOnMock invocation) throws Throwable {
      return mockRepo.equals(invocation.getArguments()[0]) ? repositoryMeta : null;
    }
  });
  when(mockRepositoryPlugin.loadClass(Repository.class)).thenReturn(repository);
  doAnswer(new Answer() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      if ("username".equals(invocation.getArguments()[0]) && "password".equals(invocation.getArguments()[1])) {
        connectionSuccess[0] = true;
      } else {
        connectionSuccess[0] = false;
        throw new KettleException("Mock Repository connection failed");
      }
      return null;
    }
  }).when(repository).connect(anyString(), anyString());

  // Ignore repository not found in RepositoriesMeta
  jobExecutionConfiguration.connectRepository(repositoriesMeta, "notFound", "username", "password");
  assertEquals("Repository Changed", initialRepo, jobExecutionConfiguration.getRepository());

  // Ignore failed attempt to connect
  jobExecutionConfiguration.connectRepository(repositoriesMeta, mockRepo, "username", "");
  assertEquals("Repository Changed", initialRepo, jobExecutionConfiguration.getRepository());

  // Save repository if connection passes
  jobExecutionConfiguration.connectRepository(repositoriesMeta, mockRepo, "username", "password");
  assertEquals("Repository didn't change", repository, jobExecutionConfiguration.getRepository());
  assertTrue("Repository not connected", connectionSuccess[0]);
}
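MockRepositoryPlugin is not a stock Kettle class. For the calls stubbed above to compile, it only needs to combine the plugin contract (getIds(), matches(), getName(), getClassMap()) with class loading (loadClass()); a plausible declaration under that assumption is shown below.

import org.pentaho.di.core.plugins.ClassLoadingPluginInterface;
import org.pentaho.di.core.plugins.PluginInterface;

// Assumed helper interface; the actual declaration in JobExecutionConfigurationTest may differ.
public interface MockRepositoryPlugin extends PluginInterface, ClassLoadingPluginInterface {
}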
use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
the class UnivariateStatsMetaFunctionTest method testRepoConstructor.
@Test
public void testRepoConstructor() throws ParseException, KettleException, IOException {
  // Fixture with every calculation flag enabled, percentile 0.5 and interpolation on
  String jsString = IOUtils.toString(UnivariateStatsMetaTest.class.getClassLoader().getResourceAsStream(
      "org/pentaho/di/trans/steps/univariatestats/trueValuesUnivariateStatsMetaFunctionNode.json"));
  Repository repo = new MemoryRepository(jsString);
  UnivariateStatsMetaFunction function = new UnivariateStatsMetaFunction(repo, new StringObjectId("test"), 0);
  assertEquals("test", function.getSourceFieldName());
  assertTrue(function.getCalcN());
  assertTrue(function.getCalcMean());
  assertTrue(function.getCalcStdDev());
  assertTrue(function.getCalcMin());
  assertTrue(function.getCalcMax());
  assertTrue(function.getCalcMedian());
  assertEquals(0.5, function.getCalcPercentile(), 0);
  assertTrue(function.getInterpolatePercentile());

  // Fixture with every flag disabled, no source field and no percentile configured
  jsString = IOUtils.toString(UnivariateStatsMetaTest.class.getClassLoader().getResourceAsStream(
      "org/pentaho/di/trans/steps/univariatestats/falseValuesUnivariateStatsMetaFunctionNode.json"));
  repo = new MemoryRepository(jsString);
  function = new UnivariateStatsMetaFunction(repo, new StringObjectId("test"), 0);
  assertTrue(Utils.isEmpty(function.getSourceFieldName()));
  assertFalse(function.getCalcN());
  assertFalse(function.getCalcMean());
  assertFalse(function.getCalcStdDev());
  assertFalse(function.getCalcMin());
  assertFalse(function.getCalcMax());
  assertFalse(function.getCalcMedian());
  assertEquals(-1.0, function.getCalcPercentile(), 0);
  assertFalse(function.getInterpolatePercentile());
}
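IOUtils.toString(InputStream) relies on the platform default charset and is deprecated in current commons-io releases. If the fixture loading above were factored out, a null-safe, explicit-charset helper along these lines would do the same job; the helper name is ours, not part of the test.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import org.apache.commons.io.IOUtils;

// Hypothetical helper: read a classpath resource as UTF-8 and fail fast if it is missing.
private static String readResource(String path) throws IOException {
  try (InputStream in = Objects.requireNonNull(
      UnivariateStatsMetaTest.class.getClassLoader().getResourceAsStream(path),
      "missing test resource: " + path)) {
    return IOUtils.toString(in, StandardCharsets.UTF_8);
  }
}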
use of org.pentaho.di.repository.Repository in project pentaho-kettle by pentaho.
the class TableOutputMetaTest method testSaveRep.
@Test
public void testSaveRep() throws Exception {
  TableOutputMeta tableOutputMeta = new TableOutputMeta();
  tableOutputMeta.loadXML(getTestNode(), databases, metaStore);

  StringObjectId id_step = new StringObjectId("stepid");
  StringObjectId id_transformation = new StringObjectId("transid");
  Repository rep = mock(Repository.class);
  tableOutputMeta.saveRep(rep, metaStore, id_transformation, id_step);

  verify(rep).saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", null);
  verify(rep).saveStepAttribute(id_transformation, id_step, "schema", "public");
  verify(rep).saveStepAttribute(id_transformation, id_step, "table", "sales_csv");
  verify(rep).saveStepAttribute(id_transformation, id_step, "commit", "1000");
  verify(rep).saveStepAttribute(id_transformation, id_step, "truncate", true);
  verify(rep).saveStepAttribute(id_transformation, id_step, "ignore_errors", false);
  verify(rep).saveStepAttribute(id_transformation, id_step, "use_batch", true);
  verify(rep).saveStepAttribute(id_transformation, id_step, "specify_fields", true);
  verify(rep).saveStepAttribute(id_transformation, id_step, "partitioning_enabled", false);
  verify(rep).saveStepAttribute(id_transformation, id_step, "partitioning_field", null);
  verify(rep).saveStepAttribute(id_transformation, id_step, "partitioning_daily", false);
  verify(rep).saveStepAttribute(id_transformation, id_step, "partitioning_monthly", true);
  verify(rep).saveStepAttribute(id_transformation, id_step, "tablename_in_field", false);
  verify(rep).saveStepAttribute(id_transformation, id_step, "tablename_field", null);
  verify(rep).saveStepAttribute(id_transformation, id_step, "tablename_in_table", true);
  verify(rep).saveStepAttribute(id_transformation, id_step, "return_keys", false);
  verify(rep).saveStepAttribute(id_transformation, id_step, "return_field", null);
  verify(rep).saveStepAttribute(id_transformation, id_step, 0, "column_name", "ORDERNUMBER");
  verify(rep).saveStepAttribute(id_transformation, id_step, 0, "stream_name", "ORDERNUMBER");
  verify(rep).saveStepAttribute(id_transformation, id_step, 1, "column_name", "QUANTITYORDERED");
  verify(rep).saveStepAttribute(id_transformation, id_step, 1, "stream_name", "QUANTITYORDERED");
  verify(rep).saveStepAttribute(id_transformation, id_step, 2, "column_name", "PRICEEACH");
  verify(rep).saveStepAttribute(id_transformation, id_step, 2, "stream_name", "PRICEEACH");
  verifyNoMoreInteractions(rep);
}
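getTestNode() belongs to the test fixture and is not shown in this excerpt. Judging from the verified attributes, it parses a small <step> XML fragment for the step; a sketch of how such a helper is typically written in Kettle tests follows. The tag names and values are inferred from the repository attribute names verified above and may not match the real fixture exactly.

import org.pentaho.di.core.xml.XMLHandler;
import org.w3c.dom.Node;

// Hypothetical reconstruction of the fixture node; tag names are guessed from the
// attribute names saved above and may differ from the actual test XML.
private Node getTestNode() throws Exception {
  String xml = "<step>"
      + "<schema>public</schema>"
      + "<table>sales_csv</table>"
      + "<commit>1000</commit>"
      + "<truncate>Y</truncate>"
      + "<use_batch>Y</use_batch>"
      + "<specify_fields>Y</specify_fields>"
      + "<partitioning_monthly>Y</partitioning_monthly>"
      + "<tablename_in_table>Y</tablename_in_table>"
      + "<fields>"
      + "<field><column_name>ORDERNUMBER</column_name><stream_name>ORDERNUMBER</stream_name></field>"
      + "<field><column_name>QUANTITYORDERED</column_name><stream_name>QUANTITYORDERED</stream_name></field>"
      + "<field><column_name>PRICEEACH</column_name><stream_name>PRICEEACH</stream_name></field>"
      + "</fields>"
      + "</step>";
  return XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), "step");
}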