Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.
Example: class CarteIT, method before().
@Before
public void before() {
  // Give the Carte instance fresh job and transformation maps, plus a socket
  // repository that logs through a dedicated "Carte" LogChannel.
  carte.setJobMap(new JobMap());
  carte.setTransformationMap(new TransformationMap());
  carte.setSocketRepository(new SocketRepository(new LogChannel("Carte")));
  // Register the Carte servlets on an embedded Jetty ServletTester.
  tester = new ServletTester();
  tester.addServlet(GetRootServlet.class, "/*");
  tester.addServlet(GetStatusServlet.class, GetStatusServlet.CONTEXT_PATH);
  tester.addServlet(AddTransServlet.class, RegisterTransServlet.CONTEXT_PATH);
  tester.addServlet(StartTransServlet.class, StartTransServlet.CONTEXT_PATH);
  tester.addServlet(PauseTransServlet.class, PauseTransServlet.CONTEXT_PATH);
  try {
    tester.start();
    System.out.println("Started");
  } catch (Exception ex) {
    ex.printStackTrace();
    Assert.fail(ex.getMessage());
  }
}
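With the container started, a test can drive any of the registered servlets by sending a raw HTTP request through the tester. A minimal sketch, assuming Jetty's ServletTester.getResponses(String) text interface; the request line and assertion are illustrative, not taken from CarteIT:

@Test
public void getStatusShouldAnswer() throws Exception {
  // Raw HTTP/1.1 request against the in-process container started in before().
  String request = "GET " + GetStatusServlet.CONTEXT_PATH + " HTTP/1.1\r\n"
      + "Host: tester\r\n"
      + "\r\n";
  String response = tester.getResponses(request);
  // A healthy GetStatusServlet replies with a 200 status line.
  Assert.assertTrue(response.startsWith("HTTP/1.1 200"));
}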
Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.
Example: class RunTransServletTest, method testRunTransServletCheckParameter().
@Test
public void testRunTransServletCheckParameter() throws Exception {
  HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
  HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
  Mockito.when(request.getParameter("trans")).thenReturn("home/test.rtr");
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter(out);
  Mockito.when(request.getContextPath()).thenReturn(RunTransServlet.CONTEXT_PATH);
  Mockito.when(response.getWriter()).thenReturn(printWriter);
  // Stub the repository chain so the servlet can "load" a mocked TransMeta.
  TransformationMap mockTransformationMap = Mockito.mock(TransformationMap.class);
  SlaveServerConfig slaveServerConfig = Mockito.mock(SlaveServerConfig.class);
  Mockito.when(mockTransformationMap.getSlaveServerConfig()).thenReturn(slaveServerConfig);
  Repository repository = Mockito.mock(Repository.class);
  Mockito.when(slaveServerConfig.getRepository()).thenReturn(repository);
  RepositoryDirectoryInterface repositoryDirectoryInterface = Mockito.mock(RepositoryDirectoryInterface.class);
  Mockito.when(repository.loadRepositoryDirectoryTree()).thenReturn(repositoryDirectoryInterface);
  Mockito.when(repositoryDirectoryInterface.findDirectory(Mockito.anyString())).thenReturn(repositoryDirectoryInterface);
  TransMeta transMeta = Mockito.mock(TransMeta.class);
  Mockito.when(repository.loadTransformation(Mockito.any(), Mockito.any())).thenReturn(transMeta);
  // The transformation declares one named parameter...
  String testParameter = "testParameter";
  Mockito.when(transMeta.listVariables()).thenReturn(new String[] { testParameter });
  Mockito.when(transMeta.getVariable(Mockito.anyString())).thenReturn("default value");
  Mockito.when(transMeta.listParameters()).thenReturn(new String[] { testParameter });
  // ...and the HTTP request supplies a value for it. A StringTokenizer doubles
  // as the Enumeration returned by getParameterNames().
  Mockito.when(request.getParameterNames()).thenReturn(new StringTokenizer(testParameter));
  String testValue = "testValue";
  Mockito.when(request.getParameterValues(testParameter)).thenReturn(new String[] { testValue });
  // Only doGet() runs for real; createTrans() returns a Trans built around the mocked meta.
  RunTransServlet runTransServlet = Mockito.mock(RunTransServlet.class);
  Mockito.doCallRealMethod().when(runTransServlet).doGet(Mockito.anyObject(), Mockito.anyObject());
  Trans trans = new Trans(transMeta, new SimpleLoggingObject(RunTransServlet.CONTEXT_PATH, LoggingObjectType.CARTE, null));
  Mockito.when(runTransServlet.createTrans(Mockito.anyObject(), Mockito.anyObject())).thenReturn(trans);
  Mockito.when(transMeta.getParameterValue(Mockito.eq(testParameter))).thenReturn(testValue);
  runTransServlet.log = new LogChannel("RunTransServlet");
  runTransServlet.transformationMap = mockTransformationMap;
  runTransServlet.doGet(request, response);
  // The request parameter must have been copied onto the executed Trans.
  Assert.assertEquals(testValue, trans.getParameterValue(testParameter));
}
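The final assertion only holds if doGet() walks the request's parameter names and forwards matching values to the transformation before it runs. A hypothetical reduction of that logic, under the assumption that the servlet filters request parameters against TransMeta.listParameters(); the method name copyRequestParameters is invented for illustration:

// Hypothetical sketch of the behaviour the test verifies; the real
// RunTransServlet.doGet() also renders output and handles errors.
void copyRequestParameters(HttpServletRequest request, TransMeta transMeta, Trans trans) throws UnknownParamException {
  String[] declaredParameters = transMeta.listParameters();
  Enumeration<?> parameterNames = request.getParameterNames();
  while (parameterNames.hasMoreElements()) {
    String name = (String) parameterNames.nextElement();
    String[] values = request.getParameterValues(name);
    // Forward only request parameters that the transformation declares.
    if (Const.indexOfString(name, declaredParameters) >= 0 && values != null && values.length > 0) {
      trans.setParameterValue(name, values[0]);
    }
  }
}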
Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.
Example: class MasterSlaveIT, method runParallelFileReadOnSlavesWithPartitioning2().
/**
* This test reads a CSV file in parallel on all 3 slaves, each with 4 partitions.<br>
* This is a variation on the preceding test, with two clustered and partitioned steps in sequence.<br>
* It then passes the data over to a dummy step on the slaves.<br>
*/
public void runParallelFileReadOnSlavesWithPartitioning2() throws Exception {
  TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves-with-partitioning2.ktr");
  TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
  TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
  LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnSlavesWithPartitioning2>");
  long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
  assertEquals(0L, nrErrors);
  String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves-with-partitioning2.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", result);
}
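The MasterSlaveIT tests share a few helpers that are not shown on this page (createLogChannel, loadFileContent, assertEqualsIgnoreWhitespacesAndCase). A plausible sketch of createLogChannel, assuming it is a thin wrapper around the LogChannel constructor; the BASIC log level is an assumption:

// Assumed shape of the shared helper; the real MasterSlaveIT may differ.
private LogChannel createLogChannel(String name) {
  LogChannel logChannel = new LogChannel(name);
  logChannel.setLogLevel(LogLevel.BASIC); // assumption: plain progress logging for monitoring
  return logChannel;
}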
Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.
Example: class MasterSlaveIT, method runParallelFileReadOnSlavesWithPartitioning().
/**
* This test reads a CSV file in parallel on all 3 slaves, each with 4 partitions.<br>
* It then passes the data over to a dummy step on the slaves.<br>
*/
public void runParallelFileReadOnSlavesWithPartitioning() throws Exception {
  TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves-with-partitioning.ktr");
  TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
  TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
  LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnSlavesWithPartitioning>");
  long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
  assertEquals(0L, nrErrors);
  String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves-with-partitioning.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.
Example: class MasterSlaveIT, method runOneStepClustered().
/**
* This test generates rows on the master, generates random values on the clustered slaves, and brings them back to the master.<br>
* See also: PDI-6324 : Generate Rows to a clustered step ceases to work
*/
public void runOneStepClustered() throws Exception {
  TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/one-step-clustered.ktr");
  TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
  TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
  LogChannel logChannel = createLogChannel("cluster unit test <runOneStepClustered>");
  long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
  assertEquals(0L, nrErrors);
  String result = loadFileContent(transMeta, "${java.io.tmpdir}/one-step-clustered.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", result);
}
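All three tests follow the same clustered-execution pattern: split the transformation across the slaves, start it, monitor until completion, then inspect the output. A stand-alone sketch of that pattern outside the test harness, assuming a cluster schema is already defined in the .ktr file; the configuration flags mirror what createClusteredTransExecutionConfiguration() presumably sets, and the method itself is illustrative:

static long runClustered(String ktrPath) throws Exception {
  TransMeta transMeta = new TransMeta(ktrPath);
  // Configure a clustered run (assumption: these are the flags the test helper sets).
  TransExecutionConfiguration config = new TransExecutionConfiguration();
  config.setExecutingClustered(true);
  config.setClusterPosting(true);
  config.setClusterPreparing(true);
  config.setClusterStarting(true);
  // Split the transformation over the slave servers and start it everywhere.
  TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
  // Block until the clustered run finishes, polling every second;
  // the return value is the number of errors across all slaves.
  LogChannel logChannel = new LogChannel("clustered run");
  return Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
}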