Search in sources:

Example 11 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in the project pentaho-kettle by pentaho.

The class CarteIT defines the method before() as follows.

/**
 * Sets up an embedded Jetty servlet tester backed by a fresh Carte
 * environment (empty job/transformation maps and a socket repository
 * logging through a "Carte" LogChannel) before each test.
 *
 * <p>Fails the test immediately if the servlet tester cannot start.
 */
@Before
public void before() {
    carte.setJobMap(new JobMap());
    carte.setTransformationMap(new TransformationMap());
    carte.setSocketRepository(new SocketRepository(new LogChannel("Carte")));
    tester = new ServletTester();
    tester.addServlet(GetRootServlet.class, "/*");
    tester.addServlet(GetStatusServlet.class, GetStatusServlet.CONTEXT_PATH);
    // NOTE(review): AddTransServlet is mapped to RegisterTransServlet's
    // context path — presumably both handle the same endpoint; confirm
    // this pairing is intentional.
    tester.addServlet(AddTransServlet.class, RegisterTransServlet.CONTEXT_PATH);
    tester.addServlet(StartTransServlet.class, StartTransServlet.CONTEXT_PATH);
    tester.addServlet(PauseTransServlet.class, PauseTransServlet.CONTEXT_PATH);
    try {
        tester.start();
        System.out.println("Started");
    } catch (Exception ex) {
        // Include the exception itself: ex.getMessage() alone can be null,
        // which would yield an uninformative test failure.
        ex.printStackTrace();
        Assert.fail("Failed to start servlet tester: " + ex);
    }
}
Also used : ServletTester(org.eclipse.jetty.testing.ServletTester) LogChannel(org.pentaho.di.core.logging.LogChannel) IOException(java.io.IOException) SAXException(org.xml.sax.SAXException) Before(org.junit.Before)

Example 12 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in the project pentaho-kettle by pentaho.

The class RunTransServletTest defines the method testRunTransServletCheckParameter() as follows.

/**
 * Verifies that RunTransServlet.doGet() copies a request parameter onto the
 * transformation: the servlet is asked to run "home/test.rtr" with
 * testParameter=testValue, and afterwards the Trans must report that value
 * for the parameter.
 */
@Test
public void testRunTransServletCheckParameter() throws Exception {
    // --- Arrange: mock the HTTP exchange for GET .../runTrans?trans=home/test.rtr
    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
    HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
    Mockito.when(request.getParameter("trans")).thenReturn("home/test.rtr");
    // Capture servlet output in a StringWriter (not asserted on here).
    StringWriter out = new StringWriter();
    PrintWriter printWriter = new PrintWriter(out);
    Mockito.when(request.getContextPath()).thenReturn(RunTransServlet.CONTEXT_PATH);
    Mockito.when(response.getWriter()).thenReturn(printWriter);
    // --- Arrange: mock the repository chain so the servlet can "load" a TransMeta
    // from the trans path: slave config -> repository -> directory tree -> transformation.
    TransformationMap mockTransformationMap = Mockito.mock(TransformationMap.class);
    SlaveServerConfig slaveServerConfig = Mockito.mock(SlaveServerConfig.class);
    Mockito.when(mockTransformationMap.getSlaveServerConfig()).thenReturn(slaveServerConfig);
    Repository repository = Mockito.mock(Repository.class);
    Mockito.when(slaveServerConfig.getRepository()).thenReturn(repository);
    RepositoryDirectoryInterface repositoryDirectoryInterface = Mockito.mock(RepositoryDirectoryInterface.class);
    Mockito.when(repository.loadRepositoryDirectoryTree()).thenReturn(repositoryDirectoryInterface);
    Mockito.when(repositoryDirectoryInterface.findDirectory(Mockito.anyString())).thenReturn(repositoryDirectoryInterface);
    TransMeta transMeta = Mockito.mock(TransMeta.class);
    Mockito.when(repository.loadTransformation(Mockito.any(), Mockito.any())).thenReturn(transMeta);
    // The loaded TransMeta declares one variable/parameter named "testParameter".
    String testParameter = "testParameter";
    Mockito.when(transMeta.listVariables()).thenReturn(new String[] { testParameter });
    Mockito.when(transMeta.getVariable(Mockito.anyString())).thenReturn("default value");
    Mockito.when(transMeta.listParameters()).thenReturn(new String[] { testParameter });
    // The request supplies a value for that parameter.
    Mockito.when(request.getParameterNames()).thenReturn(new StringTokenizer(testParameter));
    String testValue = "testValue";
    Mockito.when(request.getParameterValues(testParameter)).thenReturn(new String[] { testValue });
    // --- Arrange: mock the servlet itself but run the real doGet(), and have
    // createTrans() return a real Trans built from the mocked TransMeta so the
    // parameter copy can be observed on it.
    RunTransServlet runTransServlet = Mockito.mock(RunTransServlet.class);
    Mockito.doCallRealMethod().when(runTransServlet).doGet(Mockito.anyObject(), Mockito.anyObject());
    Trans trans = new Trans(transMeta, new SimpleLoggingObject(RunTransServlet.CONTEXT_PATH, LoggingObjectType.CARTE, null));
    Mockito.when(runTransServlet.createTrans(Mockito.anyObject(), Mockito.anyObject())).thenReturn(trans);
    Mockito.when(transMeta.getParameterValue(Mockito.eq(testParameter))).thenReturn(testValue);
    // Fields the real doGet() reads must be populated on the mock instance.
    runTransServlet.log = new LogChannel("RunTransServlet");
    runTransServlet.transformationMap = mockTransformationMap;
    // --- Act
    runTransServlet.doGet(request, response);
    // --- Assert: the request's parameter value reached the Trans.
    Assert.assertEquals(testValue, trans.getParameterValue(testParameter));
}
Also used : RepositoryDirectoryInterface(org.pentaho.di.repository.RepositoryDirectoryInterface) TransMeta(org.pentaho.di.trans.TransMeta) HttpServletResponse(javax.servlet.http.HttpServletResponse) LogChannel(org.pentaho.di.core.logging.LogChannel) SimpleLoggingObject(org.pentaho.di.core.logging.SimpleLoggingObject) HttpServletRequest(javax.servlet.http.HttpServletRequest) Repository(org.pentaho.di.repository.Repository) StringTokenizer(java.util.StringTokenizer) StringWriter(java.io.StringWriter) Trans(org.pentaho.di.trans.Trans) PrintWriter(java.io.PrintWriter) Test(org.junit.Test)

Example 13 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in the project pentaho-kettle by pentaho.

The class MasterSlaveIT defines the method runParallelFileReadOnSlavesWithPartitioning2() as follows.

/**
 * Reads a CSV file in parallel on all 3 slaves, 4 partitions each, with two
 * steps in sequence running clustered &amp; partitioned (a variation on the
 * single-step test), then hands the rows to a dummy step on the slaves.
 */
public void runParallelFileReadOnSlavesWithPartitioning2() throws Exception {
    // Load the transformation and swap in the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves-with-partitioning2.ktr");
    // Launch the clustered execution.
    TransSplitter splitter = Trans.executeClustered(meta, createClusteredTransExecutionConfiguration());
    // Monitor the clustered run to completion; it must finish without errors.
    LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnSlavesWithPartitioning2>");
    assertEquals(0L, Trans.monitorClusteredTransformation(log, splitter, null, 1));
    // The produced output file must hold the expected row count.
    assertEqualsIgnoreWhitespacesAndCase("100", loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves-with-partitioning2.txt"));
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) TransMeta(org.pentaho.di.trans.TransMeta) LogChannel(org.pentaho.di.core.logging.LogChannel) TransSplitter(org.pentaho.di.trans.cluster.TransSplitter)

Example 14 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in the project pentaho-kettle by pentaho.

The class MasterSlaveIT defines the method runParallelFileReadOnSlavesWithPartitioning() as follows.

/**
 * Reads a CSV file in parallel on all 3 slaves, 4 partitions each, then hands
 * the rows to a dummy step on the slaves.
 */
public void runParallelFileReadOnSlavesWithPartitioning() throws Exception {
    // Load the transformation and swap in the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves-with-partitioning.ktr");
    // Launch the clustered execution.
    TransSplitter splitter = Trans.executeClustered(meta, createClusteredTransExecutionConfiguration());
    // Monitor the clustered run to completion; it must finish without errors.
    LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnSlavesWithPartitioning>");
    assertEquals(0L, Trans.monitorClusteredTransformation(log, splitter, null, 1));
    // The produced output file must hold the expected row count.
    assertEqualsIgnoreWhitespacesAndCase("100", loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves-with-partitioning.txt"));
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) TransMeta(org.pentaho.di.trans.TransMeta) LogChannel(org.pentaho.di.core.logging.LogChannel) TransSplitter(org.pentaho.di.trans.cluster.TransSplitter)

Example 15 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in the project pentaho-kettle by pentaho.

The class MasterSlaveIT defines the method runOneStepClustered() as follows.

/**
 * Generates rows on the master, computes random values on the clustered
 * slaves, and brings the rows back to the master.
 * Regression test for PDI-6324: Generate Rows to a clustered step ceases to work.
 */
public void runOneStepClustered() throws Exception {
    // Load the transformation and swap in the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/one-step-clustered.ktr");
    // Launch the clustered execution.
    TransSplitter splitter = Trans.executeClustered(meta, createClusteredTransExecutionConfiguration());
    // Monitor the clustered run to completion; it must finish without errors.
    LogChannel log = createLogChannel("cluster unit test <runOneStepClustered>");
    assertEquals(0L, Trans.monitorClusteredTransformation(log, splitter, null, 1));
    // The produced output file must hold the expected row count.
    assertEqualsIgnoreWhitespacesAndCase("10000", loadFileContent(meta, "${java.io.tmpdir}/one-step-clustered.txt"));
}
Also used : TransExecutionConfiguration(org.pentaho.di.trans.TransExecutionConfiguration) TransMeta(org.pentaho.di.trans.TransMeta) LogChannel(org.pentaho.di.core.logging.LogChannel) TransSplitter(org.pentaho.di.trans.cluster.TransSplitter)

Aggregations

LogChannel (org.pentaho.di.core.logging.LogChannel)44 TransMeta (org.pentaho.di.trans.TransMeta)17 TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration)13 TransSplitter (org.pentaho.di.trans.cluster.TransSplitter)13 Test (org.junit.Test)8 File (java.io.File)6 LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface)6 Before (org.junit.Before)4 Repository (org.pentaho.di.repository.Repository)4 Node (org.w3c.dom.Node)4 IOException (java.io.IOException)3 FileObject (org.apache.commons.vfs2.FileObject)3 InvocationOnMock (org.mockito.invocation.InvocationOnMock)3 SlaveServer (org.pentaho.di.cluster.SlaveServer)3 FileInputStream (java.io.FileInputStream)2 InputStream (java.io.InputStream)2 ArrayList (java.util.ArrayList)2 Display (org.eclipse.swt.widgets.Display)2 Matchers.anyObject (org.mockito.Matchers.anyObject)2 KettleException (org.pentaho.di.core.exception.KettleException)2