
Example 36 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.

The class BlackBoxIT, method runTransOrJob.

// This is a generic JUnit 4 test that takes no parameters
@Test
public void runTransOrJob() throws Exception {
    // The parameterized inputs are supplied as fields of the test class:
    //   File transFile           - the transformation file under test
    //   List<File> expectedFiles - the expected output files to compare against
    LogChannelInterface log = new LogChannel("BlackBoxTest [" + transFile.toString() + "]");
    if (!transFile.exists()) {
        log.logError("Transformation does not exist: " + getPath(transFile));
        addFailure("Transformation does not exist: " + getPath(transFile));
        fail("Transformation does not exist: " + getPath(transFile));
    }
    if (expectedFiles.isEmpty()) {
        addFailure("No expected output files found: " + getPath(transFile));
        fail("No expected output files found: " + getPath(transFile));
    }
    Result result = runTrans(transFile.getAbsolutePath(), log);
    // Compare each expected output file with the matching actual output file
    for (int i = 0; i < expectedFiles.size(); i++) {
        File expected = expectedFiles.get(i);
        if (expected.getAbsoluteFile().toString().contains(".expected")) {
            // create a path to the expected output
            String actualFile = expected.getAbsolutePath();
            // multiple files case
            actualFile = actualFile.replaceFirst(".expected_" + i + ".", ".actual_" + i + ".");
            // single file case
            actualFile = actualFile.replaceFirst(".expected.", ".actual.");
            File actual = new File(actualFile);
            if (result.getResult()) {
                fileCompare(expected, actual);
            }
        }
    }
    // The transformation reported an error: decide whether this was an expected failure
    if (!result.getResult()) {
        String logStr = KettleLogStore.getAppender().getBuffer(result.getLogChannelId(), true).toString();
        if (expectedFiles.size() == 0) {
            // We haven't got a ".fail.txt" file, so this is a real failure
            fail("Error running " + getPath(transFile) + ":" + logStr);
        }
    }
}
Also used: LogChannel (org.pentaho.di.core.logging.LogChannel), LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface), File (java.io.File), Result (org.pentaho.di.core.Result), Test (org.junit.Test)
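
For context, a minimal sketch of the LogChannel pattern the example above relies on: construct a channel with a descriptive subject, log through it, and read the captured lines back from KettleLogStore using the channel id, just as BlackBoxIT does when a run fails. This is an illustrative snippet rather than project code, and it assumes the Kettle environment (and with it the central log store) has been initialized, for example via KettleEnvironment.init().

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;

public class LogChannelSketch {
    public static void main(String[] args) throws Exception {
        // Initializes the Kettle environment, which also sets up the central log store.
        KettleEnvironment.init();
        // Every LogChannel gets a unique id under which its lines are buffered.
        LogChannelInterface log = new LogChannel("LogChannel sketch");
        log.logBasic("Starting work...");
        log.logError("Something went wrong");
        // Read back everything this channel wrote, the same call BlackBoxIT uses
        // to dump the log of a failed transformation.
        String captured = KettleLogStore.getAppender().getBuffer(log.getLogChannelId(), true).toString();
        System.out.println(captured);
    }
}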

Example 37 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.

The class MasterSlaveIT, method runMultipleCopiesOnMultipleSlaves2.

/**
 * This test reads a CSV file and sends the data to 3 copies on 3 slave servers.<br>
 */
public void runMultipleCopiesOnMultipleSlaves2() throws Exception {
    TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-hops-between-multiple-copies-steps-on-cluster.ktr");
    TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
    TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
    LogChannel logChannel = createLogChannel("cluster unit test <runMultipleCopiesOnMultipleSlaves2>");
    long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
    assertEquals(0L, nrErrors);
    String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-multiple-copies-on-multiple-slaves2.txt");
    assertEqualsIgnoreWhitespacesAndCase("90000", result);
}
Also used: TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), TransMeta (org.pentaho.di.trans.TransMeta), LogChannel (org.pentaho.di.core.logging.LogChannel), TransSplitter (org.pentaho.di.trans.cluster.TransSplitter)
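
The MasterSlaveIT examples above and below call a createLogChannel helper whose body is not shown on this page. A plausible stand-in, assuming it does little more than construct a named channel and pick a log level (the real helper may differ), could look like this:

import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogLevel;

// Hypothetical stand-in for the createLogChannel helper used by MasterSlaveIT.
final class ClusterLogChannels {
    static LogChannel createLogChannel(String name) {
        LogChannel logChannel = new LogChannel(name);
        // BASIC keeps the clustered-monitoring output readable; switch to
        // LogLevel.DEBUG when troubleshooting a failing run.
        logChannel.setLogLevel(LogLevel.BASIC);
        return logChannel;
    }
}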

Example 38 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.

The class MasterSlaveIT, method runParallelFileReadOnMasterWithCopies.

/**
 * This test reads a CSV file in parallel on the master in 3 copies.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 */
public void runParallelFileReadOnMasterWithCopies() throws Exception {
    TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master-with-copies.ktr");
    TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
    TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
    LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnMasterWithCopies>");
    long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
    assertEquals(0L, nrErrors);
    String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result-with-copies.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Also used: TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), TransMeta (org.pentaho.di.trans.TransMeta), LogChannel (org.pentaho.di.core.logging.LogChannel), TransSplitter (org.pentaho.di.trans.cluster.TransSplitter)

Example 39 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.

The class MasterSlaveIT, method runParallelFileReadOnMaster.

/**
 * This test reads a CSV file in parallel on the master in 1 copy.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 * We want to make sure that only 1 copy is considered.<br>
 */
public void runParallelFileReadOnMaster() throws Exception {
    TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master.ktr");
    TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
    TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
    LogChannel logChannel = createLogChannel("cluster unit test <testParallelFileReadOnMaster>");
    long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
    assertEquals(0L, nrErrors);
    String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Also used: TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), TransMeta (org.pentaho.di.trans.TransMeta), LogChannel (org.pentaho.di.core.logging.LogChannel), TransSplitter (org.pentaho.di.trans.cluster.TransSplitter)

Example 40 with LogChannel

Use of org.pentaho.di.core.logging.LogChannel in project pentaho-kettle by pentaho.

The class MasterSlaveIT, method runParallelFileReadOnSlaves.

/**
 * This test reads a CSV file in parallel on all 3 slaves, each with 1 copy.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 */
public void runParallelFileReadOnSlaves() throws Exception {
    TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves.ktr");
    TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
    TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
    LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnSlaves>");
    long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
    assertEquals(0L, nrErrors);
    String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Also used: TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration), TransMeta (org.pentaho.di.trans.TransMeta), LogChannel (org.pentaho.di.core.logging.LogChannel), TransSplitter (org.pentaho.di.trans.cluster.TransSplitter)

Aggregations

LogChannel (org.pentaho.di.core.logging.LogChannel): 44
TransMeta (org.pentaho.di.trans.TransMeta): 17
TransExecutionConfiguration (org.pentaho.di.trans.TransExecutionConfiguration): 13
TransSplitter (org.pentaho.di.trans.cluster.TransSplitter): 13
Test (org.junit.Test): 8
File (java.io.File): 6
LogChannelInterface (org.pentaho.di.core.logging.LogChannelInterface): 6
Before (org.junit.Before): 4
Repository (org.pentaho.di.repository.Repository): 4
Node (org.w3c.dom.Node): 4
IOException (java.io.IOException): 3
FileObject (org.apache.commons.vfs2.FileObject): 3
InvocationOnMock (org.mockito.invocation.InvocationOnMock): 3
SlaveServer (org.pentaho.di.cluster.SlaveServer): 3
FileInputStream (java.io.FileInputStream): 2
InputStream (java.io.InputStream): 2
ArrayList (java.util.ArrayList): 2
Display (org.eclipse.swt.widgets.Display): 2
Matchers.anyObject (org.mockito.Matchers.anyObject): 2
KettleException (org.pentaho.di.core.exception.KettleException): 2