Example use of org.pentaho.di.core.logging.LogChannel in the pentaho-kettle project (by Pentaho): class BlackBoxIT, method runTransOrJob.
// This is a generic JUnit 4 test that takes no parameters
@Test
public void runTransOrJob() throws Exception {
  // Params are:
  // File transFile
  // List<File> expectedFiles
  LogChannelInterface log = new LogChannel("BlackBoxTest [" + transFile.toString() + "]");
  if (!transFile.exists()) {
    log.logError("Transformation does not exist: " + getPath(transFile));
    addFailure("Transformation does not exist: " + getPath(transFile));
    fail("Transformation does not exist: " + getPath(transFile));
  }
  if (expectedFiles.isEmpty()) {
    addFailure("No expected output files found: " + getPath(transFile));
    fail("No expected output files found: " + getPath(transFile));
  }
  Result result = runTrans(transFile.getAbsolutePath(), log);
  //
  // Compare each ".expected" file against the ".actual" file the transformation produced.
  for (int i = 0; i < expectedFiles.size(); i++) {
    File expected = expectedFiles.get(i);
    if (expected.getAbsoluteFile().toString().contains(".expected")) {
      // create a path to the expected output
      String actualFile = expected.getAbsolutePath();
      // replaceFirst() takes a REGEX: the dots must be escaped, otherwise '.' matches
      // any character and unrelated file names could be rewritten by accident.
      // multiple files case
      actualFile = actualFile.replaceFirst("\\.expected_" + i + "\\.", ".actual_" + i + ".");
      // single file case
      actualFile = actualFile.replaceFirst("\\.expected\\.", ".actual.");
      File actual = new File(actualFile);
      if (result.getResult()) {
        fileCompare(expected, actual);
      }
    }
  }
  //
  if (!result.getResult()) {
    String logStr = KettleLogStore.getAppender().getBuffer(result.getLogChannelId(), true).toString();
    // The old check here was "expectedFiles.size() == 0", which is unreachable: the
    // isEmpty() guard above already fail()s when the list is empty, so a failing
    // transformation was silently ignored. Per the original comment, the intent is
    // to tolerate failure only when a ".fail" expectation file is present.
    boolean failureExpected = false;
    for (File expected : expectedFiles) {
      if (expected.getName().contains(".fail")) {
        failureExpected = true;
        break;
      }
    }
    if (!failureExpected) {
      // We haven't got a ".fail.txt" file, so this is a real failure
      fail("Error running " + getPath(transFile) + ":" + logStr);
    }
  }
}
Example use of org.pentaho.di.core.logging.LogChannel in the pentaho-kettle project (by Pentaho): class MasterSlaveIT, method runMultipleCopiesOnMultipleSlaves2.
/**
 * Reads a CSV file and sends the data to 3 step copies running on 3 slave servers,
 * then verifies that the clustered run finished without errors and produced the
 * expected row count.
 */
public void runMultipleCopiesOnMultipleSlaves2() throws Exception {
  final TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-hops-between-multiple-copies-steps-on-cluster.ktr");
  final TransExecutionConfiguration executionConfig = createClusteredTransExecutionConfiguration();
  final TransSplitter splitter = Trans.executeClustered(meta, executionConfig);
  final LogChannel log = createLogChannel("cluster unit test <runMultipleCopiesOnMultipleSlaves2>");
  // Block until the clustered transformation completes; a non-zero count means failure.
  final long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
  assertEquals(0L, errorCount);
  final String output = loadFileContent(meta, "${java.io.tmpdir}/test-multiple-copies-on-multiple-slaves2.txt");
  assertEqualsIgnoreWhitespacesAndCase("90000", output);
}
Example use of org.pentaho.di.core.logging.LogChannel in the pentaho-kettle project (by Pentaho): class MasterSlaveIT, method runParallelFileReadOnMasterWithCopies.
/**
 * Reads a CSV file in parallel on the master using 3 step copies, passes the rows
 * to a dummy step on the slaves, and asserts both an error-free run and the
 * expected result value.
 */
public void runParallelFileReadOnMasterWithCopies() throws Exception {
  final TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master-with-copies.ktr");
  final TransExecutionConfiguration executionConfig = createClusteredTransExecutionConfiguration();
  final TransSplitter splitter = Trans.executeClustered(meta, executionConfig);
  final LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnMasterWithCopies>");
  // Block until the clustered transformation completes; a non-zero count means failure.
  final long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
  assertEquals(0L, errorCount);
  final String output = loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result-with-copies.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", output);
}
Example use of org.pentaho.di.core.logging.LogChannel in the pentaho-kettle project (by Pentaho): class MasterSlaveIT, method runParallelFileReadOnMaster.
/**
 * Reads a CSV file in parallel on the master in 1 copy.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 * We want to make sure that only 1 copy is considered.<br>
 */
public void runParallelFileReadOnMaster() throws Exception {
  TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master.ktr");
  TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
  TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
  // Label the log channel with this method's name ("run..."), matching the
  // convention of the sibling tests; the old label said "testParallelFileReadOnMaster".
  LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnMaster>");
  // Block until the clustered transformation completes; a non-zero count means failure.
  long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
  assertEquals(0L, nrErrors);
  String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Example use of org.pentaho.di.core.logging.LogChannel in the pentaho-kettle project (by Pentaho): class MasterSlaveIT, method runParallelFileReadOnSlaves.
/**
 * Reads a CSV file in parallel on all 3 slaves (1 copy each), passes the rows to
 * a dummy step on the slaves, and asserts both an error-free run and the expected
 * result value.
 */
public void runParallelFileReadOnSlaves() throws Exception {
  final TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves.ktr");
  final TransExecutionConfiguration executionConfig = createClusteredTransExecutionConfiguration();
  final TransSplitter splitter = Trans.executeClustered(meta, executionConfig);
  final LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnSlaves>");
  // Block until the clustered transformation completes; a non-zero count means failure.
  final long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
  assertEquals(0L, errorCount);
  final String output = loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves.txt");
  assertEqualsIgnoreWhitespacesAndCase("100", output);
}
Aggregations