Example use of org.pentaho.di.trans.TransExecutionConfiguration in the pentaho-kettle project by Pentaho, taken from the class BaseCluster, method createClusteredTransExecutionConfiguration:
/**
 * Builds a {@link TransExecutionConfiguration} preset for clustered execution:
 * local and remote execution are switched off, clustered execution is switched
 * on, all three cluster phases (posting, preparing, starting) are enabled, and
 * logging is kept at the minimal level.
 *
 * @return a freshly created configuration object set up for clustered runs
 */
public static TransExecutionConfiguration createClusteredTransExecutionConfiguration() {
    TransExecutionConfiguration clusteredConfig = new TransExecutionConfiguration();
    // Run on the cluster only: disable both the local and the remote execution modes.
    clusteredConfig.setExecutingLocally(false);
    clusteredConfig.setExecutingRemotely(false);
    clusteredConfig.setExecutingClustered(true);
    // Enable every phase of a clustered run: post, prepare, and start.
    clusteredConfig.setClusterPosting(true);
    clusteredConfig.setClusterPreparing(true);
    clusteredConfig.setClusterStarting(true);
    // Keep log output terse during cluster runs.
    clusteredConfig.setLogLevel(LogLevel.MINIMAL);
    return clusteredConfig;
}
Example use of org.pentaho.di.trans.TransExecutionConfiguration in the pentaho-kettle project by Pentaho, taken from the class MasterSlaveIT, method runMultipleCopiesOnMultipleSlaves2:
/**
 * Reads a CSV file and fans the data out to 3 step copies spread over 3 slave servers.<br>
 *
 * @throws Exception if loading, splitting, or running the clustered transformation fails
 */
public void runMultipleCopiesOnMultipleSlaves2() throws Exception {
    // Load the test transformation and bind its cluster schema to the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-hops-between-multiple-copies-steps-on-cluster.ktr");
    TransExecutionConfiguration clusteredConfig = createClusteredTransExecutionConfiguration();
    TransSplitter splitter = Trans.executeClustered(meta, clusteredConfig);
    // Monitor the clustered run to completion; the result is the total error count over all slaves.
    LogChannel log = createLogChannel("cluster unit test <runMultipleCopiesOnMultipleSlaves2>");
    long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
    assertEquals(0L, errorCount);
    // Check the aggregated output file the slaves produced.
    String fileContent = loadFileContent(meta, "${java.io.tmpdir}/test-multiple-copies-on-multiple-slaves2.txt");
    assertEqualsIgnoreWhitespacesAndCase("90000", fileContent);
}
Example use of org.pentaho.di.trans.TransExecutionConfiguration in the pentaho-kettle project by Pentaho, taken from the class MasterSlaveIT, method runParallelFileReadOnMasterWithCopies:
/**
 * Reads a CSV file in parallel on the master in 3 copies.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 *
 * @throws Exception if loading, splitting, or running the clustered transformation fails
 */
public void runParallelFileReadOnMasterWithCopies() throws Exception {
    // Load the test transformation and bind its cluster schema to the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master-with-copies.ktr");
    TransExecutionConfiguration clusteredConfig = createClusteredTransExecutionConfiguration();
    TransSplitter splitter = Trans.executeClustered(meta, clusteredConfig);
    // Monitor the clustered run to completion; the result is the total error count over all slaves.
    LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnMasterWithCopies>");
    long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
    assertEquals(0L, errorCount);
    // Check the result file the run produced.
    String fileContent = loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result-with-copies.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", fileContent);
}
Example use of org.pentaho.di.trans.TransExecutionConfiguration in the pentaho-kettle project by Pentaho, taken from the class MasterSlaveIT, method runParallelFileReadOnMaster:
/**
 * Reads a CSV file in parallel on the master in 1 copy.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 * We want to make sure that only 1 copy is considered.<br>
 *
 * @throws Exception if loading, splitting, or running the clustered transformation fails
 */
public void runParallelFileReadOnMaster() throws Exception {
    // Load the test transformation and bind its cluster schema to the generated slave servers.
    TransMeta transMeta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-master.ktr");
    TransExecutionConfiguration config = createClusteredTransExecutionConfiguration();
    TransSplitter transSplitter = Trans.executeClustered(transMeta, config);
    // Label the log channel with this method's name ("run...", not "test...") so log
    // output is attributable to the right test, consistent with the sibling methods.
    LogChannel logChannel = createLogChannel("cluster unit test <runParallelFileReadOnMaster>");
    // Monitor the clustered run to completion; the result is the total error count over all slaves.
    long nrErrors = Trans.monitorClusteredTransformation(logChannel, transSplitter, null, 1);
    assertEquals(0L, nrErrors);
    // Check the result file the run produced.
    String result = loadFileContent(transMeta, "${java.io.tmpdir}/test-parallel-file-read-on-master-result.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", result);
}
Example use of org.pentaho.di.trans.TransExecutionConfiguration in the pentaho-kettle project by Pentaho, taken from the class MasterSlaveIT, method runParallelFileReadOnSlaves:
/**
 * Reads a CSV file in parallel on all 3 slaves, each with 1 copy.<br>
 * It then passes the data over to a dummy step on the slaves.<br>
 *
 * @throws Exception if loading, splitting, or running the clustered transformation fails
 */
public void runParallelFileReadOnSlaves() throws Exception {
    // Load the test transformation and bind its cluster schema to the generated slave servers.
    TransMeta meta = loadTransMetaReplaceSlavesInCluster(clusterGenerator, "test/org/pentaho/di/cluster/test-parallel-file-read-on-slaves.ktr");
    TransExecutionConfiguration clusteredConfig = createClusteredTransExecutionConfiguration();
    TransSplitter splitter = Trans.executeClustered(meta, clusteredConfig);
    // Monitor the clustered run to completion; the result is the total error count over all slaves.
    LogChannel log = createLogChannel("cluster unit test <runParallelFileReadOnSlaves>");
    long errorCount = Trans.monitorClusteredTransformation(log, splitter, null, 1);
    assertEquals(0L, errorCount);
    // Check the result file the slaves produced.
    String fileContent = loadFileContent(meta, "${java.io.tmpdir}/test-parallel-file-read-on-slaves.txt");
    assertEqualsIgnoreWhitespacesAndCase("100", fileContent);
}
Aggregations