Usage of edu.iu.dsc.tws.api.config.Config in the twister2 project (DSC-SPIDAL).
Example: the main method of the KeyedAddInputsExample class.
/**
 * Entry point: loads the default runtime configuration and submits the
 * keyed-add-inputs example as a batch TSet job.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
  // This example needs no job-specific settings, so the JobConfig stays empty.
  JobConfig jobCfg = new JobConfig();
  // Load the resource-allocator defaults with no extra overrides.
  Config cfg = ResourceAllocator.loadConfig(new HashMap<>());
  BatchTsetExample.submitJob(cfg, PARALLELISM, jobCfg, KeyedAddInputsExample.class.getName());
}
Usage of edu.iu.dsc.tws.api.config.Config in the twister2 project (DSC-SPIDAL).
Example: the main method of the PersistExample class.
/**
 * Entry point: loads the default runtime configuration and submits the
 * persist example as a batch TSet job.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
  // This example needs no job-specific settings, so the JobConfig stays empty.
  JobConfig jobCfg = new JobConfig();
  // Load the resource-allocator defaults with no extra overrides.
  Config cfg = ResourceAllocator.loadConfig(new HashMap<>());
  BatchTsetExample.submitJob(cfg, PARALLELISM, jobCfg, PersistExample.class.getName());
}
Usage of edu.iu.dsc.tws.api.config.Config in the twister2 project (DSC-SPIDAL).
Example: the main method of the SUnionExample class.
/**
 * Entry point: loads the default runtime configuration and submits the
 * streaming-union example as a batch TSet job. The union of two sources
 * is driven at twice the base parallelism.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
  // This example needs no job-specific settings, so the JobConfig stays empty.
  JobConfig jobCfg = new JobConfig();
  // Load the resource-allocator defaults with no extra overrides.
  Config cfg = ResourceAllocator.loadConfig(new HashMap<>());
  BatchTsetExample.submitJob(cfg, PARALLELISM * 2, jobCfg, SUnionExample.class.getName());
}
Usage of edu.iu.dsc.tws.api.config.Config in the twister2 project (DSC-SPIDAL).
Example: the testUniqueSchedules method of the CSVInputFormatTest class.
/**
 * Tests the CSV input format: writes a CSV file under /tmp/dinput/,
 * partitions it into splits, assigns one split to "localhost", and
 * reads that split through to the end.
 */
@Test
public void testUniqueSchedules() throws IOException {
  Config config = getConfig();
  Path path = new Path("/tmp/dinput/");
  // Generate the CSV test data the partitioner will read back.
  createOutputFile(path, config);
  LocalCSVInputPartitioner csvInputPartitioner = new LocalCSVInputPartitioner(path, 4, config);
  csvInputPartitioner.configure(config);
  FileInputSplit[] inputSplits = csvInputPartitioner.createInputSplits(2);
  LOG.info("input split values are:" + Arrays.toString(inputSplits));
  InputSplitAssigner inputSplitAssigner = csvInputPartitioner.getInputSplitAssigner(inputSplits);
  InputSplit inputSplit = inputSplitAssigner.getNextInputSplit("localhost", 0);
  inputSplit.open(config);
  // Check reachedEnd() BEFORE reading: the original do/while called
  // nextRecord once even on an already-exhausted split (e.g. an empty file).
  while (!inputSplit.reachedEnd()) {
    inputSplit.nextRecord(null);
  }
  // NOTE(review): the split is opened but never closed here — confirm whether
  // the InputSplit API exposes close() and release the handle if it does.
}
Usage of edu.iu.dsc.tws.api.config.Config in the twister2 project (DSC-SPIDAL).
Example: the execute method of the TaskExecutor class.
/**
 * Execute a plan and a graph. This call blocks until the execution finishes. In case of
 * streaming, this call doesn't return while for batch computations it returns after
 * the execution is done.
 *
 * @param taskConfig the user configuration to be passed to the task instances
 * @param graph the dataflow graph
 * @param plan the execution plan
 */
public void execute(Config taskConfig, ComputeGraph graph, ExecutionPlan plan) {
  // Merge configs: entries from taskConfig override the executor-wide config.
  Config merged = Config.newBuilder()
      .putAll(config)
      .putAll(taskConfig)
      .build();
  ExecutionHookImpl hook = new ExecutionHookImpl(config, dataObjectMap, plan, currentExecutors);
  IExecutor ex = executor.getExecutor(merged, plan, graph.getOperationMode(), hook);
  // Run to completion, then release the executor's resources.
  ex.execute();
  ex.closeExecution();
}
Aggregations