Use of edu.iu.dsc.tws.api.config.Config in project twister2 (DSC-SPIDAL).
Example: the main method of the BranchingExample class.
public static void main(String[] args) {
  // Load the runtime configuration (command line plus config files) and
  // submit the branching example as a batch TSet job.
  Config runtimeConfig = ResourceAllocator.loadConfig(new HashMap<>());
  JobConfig jobParameters = new JobConfig();
  BatchTsetExample.submitJob(runtimeConfig, PARALLELISM, jobParameters,
      BranchingExample.class.getName());
}
Use of edu.iu.dsc.tws.api.config.Config in project twister2 (DSC-SPIDAL).
Example: the main method of the CacheExample class.
public static void main(String[] args) {
  // Load the runtime configuration (command line plus config files) and
  // submit the cache example as a batch TSet job.
  Config runtimeConfig = ResourceAllocator.loadConfig(new HashMap<>());
  JobConfig jobParameters = new JobConfig();
  BatchTsetExample.submitJob(runtimeConfig, PARALLELISM, jobParameters,
      CacheExample.class.getName());
}
Use of edu.iu.dsc.tws.api.config.Config in project twister2 (DSC-SPIDAL).
Example: the main method of the WordCount class.
public static void main(String[] args) {
  // Load cluster/runtime configuration from the command line and config files.
  Config config = ResourceAllocator.loadConfig(new HashMap<>());
  JobConfig jobConfig = new JobConfig();

  // Describe the streaming word-count job: one compute resource with
  // 1 CPU, 512 MB of memory, and 4 worker instances.
  Twister2Job job = Twister2Job.newBuilder()
      .setJobName("streaming-wordcount")
      .setWorkerClass(WordCountWorker.class.getName())
      .addComputeResource(1, 512, 4)
      .setConfig(jobConfig)
      .build();

  // Hand the assembled job over to the submitter.
  Twister2Submitter.submitJob(job, config);
}
Use of edu.iu.dsc.tws.api.config.Config in project twister2 (DSC-SPIDAL).
Example: the main method of the ExampleTaskMain class.
/**
 * Entry point for the comms example benchmarks: parses the command line,
 * assembles the {@link JobConfig}, then submits the example class that
 * matches the requested operation in either batch or streaming mode.
 *
 * @param args command line arguments; see {@link #buildOptions()} for the
 *             supported options
 * @throws ParseException if a required option is missing or malformed
 */
public static void main(String[] args) throws ParseException {
  // first load the configurations from command line and config files
  Config config = ResourceAllocator.loadConfig(new HashMap<>());

  CommandLine cmd = new DefaultParser().parse(buildOptions(), args);

  // required options
  int workers = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_WORKERS));
  int size = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_SIZE));
  int itr = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_ITR));
  String operation = cmd.getOptionValue(Constants.ARGS_OPERATION);
  String taskStages = cmd.getOptionValue(Constants.ARGS_TASK_STAGES);

  // boolean flags (presence only)
  boolean stream = cmd.hasOption(Constants.ARGS_STREAM);
  boolean window = cmd.hasOption(Constants.ARGS_WINDOW);
  boolean verify = cmd.hasOption(Constants.ARGS_VERIFY);

  // optional options: getOptionValue(key, default) replaces the repeated
  // hasOption/getOptionValue pattern while keeping the same defaults
  String threads = cmd.getOptionValue(Constants.ARGS_THREADS, "true");
  String gap = cmd.getOptionValue(Constants.ARGS_GAP, "0");
  String fName = cmd.getOptionValue(Constants.ARGS_FNAME, "");
  String outstanding = cmd.getOptionValue(Constants.ARGS_OUTSTANDING, "0");
  String printInt = cmd.getOptionValue(Constants.ARGS_PRINT_INTERVAL, "1");
  String dataType = cmd.getOptionValue(Constants.ARGS_DATA_TYPE, "default");
  String intItr = cmd.getOptionValue(Constants.ARGS_INIT_ITERATIONS, "0");

  boolean runBenchmark = cmd.hasOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA);
  String benchmarkMetadata = runBenchmark
      ? cmd.getOptionValue(BenchmarkMetadata.ARG_BENCHMARK_METADATA)
      : null;

  int memory = Integer.parseInt(
      cmd.getOptionValue(Constants.ARG_RESOURCE_MEMORY, "1024"));

  // build JobConfig — numeric values are stored as strings, as the workers
  // re-parse them from the job configuration
  JobConfig jobConfig = new JobConfig();
  jobConfig.put(Constants.ARGS_ITR, Integer.toString(itr));
  jobConfig.put(Constants.ARGS_OPERATION, operation);
  jobConfig.put(Constants.ARGS_SIZE, Integer.toString(size));
  jobConfig.put(Constants.ARGS_WORKERS, Integer.toString(workers));
  jobConfig.put(Constants.ARGS_TASK_STAGES, taskStages);
  jobConfig.put(Constants.ARGS_GAP, gap);
  jobConfig.put(Constants.ARGS_FNAME, fName);
  jobConfig.put(Constants.ARGS_OUTSTANDING, outstanding);
  jobConfig.put(Constants.ARGS_THREADS, threads);
  jobConfig.put(Constants.ARGS_PRINT_INTERVAL, printInt);
  jobConfig.put(Constants.ARGS_DATA_TYPE, dataType);
  jobConfig.put(Constants.ARGS_INIT_ITERATIONS, intItr);
  jobConfig.put(Constants.ARGS_VERIFY, verify);
  jobConfig.put(Constants.ARGS_STREAM, stream);
  jobConfig.put(Constants.ARGS_WINDOW, window);
  jobConfig.put(BenchmarkMetadata.ARG_RUN_BENCHMARK, runBenchmark);
  if (runBenchmark) {
    jobConfig.put(BenchmarkMetadata.ARG_BENCHMARK_METADATA, benchmarkMetadata);
  }

  // pick the example class for the requested mode/operation and submit it
  String exampleClass = stream
      ? streamExampleClass(operation)
      : batchExampleClass(operation);
  submitJob(config, workers, jobConfig, exampleClass, memory);
}

/** Declares all command line options supported by the benchmark driver. */
private static Options buildOptions() {
  Options options = new Options();
  options.addOption(Constants.ARGS_WORKERS, true, "Workers");
  options.addOption(Constants.ARGS_SIZE, true, "Size");
  options.addOption(Constants.ARGS_ITR, true, "Iteration");
  options.addOption(Utils.createOption(Constants.ARGS_OPERATION, true, "Operation", true));
  options.addOption(Constants.ARGS_STREAM, false, "Stream");
  options.addOption(Constants.ARGS_WINDOW, false, "WindowType");
  options.addOption(Utils.createOption(Constants.ARGS_TASK_STAGES, true,
      "Number of parallel instances of tasks", true));
  options.addOption(Utils.createOption(Constants.ARGS_GAP, true, "Gap", false));
  options.addOption(Utils.createOption(Constants.ARGS_FNAME, true, "File name", false));
  options.addOption(Utils.createOption(Constants.ARGS_OUTSTANDING, true,
      "Throughput no of messages", false));
  options.addOption(Utils.createOption(Constants.ARGS_THREADS, true, "Threads", false));
  // fixed copy-pasted descriptions: these two previously read "Threads"/"Data"
  options.addOption(Utils.createOption(Constants.ARGS_PRINT_INTERVAL, true,
      "Print interval", false));
  options.addOption(Utils.createOption(Constants.ARGS_DATA_TYPE, true, "Data", false));
  options.addOption(Utils.createOption(Constants.ARGS_INIT_ITERATIONS, true,
      "Initial iterations", false));
  options.addOption(Constants.ARGS_VERIFY, false, "verify");
  options.addOption(Utils.createOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA, true,
      "Benchmark Metadata", false));
  options.addOption(Utils.createOption(Constants.ARG_RESOURCE_MEMORY, true,
      "Instance memory", false));
  return options;
}

/**
 * Maps a batch operation name to the example class to run.
 *
 * @throws IllegalArgumentException on an unknown operation (previously an
 *     unknown name fell through the switch and exited silently)
 */
private static String batchExampleClass(String operation) {
  switch (operation) {
    case "reduce":
      return BTReduceExample.class.getName();
    case "allreduce":
      return BTAllReduceExample.class.getName();
    case "gather":
      return BTGatherExample.class.getName();
    case "allgather":
      return BTAllGatherExample.class.getName();
    case "bcast":
      return BTBroadCastExample.class.getName();
    case "partition":
      return BTPartitionExample.class.getName();
    case "direct":
      return BTDirectExample.class.getName();
    case "keyed-reduce":
      return BTKeyedReduceExample.class.getName();
    case "keyed-gather":
      return BTKeyedGatherExample.class.getName();
    case "keyed-partition":
      return BTPartitionKeyedExample.class.getName();
    case "join":
      return BTJoinExample.class.getName();
    default:
      throw new IllegalArgumentException("Unknown batch operation: " + operation);
  }
}

/**
 * Maps a streaming operation name to the example class to run.
 *
 * @throws IllegalArgumentException on an unknown operation (previously an
 *     unknown name fell through the switch and exited silently)
 */
private static String streamExampleClass(String operation) {
  switch (operation) {
    case "direct":
      return STWindowExample.class.getName();
    case "cdirect":
      return STWindowCustomExample.class.getName();
    case "windowmpi":
      return STWindowMPI.class.getName();
    case "windowt":
      return STWindowEventTimeExample.class.getName();
    case "reduce":
      return STReduceExample.class.getName();
    case "allreduce":
      return STAllReduceExample.class.getName();
    case "gather":
      return STGatherExample.class.getName();
    case "allgather":
      return STAllGatherExample.class.getName();
    case "bcast":
      return STBroadCastExample.class.getName();
    case "partition":
      return STPartitionExample.class.getName();
    case "keyed-reduce":
      return STKeyedReduceExample.class.getName();
    case "keyed-gather":
      return STKeyedGatherExample.class.getName();
    case "keyed-partition":
      return STPartitionKeyedExample.class.getName();
    default:
      throw new IllegalArgumentException("Unknown streaming operation: " + operation);
  }
}
Use of edu.iu.dsc.tws.api.config.Config in project twister2 (DSC-SPIDAL).
Example: the main method of the HelloTSet class.
public static void main(String[] args) throws ParseException {
  // Parse the single supported option: the requested parallelism.
  Options options = new Options();
  options.addOption("para", true, "Workers");
  CommandLine cmd = new DefaultParser().parse(options, args);
  int parallelism = Integer.parseInt(cmd.getOptionValue("para"));

  // Load the runtime configuration and forward the parallelism to the
  // workers through the job configuration.
  Config config = ResourceAllocator.loadConfig(new HashMap<>());
  JobConfig jobConfig = new JobConfig();
  jobConfig.put("para", Integer.toString(parallelism));

  submitJob(config, parallelism, jobConfig, HelloTSet.class.getName());
}
Aggregations