Example 76 with Config

Use of edu.iu.dsc.tws.api.config.Config in project twister2 by DSC-SPIDAL.

The main method of the BranchingExample class.

public static void main(String[] args) {
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    JobConfig jobConfig = new JobConfig();
    BatchTsetExample.submitJob(config, PARALLELISM, jobConfig, BranchingExample.class.getName());
}
Also used : Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig)
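
The BatchTsetExample.submitJob helper called above is not reproduced on this page. The sketch below is a hypothetical stand-in that follows the Twister2Job builder pattern demonstrated in Example 78 further down; the job name, the per-worker CPU and memory figures, and the Twister2Submitter import path are assumptions, not the actual contents of BatchTsetExample.

import edu.iu.dsc.tws.api.JobConfig;
import edu.iu.dsc.tws.api.Twister2Job;
import edu.iu.dsc.tws.api.config.Config;
// assumed import path, as used in other twister2 examples
import edu.iu.dsc.tws.rsched.job.Twister2Submitter;

public final class SubmitJobSketch {

    private SubmitJobSketch() {
    }

    // hypothetical stand-in for BatchTsetExample.submitJob(config, parallelism, jobConfig, clazz)
    public static void submitJob(Config config, int parallelism, JobConfig jobConfig, String clazz) {
        Twister2Job.Twister2JobBuilder jobBuilder = Twister2Job.newBuilder();
        // assumed: the worker class name doubles as the job name
        jobBuilder.setJobName(clazz);
        jobBuilder.setWorkerClass(clazz);
        // assumed resources: 1 CPU and 512 MB of memory for each of the 'parallelism' workers
        jobBuilder.addComputeResource(1, 512, parallelism);
        jobBuilder.setConfig(jobConfig);
        Twister2Submitter.submitJob(jobBuilder.build(), config);
    }
}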

Example 77 with Config

Use of edu.iu.dsc.tws.api.config.Config in project twister2 by DSC-SPIDAL.

The main method of the CacheExample class.

public static void main(String[] args) {
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    JobConfig jobConfig = new JobConfig();
    BatchTsetExample.submitJob(config, PARALLELISM, jobConfig, CacheExample.class.getName());
}
Also used : Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig)

Example 78 with Config

Use of edu.iu.dsc.tws.api.config.Config in project twister2 by DSC-SPIDAL.

The main method of the WordCount class.

public static void main(String[] args) {
    // first load the configurations from command line and config files
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    JobConfig jobConfig = new JobConfig();
    Twister2Job.Twister2JobBuilder jobBuilder = Twister2Job.newBuilder();
    jobBuilder.setJobName("streaming-wordcount");
    jobBuilder.setWorkerClass(WordCountWorker.class.getName());
    // request 4 workers, each with 1 CPU and 512 MB of memory
    jobBuilder.addComputeResource(1, 512, 4);
    jobBuilder.setConfig(jobConfig);
    // now submit the job
    Twister2Submitter.submitJob(jobBuilder.build(), config);
}
Also used : Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) Twister2Job(edu.iu.dsc.tws.api.Twister2Job)

Example 79 with Config

Use of edu.iu.dsc.tws.api.config.Config in project twister2 by DSC-SPIDAL.

The main method of the ExampleTaskMain class.

public static void main(String[] args) throws ParseException {
    // first load the configurations from command line and config files
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    Options options = new Options();
    options.addOption(Constants.ARGS_WORKERS, true, "Workers");
    options.addOption(Constants.ARGS_SIZE, true, "Size");
    options.addOption(Constants.ARGS_ITR, true, "Iteration");
    options.addOption(Utils.createOption(Constants.ARGS_OPERATION, true, "Operation", true));
    options.addOption(Constants.ARGS_STREAM, false, "Stream");
    options.addOption(Constants.ARGS_WINDOW, false, "WindowType");
    options.addOption(Utils.createOption(Constants.ARGS_TASK_STAGES, true, "Number of parallel instances of tasks", true));
    options.addOption(Utils.createOption(Constants.ARGS_GAP, true, "Gap", false));
    options.addOption(Utils.createOption(Constants.ARGS_FNAME, true, "File name", false));
    options.addOption(Utils.createOption(Constants.ARGS_OUTSTANDING, true, "Throughput no of messages", false));
    options.addOption(Utils.createOption(Constants.ARGS_THREADS, true, "Threads", false));
    options.addOption(Utils.createOption(Constants.ARGS_PRINT_INTERVAL, true, "Print interval", false));
    options.addOption(Utils.createOption(Constants.ARGS_DATA_TYPE, true, "Data type", false));
    options.addOption(Utils.createOption(Constants.ARGS_INIT_ITERATIONS, true, "Initial iterations", false));
    options.addOption(Constants.ARGS_VERIFY, false, "verify");
    options.addOption(Utils.createOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA, true, "Benchmark Metadata", false));
    options.addOption(Utils.createOption(Constants.ARG_RESOURCE_MEMORY, true, "Instance memory", false));
    CommandLineParser commandLineParser = new DefaultParser();
    CommandLine cmd = commandLineParser.parse(options, args);
    int workers = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_WORKERS));
    int size = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_SIZE));
    int itr = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_ITR));
    String operation = cmd.getOptionValue(Constants.ARGS_OPERATION);
    boolean stream = cmd.hasOption(Constants.ARGS_STREAM);
    boolean window = cmd.hasOption(Constants.ARGS_WINDOW);
    boolean verify = cmd.hasOption(Constants.ARGS_VERIFY);
    String threads = "true";
    if (cmd.hasOption(Constants.ARGS_THREADS)) {
        threads = cmd.getOptionValue(Constants.ARGS_THREADS);
    }
    String taskStages = cmd.getOptionValue(Constants.ARGS_TASK_STAGES);
    String gap = "0";
    if (cmd.hasOption(Constants.ARGS_GAP)) {
        gap = cmd.getOptionValue(Constants.ARGS_GAP);
    }
    String fName = "";
    if (cmd.hasOption(Constants.ARGS_FNAME)) {
        fName = cmd.getOptionValue(Constants.ARGS_FNAME);
    }
    String outstanding = "0";
    if (cmd.hasOption(Constants.ARGS_OUTSTANDING)) {
        outstanding = cmd.getOptionValue(Constants.ARGS_OUTSTANDING);
    }
    String printInt = "1";
    if (cmd.hasOption(Constants.ARGS_PRINT_INTERVAL)) {
        printInt = cmd.getOptionValue(Constants.ARGS_PRINT_INTERVAL);
    }
    String dataType = "default";
    if (cmd.hasOption(Constants.ARGS_DATA_TYPE)) {
        dataType = cmd.getOptionValue(Constants.ARGS_DATA_TYPE);
    }
    String intItr = "0";
    if (cmd.hasOption(Constants.ARGS_INIT_ITERATIONS)) {
        intItr = cmd.getOptionValue(Constants.ARGS_INIT_ITERATIONS);
    }
    boolean runBenchmark = cmd.hasOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA);
    String benchmarkMetadata = null;
    if (runBenchmark) {
        benchmarkMetadata = cmd.getOptionValue(BenchmarkMetadata.ARG_BENCHMARK_METADATA);
    }
    int memory = 1024;
    if (cmd.hasOption(Constants.ARG_RESOURCE_MEMORY)) {
        memory = Integer.parseInt(cmd.getOptionValue(Constants.ARG_RESOURCE_MEMORY));
    }
    // build JobConfig
    JobConfig jobConfig = new JobConfig();
    jobConfig.put(Constants.ARGS_ITR, Integer.toString(itr));
    jobConfig.put(Constants.ARGS_OPERATION, operation);
    jobConfig.put(Constants.ARGS_SIZE, Integer.toString(size));
    jobConfig.put(Constants.ARGS_WORKERS, Integer.toString(workers));
    jobConfig.put(Constants.ARGS_TASK_STAGES, taskStages);
    jobConfig.put(Constants.ARGS_GAP, gap);
    jobConfig.put(Constants.ARGS_FNAME, fName);
    jobConfig.put(Constants.ARGS_OUTSTANDING, outstanding);
    jobConfig.put(Constants.ARGS_THREADS, threads);
    jobConfig.put(Constants.ARGS_PRINT_INTERVAL, printInt);
    jobConfig.put(Constants.ARGS_DATA_TYPE, dataType);
    jobConfig.put(Constants.ARGS_INIT_ITERATIONS, intItr);
    jobConfig.put(Constants.ARGS_VERIFY, verify);
    jobConfig.put(Constants.ARGS_STREAM, stream);
    jobConfig.put(Constants.ARGS_WINDOW, window);
    jobConfig.put(BenchmarkMetadata.ARG_RUN_BENCHMARK, runBenchmark);
    if (runBenchmark) {
        jobConfig.put(BenchmarkMetadata.ARG_BENCHMARK_METADATA, benchmarkMetadata);
    }
    // build the job
    if (!stream) {
        switch(operation) {
            case "reduce":
                submitJob(config, workers, jobConfig, BTReduceExample.class.getName(), memory);
                break;
            case "allreduce":
                submitJob(config, workers, jobConfig, BTAllReduceExample.class.getName(), memory);
                break;
            case "gather":
                submitJob(config, workers, jobConfig, BTGatherExample.class.getName(), memory);
                break;
            case "allgather":
                submitJob(config, workers, jobConfig, BTAllGatherExample.class.getName(), memory);
                break;
            case "bcast":
                submitJob(config, workers, jobConfig, BTBroadCastExample.class.getName(), memory);
                break;
            case "partition":
                submitJob(config, workers, jobConfig, BTPartitionExample.class.getName(), memory);
                break;
            case "direct":
                submitJob(config, workers, jobConfig, BTDirectExample.class.getName(), memory);
                break;
            case "keyed-reduce":
                submitJob(config, workers, jobConfig, BTKeyedReduceExample.class.getName(), memory);
                break;
            case "keyed-gather":
                submitJob(config, workers, jobConfig, BTKeyedGatherExample.class.getName(), memory);
                break;
            case "keyed-partition":
                submitJob(config, workers, jobConfig, BTPartitionKeyedExample.class.getName(), memory);
                break;
            case "join":
                submitJob(config, workers, jobConfig, BTJoinExample.class.getName(), memory);
                break;
        }
    } else {
        switch(operation) {
            case "direct":
                submitJob(config, workers, jobConfig, STWindowExample.class.getName(), memory);
                break;
            case "cdirect":
                submitJob(config, workers, jobConfig, STWindowCustomExample.class.getName(), memory);
                break;
            case "windowmpi":
                submitJob(config, workers, jobConfig, STWindowMPI.class.getName(), memory);
                break;
            case "windowt":
                submitJob(config, workers, jobConfig, STWindowEventTimeExample.class.getName(), memory);
                break;
            case "reduce":
                submitJob(config, workers, jobConfig, STReduceExample.class.getName(), memory);
                break;
            case "allreduce":
                submitJob(config, workers, jobConfig, STAllReduceExample.class.getName(), memory);
                break;
            case "gather":
                submitJob(config, workers, jobConfig, STGatherExample.class.getName(), memory);
                break;
            case "allgather":
                submitJob(config, workers, jobConfig, STAllGatherExample.class.getName(), memory);
                break;
            case "bcast":
                submitJob(config, workers, jobConfig, STBroadCastExample.class.getName(), memory);
                break;
            case "partition":
                submitJob(config, workers, jobConfig, STPartitionExample.class.getName(), memory);
                break;
            case "keyed-reduce":
                submitJob(config, workers, jobConfig, STKeyedReduceExample.class.getName(), memory);
                break;
            case "keyed-gather":
                submitJob(config, workers, jobConfig, STKeyedGatherExample.class.getName(), memory);
                break;
            case "keyed-partition":
                submitJob(config, workers, jobConfig, STPartitionKeyedExample.class.getName(), memory);
                break;
        }
    }
}
Also used : Options(org.apache.commons.cli.Options) STWindowExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowExample) STWindowCustomExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowCustomExample) BTReduceExample(edu.iu.dsc.tws.examples.task.batch.BTReduceExample) BTAllGatherExample(edu.iu.dsc.tws.examples.task.batch.BTAllGatherExample) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) BTKeyedGatherExample(edu.iu.dsc.tws.examples.task.batch.BTKeyedGatherExample) STReduceExample(edu.iu.dsc.tws.examples.task.streaming.STReduceExample) BTBroadCastExample(edu.iu.dsc.tws.examples.task.batch.BTBroadCastExample) BTPartitionExample(edu.iu.dsc.tws.examples.task.batch.BTPartitionExample) BTKeyedReduceExample(edu.iu.dsc.tws.examples.task.batch.BTKeyedReduceExample) STAllReduceExample(edu.iu.dsc.tws.examples.task.streaming.STAllReduceExample) BTAllReduceExample(edu.iu.dsc.tws.examples.task.batch.BTAllReduceExample) BTJoinExample(edu.iu.dsc.tws.examples.task.batch.BTJoinExample) STKeyedGatherExample(edu.iu.dsc.tws.examples.task.streaming.STKeyedGatherExample) CommandLineParser(org.apache.commons.cli.CommandLineParser) DefaultParser(org.apache.commons.cli.DefaultParser) STGatherExample(edu.iu.dsc.tws.examples.task.streaming.STGatherExample) BTGatherExample(edu.iu.dsc.tws.examples.task.batch.BTGatherExample) BTPartitionKeyedExample(edu.iu.dsc.tws.examples.task.batch.BTPartitionKeyedExample) STBroadCastExample(edu.iu.dsc.tws.examples.task.streaming.STBroadCastExample) STPartitionExample(edu.iu.dsc.tws.examples.task.streaming.STPartitionExample) STPartitionKeyedExample(edu.iu.dsc.tws.examples.task.streaming.STPartitionKeyedExample) STKeyedReduceExample(edu.iu.dsc.tws.examples.task.streaming.STKeyedReduceExample) CommandLine(org.apache.commons.cli.CommandLine) STWindowEventTimeExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowEventTimeExample) STWindowMPI(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowMPI) STAllGatherExample(edu.iu.dsc.tws.examples.task.streaming.STAllGatherExample) BTDirectExample(edu.iu.dsc.tws.examples.task.batch.BTDirectExample)
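
Every value placed into the JobConfig above travels with the job; in twister2 the worker-side example classes read those values back through the Config object they receive. A minimal hedged sketch of that read-back follows: the merging of JobConfig into the worker Config, the typed getters getStringValue and getBooleanValue, and the helper class itself are assumptions for illustration, and the key parameters stand in for the Constants.ARGS_* fields used above.

import edu.iu.dsc.tws.api.config.Config;

public final class JobParamsSketch {

    private JobParamsSketch() {
    }

    // read back a few of the values ExampleTaskMain stored in the JobConfig;
    // sizeKey, operationKey and streamKey stand in for the Constants.ARGS_* keys
    public static void printParams(Config cfg, String sizeKey, String operationKey, String streamKey) {
        // size and operation were stored as Strings above
        int size = Integer.parseInt(cfg.getStringValue(sizeKey));
        String operation = cfg.getStringValue(operationKey);
        // stream was stored as a boolean above
        boolean stream = cfg.getBooleanValue(streamKey);
        System.out.println("size=" + size + " operation=" + operation + " stream=" + stream);
    }
}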

Example 80 with Config

Use of edu.iu.dsc.tws.api.config.Config in project twister2 by DSC-SPIDAL.

The main method of the HelloTSet class.

public static void main(String[] args) throws ParseException {
    // first load the configurations from command line and config files
    Options options = new Options();
    options.addOption("para", true, "Workers");
    CommandLineParser commandLineParser = new DefaultParser();
    CommandLine cmd = commandLineParser.parse(options, args);
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    int para = Integer.parseInt(cmd.getOptionValue("para"));
    // build JobConfig
    JobConfig jobConfig = new JobConfig();
    jobConfig.put("para", Integer.toString(para));
    submitJob(config, para, jobConfig, HelloTSet.class.getName());
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) CommandLineParser(org.apache.commons.cli.CommandLineParser) DefaultParser(org.apache.commons.cli.DefaultParser)
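
One caveat worth noting: cmd.getOptionValue("para") returns null when the option is not supplied, so Integer.parseInt would then throw a NumberFormatException. The sketch below shows a slightly more defensive variant using the two-argument getOptionValue overload from Apache Commons CLI; the default parallelism of 2 is an arbitrary assumption.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public final class ParaOptionSketch {

    private ParaOptionSketch() {
    }

    // parse the "para" option, falling back to 2 workers when it is omitted
    public static int parsePara(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("para", true, "Workers");
        CommandLineParser commandLineParser = new DefaultParser();
        CommandLine cmd = commandLineParser.parse(options, args);
        return Integer.parseInt(cmd.getOptionValue("para", "2"));
    }
}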

Aggregations

Config (edu.iu.dsc.tws.api.config.Config): 169 usages
JobConfig (edu.iu.dsc.tws.api.JobConfig): 101 usages
Twister2Job (edu.iu.dsc.tws.api.Twister2Job): 52 usages
CommandLine (org.apache.commons.cli.CommandLine): 27 usages
CommandLineParser (org.apache.commons.cli.CommandLineParser): 27 usages
DefaultParser (org.apache.commons.cli.DefaultParser): 27 usages
Options (org.apache.commons.cli.Options): 27 usages
HashMap (java.util.HashMap): 26 usages
ComputeGraph (edu.iu.dsc.tws.api.compute.graph.ComputeGraph): 18 usages
Map (java.util.Map): 15 usages
TaskSchedulePlan (edu.iu.dsc.tws.api.compute.schedule.elements.TaskSchedulePlan): 13 usages
WorkerPlan (edu.iu.dsc.tws.api.compute.schedule.elements.WorkerPlan): 12 usages
LinkedHashMap (java.util.LinkedHashMap): 12 usages
Test (org.junit.Test): 12 usages
Path (edu.iu.dsc.tws.api.data.Path): 10 usages
TaskInstancePlan (edu.iu.dsc.tws.api.compute.schedule.elements.TaskInstancePlan): 9 usages
WorkerSchedulePlan (edu.iu.dsc.tws.api.compute.schedule.elements.WorkerSchedulePlan): 9 usages
JobAPI (edu.iu.dsc.tws.proto.system.job.JobAPI): 9 usages
TaskSchedulerClassTest (edu.iu.dsc.tws.tsched.utils.TaskSchedulerClassTest): 9 usages
ExecutionPlan (edu.iu.dsc.tws.api.compute.executor.ExecutionPlan): 8 usages