Search in sources:

Example 66 with DefaultParser

Use of org.apache.commons.cli.DefaultParser in the project twister2 by DSC-SPIDAL.

The class ArrowTSetSourceExample, method main.

/**
 * Entry point: parses the six required command line options, packs them into
 * a JobConfig, and submits the Arrow TSet example job to Twister2.
 *
 * @param args command line arguments; all options below are mandatory
 * @throws Exception if option parsing or job submission fails
 */
public static void main(String[] args) throws Exception {
    LOG.log(Level.INFO, "Starting Twister2 Arrow Job");
    // first load the configurations from command line and config files
    Config runtimeConfig = ResourceAllocator.loadConfig(new HashMap<>());
    Options cliOptions = new Options();
    cliOptions.addOption(Utils.createOption(DataObjectConstants.PARALLELISM_VALUE, true, "Parallelism", true));
    cliOptions.addOption(Utils.createOption(DataObjectConstants.WORKERS, true, "Workers", true));
    cliOptions.addOption(Utils.createOption(DataObjectConstants.DSIZE, true, "100", true));
    cliOptions.addOption(Utils.createOption(DataObjectConstants.DINPUT_DIRECTORY, true, "CSV Input Directory", true));
    cliOptions.addOption(Utils.createOption(DataObjectConstants.ARROW_DIRECTORY, true, "Arrow Input Directory", true));
    cliOptions.addOption(Utils.createOption(DataObjectConstants.FILE_NAME, true, "Arrow File Name", true));
    CommandLineParser parser = new DefaultParser();
    CommandLine cli = parser.parse(cliOptions, args);
    int parallelismValue = Integer.parseInt(cli.getOptionValue(DataObjectConstants.PARALLELISM_VALUE));
    int workerCount = Integer.parseInt(cli.getOptionValue(DataObjectConstants.WORKERS));
    int dataSize = Integer.parseInt(cli.getOptionValue(DataObjectConstants.DSIZE));
    String csvInputDirectory = cli.getOptionValue(DataObjectConstants.DINPUT_DIRECTORY);
    String arrowInputDirectory = cli.getOptionValue(DataObjectConstants.ARROW_DIRECTORY);
    String arrowFileName = cli.getOptionValue(DataObjectConstants.FILE_NAME);
    // forward every parsed value to the workers through the job configuration
    JobConfig jobConfig = new JobConfig();
    jobConfig.put(DataObjectConstants.PARALLELISM_VALUE, parallelismValue);
    jobConfig.put(DataObjectConstants.WORKERS, workerCount);
    jobConfig.put(DataObjectConstants.DSIZE, dataSize);
    jobConfig.put(DataObjectConstants.DINPUT_DIRECTORY, csvInputDirectory);
    jobConfig.put(DataObjectConstants.ARROW_DIRECTORY, arrowInputDirectory);
    jobConfig.put(DataObjectConstants.FILE_NAME, arrowFileName);
    // build and submit the job (one compute resource: 512 MB, 2 cores, workerCount instances)
    Twister2Job job = Twister2Job.newBuilder()
        .setJobName("Arrow Testing Example")
        .setWorkerClass(ArrowTSetSourceExample.class)
        .addComputeResource(1, 512, 2, workerCount)
        .setConfig(jobConfig)
        .build();
    Twister2Submitter.submitJob(job, runtimeConfig);
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) CommandLineParser(org.apache.commons.cli.CommandLineParser) Twister2Job(edu.iu.dsc.tws.api.Twister2Job) JobConfig(edu.iu.dsc.tws.api.JobConfig) DefaultParser(org.apache.commons.cli.DefaultParser)

Example 67 with DefaultParser

Use of org.apache.commons.cli.DefaultParser in the project twister2 by DSC-SPIDAL.

The class TSetExample, method main.

/**
 * Entry point: parses the worker count and parallelism options, builds the
 * job configuration, and submits the CDFW TSet example job to Twister2.
 *
 * @param args command line arguments
 * @throws ParseException if the command line options cannot be parsed
 */
public static void main(String[] args) throws ParseException {
    // first load the configurations from command line and config files
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    // build JobConfig
    HashMap<String, Object> configurations = new HashMap<>();
    configurations.put(SchedulerContext.THREADS_PER_WORKER, 1);
    Options options = new Options();
    options.addOption(CDFConstants.ARGS_PARALLELISM_VALUE, true, "2");
    options.addOption(CDFConstants.ARGS_WORKERS, true, "2");
    // DefaultParser is the current, non-deprecated commons-cli parser, so the
    // previous @SuppressWarnings("deprecation") was unnecessary and is removed.
    CommandLineParser commandLineParser = new DefaultParser();
    CommandLine commandLine = commandLineParser.parse(options, args);
    int instances = Integer.parseInt(commandLine.getOptionValue(CDFConstants.ARGS_WORKERS));
    int parallelismValue = Integer.parseInt(commandLine.getOptionValue(CDFConstants.ARGS_PARALLELISM_VALUE));
    configurations.put(CDFConstants.ARGS_WORKERS, Integer.toString(instances));
    configurations.put(CDFConstants.ARGS_PARALLELISM_VALUE, Integer.toString(parallelismValue));
    // build JobConfig
    JobConfig jobConfig = new JobConfig();
    jobConfig.putAll(configurations);
    // NOTE(review): DRIVER_CLASS is explicitly set to null here, while the
    // driver class name is supplied through setDriverClass below — confirm
    // that Config.newBuilder().put accepts a null value.
    config = Config.newBuilder().putAll(config).put(SchedulerContext.DRIVER_CLASS, null).build();
    Twister2Job twister2Job = Twister2Job.newBuilder()
        .setWorkerClass(CDFWWorker.class)
        .setJobName(TSetExample.class.getName())
        .setDriverClass(TSetExample.Driver.class.getName())
        .addComputeResource(1, 512, instances)
        .setConfig(jobConfig)
        .build();
    // now submit the job
    Twister2Submitter.submitJob(twister2Job, config);
}
Also used : Options(org.apache.commons.cli.Options) HashMap(java.util.HashMap) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) BatchTSetBaseDriver(edu.iu.dsc.tws.tset.cdfw.BatchTSetBaseDriver) JobConfig(edu.iu.dsc.tws.api.JobConfig) Twister2Job(edu.iu.dsc.tws.api.Twister2Job) CommandLine(org.apache.commons.cli.CommandLine) CommandLineParser(org.apache.commons.cli.CommandLineParser) DefaultParser(org.apache.commons.cli.DefaultParser)

Example 68 with DefaultParser

Use of org.apache.commons.cli.DefaultParser in the project twister2 by DSC-SPIDAL.

The class ExampleTaskMain, method main.

/**
 * Entry point for the task examples: parses the benchmark options, builds the
 * job configuration, and submits the example class selected by the
 * {@code -operation} option (batch by default, streaming with {@code -stream}).
 *
 * @param args command line arguments
 * @throws ParseException if the command line options cannot be parsed
 */
public static void main(String[] args) throws ParseException {
    // first load the configurations from command line and config files
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    Options options = new Options();
    options.addOption(Constants.ARGS_WORKERS, true, "Workers");
    options.addOption(Constants.ARGS_SIZE, true, "Size");
    options.addOption(Constants.ARGS_ITR, true, "Iteration");
    options.addOption(Utils.createOption(Constants.ARGS_OPERATION, true, "Operation", true));
    options.addOption(Constants.ARGS_STREAM, false, "Stream");
    options.addOption(Constants.ARGS_WINDOW, false, "WindowType");
    options.addOption(Utils.createOption(Constants.ARGS_TASK_STAGES, true, "Number of parallel instances of tasks", true));
    options.addOption(Utils.createOption(Constants.ARGS_GAP, true, "Gap", false));
    options.addOption(Utils.createOption(Constants.ARGS_FNAME, true, "File name", false));
    options.addOption(Utils.createOption(Constants.ARGS_OUTSTANDING, true, "Throughput no of messages", false));
    options.addOption(Utils.createOption(Constants.ARGS_THREADS, true, "Threads", false));
    options.addOption(Utils.createOption(Constants.ARGS_PRINT_INTERVAL, true, "Threads", false));
    options.addOption(Utils.createOption(Constants.ARGS_DATA_TYPE, true, "Data", false));
    options.addOption(Utils.createOption(Constants.ARGS_INIT_ITERATIONS, true, "Data", false));
    options.addOption(Constants.ARGS_VERIFY, false, "verify");
    options.addOption(Utils.createOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA, true, "Benchmark Metadata", false));
    options.addOption(Utils.createOption(Constants.ARG_RESOURCE_MEMORY, true, "Instance memory", false));
    CommandLineParser commandLineParser = new DefaultParser();
    CommandLine cmd = commandLineParser.parse(options, args);
    // required options
    int workers = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_WORKERS));
    int size = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_SIZE));
    int itr = Integer.parseInt(cmd.getOptionValue(Constants.ARGS_ITR));
    String operation = cmd.getOptionValue(Constants.ARGS_OPERATION);
    String taskStages = cmd.getOptionValue(Constants.ARGS_TASK_STAGES);
    boolean stream = cmd.hasOption(Constants.ARGS_STREAM);
    boolean window = cmd.hasOption(Constants.ARGS_WINDOW);
    boolean verify = cmd.hasOption(Constants.ARGS_VERIFY);
    // optional options: getOptionValue(opt, default) replaces the repeated
    // hasOption/getOptionValue blocks with the same semantics
    String threads = cmd.getOptionValue(Constants.ARGS_THREADS, "true");
    String gap = cmd.getOptionValue(Constants.ARGS_GAP, "0");
    String fName = cmd.getOptionValue(Constants.ARGS_FNAME, "");
    String outstanding = cmd.getOptionValue(Constants.ARGS_OUTSTANDING, "0");
    String printInt = cmd.getOptionValue(Constants.ARGS_PRINT_INTERVAL, "1");
    String dataType = cmd.getOptionValue(Constants.ARGS_DATA_TYPE, "default");
    String intItr = cmd.getOptionValue(Constants.ARGS_INIT_ITERATIONS, "0");
    boolean runBenchmark = cmd.hasOption(BenchmarkMetadata.ARG_BENCHMARK_METADATA);
    String benchmarkMetadata = runBenchmark ? cmd.getOptionValue(BenchmarkMetadata.ARG_BENCHMARK_METADATA) : null;
    int memory = Integer.parseInt(cmd.getOptionValue(Constants.ARG_RESOURCE_MEMORY, "1024"));
    // build JobConfig forwarded to the workers
    JobConfig jobConfig = new JobConfig();
    jobConfig.put(Constants.ARGS_ITR, Integer.toString(itr));
    jobConfig.put(Constants.ARGS_OPERATION, operation);
    jobConfig.put(Constants.ARGS_SIZE, Integer.toString(size));
    jobConfig.put(Constants.ARGS_WORKERS, Integer.toString(workers));
    jobConfig.put(Constants.ARGS_TASK_STAGES, taskStages);
    jobConfig.put(Constants.ARGS_GAP, gap);
    jobConfig.put(Constants.ARGS_FNAME, fName);
    jobConfig.put(Constants.ARGS_OUTSTANDING, outstanding);
    jobConfig.put(Constants.ARGS_THREADS, threads);
    jobConfig.put(Constants.ARGS_PRINT_INTERVAL, printInt);
    jobConfig.put(Constants.ARGS_DATA_TYPE, dataType);
    jobConfig.put(Constants.ARGS_INIT_ITERATIONS, intItr);
    jobConfig.put(Constants.ARGS_VERIFY, verify);
    jobConfig.put(Constants.ARGS_STREAM, stream);
    jobConfig.put(Constants.ARGS_WINDOW, window);
    jobConfig.put(BenchmarkMetadata.ARG_RUN_BENCHMARK, runBenchmark);
    if (runBenchmark) {
        jobConfig.put(BenchmarkMetadata.ARG_BENCHMARK_METADATA, benchmarkMetadata);
    }
    // resolve the example class for the requested operation and submit it;
    // unknown operations are silently ignored, as in the original switch
    String exampleClass = stream ? resolveStreamingExample(operation) : resolveBatchExample(operation);
    if (exampleClass != null) {
        submitJob(config, workers, jobConfig, exampleClass, memory);
    }
}

/** Maps a batch operation name to its example class name; returns null if unknown. */
private static String resolveBatchExample(String operation) {
    switch (operation) {
        case "reduce":
            return BTReduceExample.class.getName();
        case "allreduce":
            return BTAllReduceExample.class.getName();
        case "gather":
            return BTGatherExample.class.getName();
        case "allgather":
            return BTAllGatherExample.class.getName();
        case "bcast":
            return BTBroadCastExample.class.getName();
        case "partition":
            return BTPartitionExample.class.getName();
        case "direct":
            return BTDirectExample.class.getName();
        case "keyed-reduce":
            return BTKeyedReduceExample.class.getName();
        case "keyed-gather":
            return BTKeyedGatherExample.class.getName();
        case "keyed-partition":
            return BTPartitionKeyedExample.class.getName();
        case "join":
            return BTJoinExample.class.getName();
        default:
            return null;
    }
}

/** Maps a streaming operation name to its example class name; returns null if unknown. */
private static String resolveStreamingExample(String operation) {
    switch (operation) {
        case "direct":
            return STWindowExample.class.getName();
        case "cdirect":
            return STWindowCustomExample.class.getName();
        case "windowmpi":
            return STWindowMPI.class.getName();
        case "windowt":
            return STWindowEventTimeExample.class.getName();
        case "reduce":
            return STReduceExample.class.getName();
        case "allreduce":
            return STAllReduceExample.class.getName();
        case "gather":
            return STGatherExample.class.getName();
        case "allgather":
            return STAllGatherExample.class.getName();
        case "bcast":
            return STBroadCastExample.class.getName();
        case "partition":
            return STPartitionExample.class.getName();
        case "keyed-reduce":
            return STKeyedReduceExample.class.getName();
        case "keyed-gather":
            return STKeyedGatherExample.class.getName();
        case "keyed-partition":
            return STPartitionKeyedExample.class.getName();
        default:
            return null;
    }
}
Also used : Options(org.apache.commons.cli.Options) STWindowExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowExample) STWindowCustomExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowCustomExample) BTReduceExample(edu.iu.dsc.tws.examples.task.batch.BTReduceExample) BTAllGatherExample(edu.iu.dsc.tws.examples.task.batch.BTAllGatherExample) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) BTKeyedGatherExample(edu.iu.dsc.tws.examples.task.batch.BTKeyedGatherExample) STReduceExample(edu.iu.dsc.tws.examples.task.streaming.STReduceExample) BTBroadCastExample(edu.iu.dsc.tws.examples.task.batch.BTBroadCastExample) JobConfig(edu.iu.dsc.tws.api.JobConfig) BTPartitionExample(edu.iu.dsc.tws.examples.task.batch.BTPartitionExample) BTKeyedReduceExample(edu.iu.dsc.tws.examples.task.batch.BTKeyedReduceExample) STAllReduceExample(edu.iu.dsc.tws.examples.task.streaming.STAllReduceExample) BTAllReduceExample(edu.iu.dsc.tws.examples.task.batch.BTAllReduceExample) BTJoinExample(edu.iu.dsc.tws.examples.task.batch.BTJoinExample) STKeyedGatherExample(edu.iu.dsc.tws.examples.task.streaming.STKeyedGatherExample) CommandLineParser(org.apache.commons.cli.CommandLineParser) DefaultParser(org.apache.commons.cli.DefaultParser) STGatherExample(edu.iu.dsc.tws.examples.task.streaming.STGatherExample) BTGatherExample(edu.iu.dsc.tws.examples.task.batch.BTGatherExample) BTPartitionKeyedExample(edu.iu.dsc.tws.examples.task.batch.BTPartitionKeyedExample) STBroadCastExample(edu.iu.dsc.tws.examples.task.streaming.STBroadCastExample) STPartitionExample(edu.iu.dsc.tws.examples.task.streaming.STPartitionExample) STPartitionKeyedExample(edu.iu.dsc.tws.examples.task.streaming.STPartitionKeyedExample) STKeyedReduceExample(edu.iu.dsc.tws.examples.task.streaming.STKeyedReduceExample) CommandLine(org.apache.commons.cli.CommandLine) STWindowEventTimeExample(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowEventTimeExample) 
STWindowMPI(edu.iu.dsc.tws.examples.task.streaming.windowing.STWindowMPI) STAllGatherExample(edu.iu.dsc.tws.examples.task.streaming.STAllGatherExample) BTDirectExample(edu.iu.dsc.tws.examples.task.batch.BTDirectExample)

Example 69 with DefaultParser

Use of org.apache.commons.cli.DefaultParser in the project twister2 by DSC-SPIDAL.

The class HelloTSet, method main.

/**
 * Entry point: reads the requested parallelism from the "para" command line
 * option and submits the HelloTSet job with that many workers.
 *
 * @param args command line arguments; expects the "para" option
 * @throws ParseException if the command line cannot be parsed
 */
public static void main(String[] args) throws ParseException {
    // load the configurations from command line and config files
    Config config = ResourceAllocator.loadConfig(new HashMap<>());
    Options cliOptions = new Options();
    cliOptions.addOption("para", true, "Workers");
    CommandLine parsed = new DefaultParser().parse(cliOptions, args);
    int parallelism = Integer.parseInt(parsed.getOptionValue("para"));
    // forward the parallelism to the workers through the job config
    JobConfig jobConfig = new JobConfig();
    jobConfig.put("para", Integer.toString(parallelism));
    submitJob(config, parallelism, jobConfig, HelloTSet.class.getName());
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) Config(edu.iu.dsc.tws.api.config.Config) JobConfig(edu.iu.dsc.tws.api.JobConfig) CommandLineParser(org.apache.commons.cli.CommandLineParser) JobConfig(edu.iu.dsc.tws.api.JobConfig) DefaultParser(org.apache.commons.cli.DefaultParser)

Example 70 with DefaultParser

Use of org.apache.commons.cli.DefaultParser in the project incubator-gobblin by apache.

The class JobCommand, method run.

/**
 * Runs the "job" admin command: parses the command line arguments, connects
 * to the admin server, and executes the requested job action.
 *
 * @param args command line arguments for the jobs sub-command
 * @throws Exception if the command cannot be completed
 */
@Override
public void run(String[] args) throws Exception {
    this.options = createCommandLineOptions();
    DefaultParser parser = new DefaultParser();
    AdminClient adminClient = null;
    try {
        CommandLine parsedOpts = parser.parse(options, args);
        int resultLimit = parseResultsLimit(parsedOpts);
        // fall back to the default host when no server option was given
        String host = parsedOpts.hasOption(ADMIN_SERVER) ? parsedOpts.getOptionValue(ADMIN_SERVER) : DEFAULT_ADMIN_SERVER;
        int port = DEFAULT_ADMIN_PORT;
        try {
            if (parsedOpts.hasOption(ADMIN_PORT)) {
                port = Integer.parseInt(parsedOpts.getOptionValue(ADMIN_PORT));
            }
        } catch (NumberFormatException e) {
            // non-numeric port: report via help output
            // NOTE(review): presumably printHelpAndExit terminates the process —
            // confirm; otherwise execution continues with DEFAULT_ADMIN_PORT
            printHelpAndExit("The port must be a valid integer.", false);
        }
        adminClient = new AdminClient(host, port);
        try {
            getAction(parsedOpts).execute(parsedOpts, adminClient, resultLimit);
        } catch (CommandException e) {
            printHelpAndExit(e.getMessage(), false);
        }
    } catch (ParseException e) {
        printHelpAndExit("Failed to parse jobs arguments: " + e.getMessage(), true);
    } finally {
        // always release the client connection, even on parse or command failure
        if (adminClient != null)
            adminClient.close();
    }
}
Also used : CommandLine(org.apache.commons.cli.CommandLine) ParseException(org.apache.commons.cli.ParseException) DefaultParser(org.apache.commons.cli.DefaultParser)

Aggregations

DefaultParser (org.apache.commons.cli.DefaultParser)344 CommandLine (org.apache.commons.cli.CommandLine)290 Options (org.apache.commons.cli.Options)241 CommandLineParser (org.apache.commons.cli.CommandLineParser)234 ParseException (org.apache.commons.cli.ParseException)224 HelpFormatter (org.apache.commons.cli.HelpFormatter)116 Option (org.apache.commons.cli.Option)67 File (java.io.File)34 IOException (java.io.IOException)32 HashMap (java.util.HashMap)30 JobConfig (edu.iu.dsc.tws.api.JobConfig)26 ArrayList (java.util.ArrayList)26 Config (edu.iu.dsc.tws.api.config.Config)25 Twister2Job (edu.iu.dsc.tws.api.Twister2Job)21 Properties (java.util.Properties)19 Test (org.junit.jupiter.api.Test)19 Field (core.field.Field)17 Level (java.util.logging.Level)13 ToolOptions (com.google.api.tools.framework.tools.ToolOptions)12 OptionGroup (org.apache.commons.cli.OptionGroup)11