
Example 21 with GenericOptionsParser

use of org.apache.hadoop.util.GenericOptionsParser in project tez by apache.

the class FaultToleranceTestRunner method main.

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    String className = null;
    String confFilePath = null;
    if (otherArgs.length == 1) {
        className = otherArgs[0];
    } else if (otherArgs.length == 2) {
        className = otherArgs[0];
        confFilePath = otherArgs[1];
    } else {
        printUsage();
        System.exit(1);
    }
    FaultToleranceTestRunner job = new FaultToleranceTestRunner();
    if (job.run(conf, className, confFilePath)) {
        System.out.println("Succeeded.");
    } else {
        System.out.println("Failed.");
        System.exit(2);
    }
}
Also used : YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Configuration(org.apache.hadoop.conf.Configuration) TezConfiguration(org.apache.tez.dag.api.TezConfiguration) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)
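
For context, a minimal standalone sketch (not taken from the Tez project; the class name and sample arguments are illustrative) of what the parser call above does: generic Hadoop options such as -D are applied to the Configuration, and getRemainingArgs() returns only the application-specific arguments.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class ParseArgsSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // e.g. args = { "-D", "mapreduce.job.reduces=2", "SomeTestClass" }
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        // The -D pair is now an ordinary Configuration property ...
        System.out.println("mapreduce.job.reduces = " + conf.get("mapreduce.job.reduces"));
        // ... and otherArgs holds only the application arguments (here, the class name).
        for (String arg : otherArgs) {
            System.out.println("remaining arg: " + arg);
        }
    }
}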

Example 22 with GenericOptionsParser

use of org.apache.hadoop.util.GenericOptionsParser in project cdap by caskdata.

the class MapReduceRuntimeService method createJob.

/**
 * Creates a MapReduce {@link Job} instance.
 *
 * @param hadoopTmpDir directory for the "hadoop.tmp.dir" configuration
 */
private Job createJob(File hadoopTmpDir) throws IOException {
    Job job = Job.getInstance(new Configuration(hConf));
    Configuration jobConf = job.getConfiguration();
    if (MapReduceTaskContextProvider.isLocal(jobConf)) {
        // Set the MR framework local directories inside the given tmp directory.
        // Setting "hadoop.tmp.dir" here has no effect due to Explore Service need to set "hadoop.tmp.dir"
        // as system property for Hive to work in local mode. The variable substitution of hadoop conf
        // gives system property the highest precedence.
        jobConf.set("mapreduce.cluster.local.dir", new File(hadoopTmpDir, "local").getAbsolutePath());
        jobConf.set("mapreduce.jobtracker.system.dir", new File(hadoopTmpDir, "system").getAbsolutePath());
        jobConf.set("mapreduce.jobtracker.staging.root.dir", new File(hadoopTmpDir, "staging").getAbsolutePath());
        jobConf.set("mapreduce.cluster.temp.dir", new File(hadoopTmpDir, "temp").getAbsolutePath());
    }
    if (UserGroupInformation.isSecurityEnabled()) {
        // When running on a secure cluster, this program runner runs inside a YARN container and therefore
        // cannot authenticate with the job history server.
        jobConf.unset("mapreduce.jobhistory.address");
        jobConf.setBoolean(Job.JOB_AM_ACCESS_DISABLED, false);
        Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
        LOG.debug("Running in secure mode; adding all user credentials: {}", credentials.getAllTokens());
        job.getCredentials().addAll(credentials);
    }
    // Command-line arguments are not supported here, but run the parser anyway to avoid a warning in the logs
    GenericOptionsParser genericOptionsParser = new GenericOptionsParser(jobConf, null);
    genericOptionsParser.getRemainingArgs();
    return job;
}
Also used : CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) Job(org.apache.hadoop.mapreduce.Job) File(java.io.File) JarFile(java.util.jar.JarFile) Credentials(org.apache.hadoop.security.Credentials) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)
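
The comment above suggests the parser is invoked only for its side effect of marking the Configuration as parsed, which silences the MapReduce client's warning about command-line parsing not being performed. A small sketch of that pattern in isolation (the class and method names are assumed, not CDAP code):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;

public final class SilenceParserWarningSketch {
    static Job newJob(Configuration baseConf) throws Exception {
        Job job = Job.getInstance(new Configuration(baseConf));
        Configuration jobConf = job.getConfiguration();
        // There are no real arguments to parse; the call is made only for its
        // side effect of marking jobConf as having gone through the parser.
        new GenericOptionsParser(jobConf, new String[0]).getRemainingArgs();
        return job;
    }
}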

Example 23 with GenericOptionsParser

use of org.apache.hadoop.util.GenericOptionsParser in project flink by apache.

the class HadoopUtils method paramsFromGenericOptionsParser.

/**
 * Returns {@link ParameterTool} for the arguments parsed by {@link GenericOptionsParser}.
 *
 * @param args Input array arguments. It should be parsable by {@link GenericOptionsParser}
 * @return A {@link ParameterTool}
 * @throws IOException If arguments cannot be parsed by {@link GenericOptionsParser}
 * @see GenericOptionsParser
 */
public static ParameterTool paramsFromGenericOptionsParser(String[] args) throws IOException {
    Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions();
    Map<String, String> map = new HashMap<String, String>();
    for (Option option : options) {
        String[] split = option.getValue().split("=");
        map.put(split[0], split[1]);
    }
    return ParameterTool.fromMap(map);
}
Also used : HashMap(java.util.HashMap) Option(org.apache.commons.cli.Option) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)
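
A hedged usage sketch for the helper above (the property names and the driver class are illustrative, not from a real Flink example); it assumes the flink-hadoop-compatibility HadoopUtils shown here:

import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.hadoopcompatibility.HadoopUtils;

public class FlinkHadoopArgsSketch {
    public static void main(String[] args) throws Exception {
        // e.g. args = { "-D", "input.path=/data/in", "-D", "output.path=/data/out" }
        ParameterTool params = HadoopUtils.paramsFromGenericOptionsParser(args);
        // Each -Dkey=value pair becomes a ParameterTool entry.
        String input = params.get("input.path");
        String output = params.get("output.path");
        System.out.println("reading from " + input + ", writing to " + output);
    }
}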

Example 24 with GenericOptionsParser

use of org.apache.hadoop.util.GenericOptionsParser in project hbase by apache.

the class TestImportExport method testExportScan.

@Test
public void testExportScan() throws Exception {
    int version = 100;
    long startTime = EnvironmentEdgeManager.currentTime();
    long endTime = startTime + 1;
    String prefix = "row";
    String label_0 = "label_0";
    String label_1 = "label_1";
    String[] args = { "table", "outputDir", String.valueOf(version), String.valueOf(startTime), String.valueOf(endTime), prefix };
    Scan scan = ExportUtils.getScanFromCommandLine(UTIL.getConfiguration(), args);
    assertEquals(version, scan.getMaxVersions());
    assertEquals(startTime, scan.getTimeRange().getMin());
    assertEquals(endTime, scan.getTimeRange().getMax());
    assertEquals(true, (scan.getFilter() instanceof PrefixFilter));
    assertEquals(0, Bytes.compareTo(((PrefixFilter) scan.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
    String[] argsWithLabels = { "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + label_0 + "," + label_1, "table", "outputDir", String.valueOf(version), String.valueOf(startTime), String.valueOf(endTime), prefix };
    Configuration conf = new Configuration(UTIL.getConfiguration());
    // parse the "-D" options
    String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs();
    Scan scanWithLabels = ExportUtils.getScanFromCommandLine(conf, otherArgs);
    assertEquals(version, scanWithLabels.getMaxVersions());
    assertEquals(startTime, scanWithLabels.getTimeRange().getMin());
    assertEquals(endTime, scanWithLabels.getTimeRange().getMax());
    assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter));
    assertEquals(0, Bytes.compareTo(((PrefixFilter) scanWithLabels.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
    assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size());
    assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0));
    assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1));
}
Also used : PrefixFilter(org.apache.hadoop.hbase.filter.PrefixFilter) Configuration(org.apache.hadoop.conf.Configuration) Scan(org.apache.hadoop.hbase.client.Scan) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser) Test(org.junit.Test)
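
A small, self-contained JUnit-style sketch of the "-D" parsing step the test above relies on (the class name and the configuration key are hypothetical stand-ins, not the real ExportUtils constant):

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;
import org.junit.Test;

public class GenericOptionsParserSketchTest {
    @Test
    public void dOptionLandsInConfiguration() throws Exception {
        Configuration conf = new Configuration();
        // "example.visibility.labels" is a made-up key standing in for the real constant.
        String[] args = { "-D", "example.visibility.labels=label_0,label_1", "table", "outputDir" };
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        assertEquals("label_0,label_1", conf.get("example.visibility.labels"));
        assertArrayEquals(new String[] { "table", "outputDir" }, otherArgs);
    }
}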

Example 25 with GenericOptionsParser

use of org.apache.hadoop.util.GenericOptionsParser in project hbase by apache.

the class HashTable method run.

@Override
public int run(String[] args) throws Exception {
    String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
    if (!doCommandLine(otherArgs)) {
        return 1;
    }
    Job job = createSubmittableJob(otherArgs);
    writeTempManifestFile();
    if (!job.waitForCompletion(true)) {
        LOG.info("Map-reduce job failed!");
        return 1;
    }
    completeManifest();
    return 0;
}
Also used : Job(org.apache.hadoop.mapreduce.Job) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)
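
For completeness, a sketch of how a Tool implementation like the one above is typically launched. ToolRunner applies GenericOptionsParser to the raw arguments, sets the resulting Configuration on the Tool, and passes only the remaining arguments to run(); re-parsing inside run(), as HashTable does above, also covers callers that bypass ToolRunner. The Tool subclass below is a stand-in, not HBase code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {
    @Override
    public int run(String[] otherArgs) throws Exception {
        // When launched via ToolRunner, generic options (-D, -conf, -fs, ...) have
        // already been applied to getConf() and stripped from otherArgs.
        System.out.println("application arguments: " + otherArgs.length);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new MyTool(), args));
    }
}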

Aggregations

GenericOptionsParser (org.apache.hadoop.util.GenericOptionsParser): 102
Configuration (org.apache.hadoop.conf.Configuration): 72
Path (org.apache.hadoop.fs.Path): 38
Job (org.apache.hadoop.mapreduce.Job): 35
CommandLine (org.apache.commons.cli.CommandLine): 18
IOException (java.io.IOException): 15
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 11
PosixParser (org.apache.commons.cli.PosixParser): 10
FileSystem (org.apache.hadoop.fs.FileSystem): 10
HCatSchema (org.apache.hive.hcatalog.data.schema.HCatSchema): 10
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 9
ParseException (org.apache.commons.cli.ParseException): 7
Test (org.junit.jupiter.api.Test): 7
ArrayList (java.util.ArrayList): 6
Options (org.apache.commons.cli.Options): 6
JobConf (org.apache.hadoop.mapred.JobConf): 6
File (java.io.File): 5
HashMap (java.util.HashMap): 5
YarnUncaughtExceptionHandler (org.apache.hadoop.yarn.YarnUncaughtExceptionHandler): 5
TezConfiguration (org.apache.tez.dag.api.TezConfiguration): 5