Use of org.apache.hadoop.util.GenericOptionsParser in project tez by apache.
In the class FaultToleranceTestRunner, the method main:
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  String className = null;
  String confFilePath = null;
  if (otherArgs.length == 1) {
    className = otherArgs[0];
  } else if (otherArgs.length == 2) {
    className = otherArgs[0];
    confFilePath = otherArgs[1];
  } else {
    printUsage();
    System.exit(1);
  }
  FaultToleranceTestRunner job = new FaultToleranceTestRunner();
  if (job.run(conf, className, confFilePath)) {
    System.out.println("Succeeded.");
  } else {
    System.out.println("Failed.");
    System.exit(2);
  }
}
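The runner hands all generic Hadoop options (-D properties, -conf files, -files, and so on) to GenericOptionsParser and treats only the leftover arguments as its own positional arguments. A minimal standalone sketch of that split (the class name and the tez.queue.name property below are illustrative, not part of the Tez source):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class GenericOptionsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The parser consumes standard Hadoop flags such as "-D key=value" and
    // "-conf site.xml", applying them to conf; everything else is returned.
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();
    // For example, "-D tez.queue.name=dev TestClass conf.xml" sets the
    // property on conf and leaves remaining == { "TestClass", "conf.xml" }.
    System.out.println("tez.queue.name = " + conf.get("tez.queue.name"));
    for (String arg : remaining) {
      System.out.println("positional argument: " + arg);
    }
  }
}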
Use of org.apache.hadoop.util.GenericOptionsParser in project cdap by caskdata.
In the class MapReduceRuntimeService, the method createJob:
/**
 * Creates a MapReduce {@link Job} instance.
 *
 * @param hadoopTmpDir directory for the "hadoop.tmp.dir" configuration
 */
private Job createJob(File hadoopTmpDir) throws IOException {
  Job job = Job.getInstance(new Configuration(hConf));
  Configuration jobConf = job.getConfiguration();
  if (MapReduceTaskContextProvider.isLocal(jobConf)) {
    // Set the MR framework local directories inside the given tmp directory.
    // Setting "hadoop.tmp.dir" here has no effect, because the Explore Service needs to set
    // "hadoop.tmp.dir" as a system property for Hive to work in local mode, and the variable
    // substitution in the Hadoop conf gives system properties the highest precedence.
    jobConf.set("mapreduce.cluster.local.dir", new File(hadoopTmpDir, "local").getAbsolutePath());
    jobConf.set("mapreduce.jobtracker.system.dir", new File(hadoopTmpDir, "system").getAbsolutePath());
    jobConf.set("mapreduce.jobtracker.staging.root.dir", new File(hadoopTmpDir, "staging").getAbsolutePath());
    jobConf.set("mapreduce.cluster.temp.dir", new File(hadoopTmpDir, "temp").getAbsolutePath());
  }
  if (UserGroupInformation.isSecurityEnabled()) {
    // When running on a secure cluster, this program runner runs inside a YARN container and
    // hence cannot authenticate with the job history server.
    jobConf.unset("mapreduce.jobhistory.address");
    jobConf.setBoolean(Job.JOB_AM_ACCESS_DISABLED, false);
    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
    LOG.debug("Running in secure mode; adding all user credentials: {}", credentials.getAllTokens());
    job.getCredentials().addAll(credentials);
  }
  // Command-line arguments are not supported here, but run the parser anyway to avoid a
  // warning in the logs.
  GenericOptionsParser genericOptionsParser = new GenericOptionsParser(jobConf, null);
  genericOptionsParser.getRemainingArgs();
  return job;
}
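The parser call at the end exists purely for its side effect: constructing GenericOptionsParser against a configuration marks that configuration as having gone through generic option parsing (the Job.USED_GENERIC_PARSER flag in Hadoop 2.x), which keeps the MapReduce client from logging its "Hadoop command-line option parsing not performed" warning at submit time. The pattern in isolation, as a sketch rather than CDAP code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class SuppressParserWarning {
  public static void main(String[] ignored) throws Exception {
    Configuration jobConf = new Configuration();
    // There are no real arguments to parse, so null is passed; the parser
    // tolerates this and simply flags jobConf as parsed, suppressing the
    // "command-line option parsing not performed" warning at job submission.
    new GenericOptionsParser(jobConf, null).getRemainingArgs();
  }
}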
Use of org.apache.hadoop.util.GenericOptionsParser in project flink by apache.
In the class HadoopUtils, the method paramsFromGenericOptionsParser:
/**
 * Returns a {@link ParameterTool} for the arguments parsed by {@link GenericOptionsParser}.
 *
 * @param args Input array of arguments. It should be parsable by {@link GenericOptionsParser}
 * @return A {@link ParameterTool}
 * @throws IOException If the arguments cannot be parsed by {@link GenericOptionsParser}
 * @see GenericOptionsParser
 */
public static ParameterTool paramsFromGenericOptionsParser(String[] args) throws IOException {
  Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions();
  Map<String, String> map = new HashMap<String, String>();
  for (Option option : options) {
    String[] split = option.getValue().split("=");
    map.put(split[0], split[1]);
  }
  return ParameterTool.fromMap(map);
}
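A hypothetical call site, assuming the flink-hadoop-compatibility HadoopUtils shown above (the property names are made up for illustration):

import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.hadoopcompatibility.HadoopUtils;

public class ParamsDemo {
  public static void main(String[] unused) throws Exception {
    // Arguments as they might arrive in a Flink job's main method:
    String[] args = { "-D", "input.path=hdfs:///data/in", "-D", "parallelism=4" };
    ParameterTool params = HadoopUtils.paramsFromGenericOptionsParser(args);
    System.out.println(params.get("input.path"));   // hdfs:///data/in
    System.out.println(params.getInt("parallelism")); // 4
  }
}

Note that because the method splits each option value on every "=", a value that itself contains "=" would be truncated after its first segment.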
Use of org.apache.hadoop.util.GenericOptionsParser in project hbase by apache.
In the class TestImportExport, the method testExportScan:
@Test
public void testExportScan() throws Exception {
  int version = 100;
  long startTime = EnvironmentEdgeManager.currentTime();
  long endTime = startTime + 1;
  String prefix = "row";
  String label_0 = "label_0";
  String label_1 = "label_1";
  String[] args = { "table", "outputDir", String.valueOf(version), String.valueOf(startTime),
      String.valueOf(endTime), prefix };
  Scan scan = ExportUtils.getScanFromCommandLine(UTIL.getConfiguration(), args);
  assertEquals(version, scan.getMaxVersions());
  assertEquals(startTime, scan.getTimeRange().getMin());
  assertEquals(endTime, scan.getTimeRange().getMax());
  assertEquals(true, (scan.getFilter() instanceof PrefixFilter));
  assertEquals(0, Bytes.compareTo(((PrefixFilter) scan.getFilter()).getPrefix(),
      Bytes.toBytesBinary(prefix)));
  String[] argsWithLabels = {
      "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + label_0 + "," + label_1,
      "table", "outputDir", String.valueOf(version), String.valueOf(startTime),
      String.valueOf(endTime), prefix };
  Configuration conf = new Configuration(UTIL.getConfiguration());
  // parse the "-D" options
  String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs();
  Scan scanWithLabels = ExportUtils.getScanFromCommandLine(conf, otherArgs);
  assertEquals(version, scanWithLabels.getMaxVersions());
  assertEquals(startTime, scanWithLabels.getTimeRange().getMin());
  assertEquals(endTime, scanWithLabels.getTimeRange().getMax());
  assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter));
  assertEquals(0, Bytes.compareTo(((PrefixFilter) scanWithLabels.getFilter()).getPrefix(),
      Bytes.toBytesBinary(prefix)));
  assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size());
  assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0));
  assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1));
}
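The test relies on GenericOptionsParser applying the "-D" visibility-labels property to the Configuration before the remaining positional arguments ever reach ExportUtils. Once the property is on the Configuration, turning it into scan authorizations is straightforward; a hypothetical sketch of that step (not the actual ExportUtils code, though the Scan and Authorizations calls are standard HBase client API):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.ExportUtils;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

public class VisibilityLabelScan {
  // Apply visibility labels parsed from "-D" options onto a Scan, reading
  // the same property the test above sets.
  static Scan applyLabels(Configuration conf, Scan scan) {
    String[] labels = conf.getStrings(ExportUtils.EXPORT_VISIBILITY_LABELS);
    if (labels != null && labels.length > 0) {
      scan.setAuthorizations(new Authorizations(labels));
    }
    return scan;
  }
}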
Use of org.apache.hadoop.util.GenericOptionsParser in project hbase by apache.
In the class HashTable, the method run:
@Override
public int run(String[] args) throws Exception {
  String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  if (!doCommandLine(otherArgs)) {
    return 1;
  }
  Job job = createSubmittableJob(otherArgs);
  writeTempManifestFile();
  if (!job.waitForCompletion(true)) {
    LOG.info("Map-reduce job failed!");
    return 1;
  }
  completeManifest();
  return 0;
}
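HashTable implements the Tool interface (note the getConf() call), so it is normally launched through ToolRunner, which itself runs GenericOptionsParser before calling run(); re-parsing inside run() is then a harmless no-op for generic options. A typical driver, sketched under the assumption that HashTable exposes a Configuration-taking constructor as it does in HBase:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.HashTable;
import org.apache.hadoop.util.ToolRunner;

public class HashTableDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // ToolRunner applies generic options to conf and forwards only the
    // remaining arguments to HashTable.run(String[]).
    int exitCode = ToolRunner.run(new HashTable(conf), args);
    System.exit(exitCode);
  }
}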