Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class ReliabilityTest, method run.
public int run(String[] args) throws Exception {
  Configuration conf = getConf();
  if ("local".equals(conf.get(JTConfig.JT_IPC_ADDRESS, "local"))) {
    displayUsage();
  }
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length == 2) {
    if (otherArgs[0].equals("-scratchdir")) {
      dir = otherArgs[1];
    } else {
      displayUsage();
    }
  } else if (otherArgs.length == 0) {
    dir = System.getProperty("user.dir");
  } else {
    displayUsage();
  }
  // to keep jobs from failing when individual task attempts fail,
  // set high values for the max attempts
  conf.setInt(JobContext.MAP_MAX_ATTEMPTS, 10);
  conf.setInt(JobContext.REDUCE_MAX_ATTEMPTS, 10);
  runSleepJobTest(new JobClient(new JobConf(conf)), conf);
  runSortJobTests(new JobClient(new JobConf(conf)), conf);
  return 0;
}
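The GenericOptionsParser line above does the real work: generic Hadoop flags (-D, -conf, -fs, -jt, -files, and so on) are absorbed into the Configuration, and only tool-specific arguments such as -scratchdir come back. A minimal standalone sketch of that split (GopSplitDemo and the printed labels are invented for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class GopSplitDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Generic flags such as "-D mapreduce.job.reduces=2" are absorbed
    // into conf; everything else is returned untouched.
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();
    System.out.println("reduces = " + conf.get("mapreduce.job.reduces"));
    for (String arg : remaining) {
      System.out.println("remaining: " + arg);
    }
  }
}

Run with, say, -D mapreduce.job.reduces=2 -scratchdir /tmp/x, the configured value lands in conf and only -scratchdir /tmp/x remains for the tool to parse, which is exactly the contract run() above relies on.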
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class Submitter, method run.
@Override
public int run(String[] args) throws Exception {
  CommandLineParser cli = new CommandLineParser();
  if (args.length == 0) {
    cli.printUsage();
    return 1;
  }
  cli.addOption("input", false, "input path to the maps", "path");
  cli.addOption("output", false, "output path from the reduces", "path");
  cli.addOption("jar", false, "job jar file", "path");
  cli.addOption("inputformat", false, "java classname of InputFormat", "class");
  //cli.addArgument("javareader", false, "is the RecordReader in Java");
  cli.addOption("map", false, "java classname of Mapper", "class");
  cli.addOption("partitioner", false, "java classname of Partitioner", "class");
  cli.addOption("reduce", false, "java classname of Reducer", "class");
  cli.addOption("writer", false, "java classname of OutputFormat", "class");
  cli.addOption("program", false, "URI to application executable", "class");
  cli.addOption("reduces", false, "number of reduces", "num");
  cli.addOption("jobconf", false, "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
  cli.addOption("lazyOutput", false, "Optional. Create output lazily", "boolean");
  Parser parser = cli.createParser();
  try {
    GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
    CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
    JobConf job = new JobConf(getConf());
    if (results.hasOption("input")) {
      FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
    }
    if (results.hasOption("output")) {
      FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
    }
    if (results.hasOption("jar")) {
      job.setJar(results.getOptionValue("jar"));
    }
    if (results.hasOption("inputformat")) {
      setIsJavaRecordReader(job, true);
      job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
    }
    if (results.hasOption("javareader")) {
      setIsJavaRecordReader(job, true);
    }
    if (results.hasOption("map")) {
      setIsJavaMapper(job, true);
      job.setMapperClass(getClass(results, "map", job, Mapper.class));
    }
    if (results.hasOption("partitioner")) {
      job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
    }
    if (results.hasOption("reduce")) {
      setIsJavaReducer(job, true);
      job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
    }
    if (results.hasOption("reduces")) {
      job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
    }
    if (results.hasOption("writer")) {
      setIsJavaRecordWriter(job, true);
      job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
    }
    if (results.hasOption("lazyOutput")) {
      if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
        LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormat().getClass());
      }
    }
    if (results.hasOption("program")) {
      setExecutable(job, results.getOptionValue("program"));
    }
    if (results.hasOption("jobconf")) {
      LOG.warn("-jobconf option is deprecated, please use -D instead.");
      String options = results.getOptionValue("jobconf");
      StringTokenizer tokenizer = new StringTokenizer(options, ",");
      while (tokenizer.hasMoreTokens()) {
        String keyVal = tokenizer.nextToken().trim();
        String[] keyValSplit = keyVal.split("=");
        job.set(keyValSplit[0], keyValSplit[1]);
      }
    }
    // if they gave us a jar file, include it into the class path
    String jarFile = job.getJar();
    if (jarFile != null) {
      final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURL() };
      // FindBugs complains that creating a URLClassLoader should be
      // in a doPrivileged() block.
      ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
        public ClassLoader run() {
          return new URLClassLoader(urls);
        }
      });
      job.setClassLoader(loader);
    }
    runJob(job);
    return 0;
  } catch (ParseException pe) {
    LOG.info("Error : " + pe);
    cli.printUsage();
    return 1;
  }
}
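Submitter constructs its GenericOptionsParser by hand inside run(); the more common arrangement is to let ToolRunner apply one before run() is even entered. A minimal sketch using only the public Tool/ToolRunner API (EchoTool is a made-up name):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class EchoTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    // ToolRunner has already passed argv through GenericOptionsParser,
    // so getConf() reflects any -D/-conf/-fs flags and args holds only
    // the tool-specific remainder.
    for (String a : args) {
      System.out.println("tool arg: " + a);
    }
    return 0;
  }

  public static void main(String[] argv) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new EchoTool(), argv));
  }
}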
Use of org.apache.hadoop.util.GenericOptionsParser in project flink by apache.
The class ParameterTool, method fromGenericOptionsParser.
/**
 * Returns a {@link ParameterTool} for the arguments parsed by {@link GenericOptionsParser}.
 *
 * @param args Input array of arguments. It should be parsable by {@link GenericOptionsParser}.
 * @return A {@link ParameterTool}
 * @throws IOException If the arguments cannot be parsed by {@link GenericOptionsParser}
 * @see GenericOptionsParser
 * @deprecated Please use
 *     {@link org.apache.flink.hadoopcompatibility.HadoopUtils#paramsFromGenericOptionsParser(String[])}
 *     from the flink-hadoop-compatibility project.
 */
@Deprecated
@PublicEvolving
public static ParameterTool fromGenericOptionsParser(String[] args) throws IOException {
  Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions();
  Map<String, String> map = new HashMap<String, String>();
  for (Option option : options) {
    String[] split = option.getValue().split("=");
    map.put(split[0], split[1]);
  }
  return fromMap(map);
}
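Hypothetical usage of the deprecated helper above. GenericOptionsParser stores each -D argument as a single name=value option value, which is exactly what the split("=") loop unpacks into the map; ParamsDemo and the sample values are invented for illustration:

import org.apache.flink.api.java.utils.ParameterTool;

public class ParamsDemo {
  public static void main(String[] args) throws Exception {
    // "-D name=value" pairs, in the form GenericOptionsParser accepts:
    String[] demo = {"-D", "input=/data/in", "-D", "parallelism=4"};
    ParameterTool params = ParameterTool.fromGenericOptionsParser(demo);
    System.out.println(params.get("input"));          // /data/in
    System.out.println(params.getInt("parallelism")); // 4
  }
}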
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class DataNode, method instantiateDataNode.
/** Instantiate a single datanode object, along with its secure resources.
 * This must be run by invoking {@link DataNode#runDatanodeDaemon()}
 * subsequently.
 */
public static DataNode instantiateDataNode(String[] args, Configuration conf, SecureResources resources) throws IOException {
  if (conf == null)
    conf = new HdfsConfiguration();
  if (args != null) {
    // parse generic hadoop options
    GenericOptionsParser hParser = new GenericOptionsParser(conf, args);
    args = hParser.getRemainingArgs();
  }
  if (!parseArguments(args, conf)) {
    printUsage(System.err);
    return null;
  }
  Collection<StorageLocation> dataLocations = getStorageLocations(conf);
  UserGroupInformation.setConfiguration(conf);
  SecurityUtil.login(conf, DFS_DATANODE_KEYTAB_FILE_KEY, DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, getHostName(conf));
  return makeInstance(dataLocations, conf, resources);
}
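A sketch of the caller-side contract: instantiateDataNode returns null when argument parsing fails, and the javadoc requires a follow-up call to runDatanodeDaemon(). StartDn is a made-up wrapper, and passing null for the SecureResources parameter assumes the insecure startup path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.datanode.DataNode;

public class StartDn {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Generic options in args are consumed first; a null return means
    // the datanode-specific arguments were rejected.
    DataNode dn = DataNode.instantiateDataNode(args, conf, null);
    if (dn != null) {
      dn.runDatanodeDaemon(); // required by the javadoc above
    }
  }
}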
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class NameNode, method createNameNode.
public static NameNode createNameNode(String[] argv, Configuration conf) throws IOException {
  LOG.info("createNameNode " + Arrays.asList(argv));
  if (conf == null)
    conf = new HdfsConfiguration();
  // Parse out some generic args into Configuration.
  GenericOptionsParser hParser = new GenericOptionsParser(conf, argv);
  argv = hParser.getRemainingArgs();
  // Parse the rest, NN specific args.
  StartupOption startOpt = parseArguments(argv);
  if (startOpt == null) {
    printUsage(System.err);
    return null;
  }
  setStartupOption(conf, startOpt);
  boolean aborted = false;
  switch (startOpt) {
    case FORMAT:
      aborted = format(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat());
      terminate(aborted ? 1 : 0);
      // avoid javac warning
      return null;
    case GENCLUSTERID:
      System.err.println("Generating new cluster id:");
      System.out.println(NNStorage.newClusterID());
      terminate(0);
      return null;
    case ROLLBACK:
      aborted = doRollback(conf, true);
      terminate(aborted ? 1 : 0);
      // avoid warning
      return null;
    case BOOTSTRAPSTANDBY:
      String[] toolArgs = Arrays.copyOfRange(argv, 1, argv.length);
      int rc = BootstrapStandby.run(toolArgs, conf);
      terminate(rc);
      // avoid warning
      return null;
    case INITIALIZESHAREDEDITS:
      aborted = initializeSharedEdits(conf, startOpt.getForceFormat(), startOpt.getInteractiveFormat());
      terminate(aborted ? 1 : 0);
      // avoid warning
      return null;
    case BACKUP:
    case CHECKPOINT:
      NamenodeRole role = startOpt.toNodeRole();
      DefaultMetricsSystem.initialize(role.toString().replace(" ", ""));
      return new BackupNode(conf, role);
    case RECOVER:
      NameNode.doRecovery(startOpt, conf);
      return null;
    case METADATAVERSION:
      printMetadataVersion(conf);
      terminate(0);
      // avoid javac warning
      return null;
    case UPGRADEONLY:
      DefaultMetricsSystem.initialize("NameNode");
      new NameNode(conf);
      terminate(0);
      return null;
    default:
      DefaultMetricsSystem.initialize("NameNode");
      return new NameNode(conf);
  }
}
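A sketch of how a caller would drive createNameNode, along the lines of NameNode's own main method: most startup options call terminate() and never return, so only a non-null result represents a live NameNode worth joining. StartNn is a made-up name:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class StartNn {
  public static void main(String[] argv) throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Generic options in argv are parsed into conf first, then the
    // NN-specific startup option decides what happens next.
    NameNode nn = NameNode.createNameNode(argv, conf);
    if (nn != null) {
      nn.join(); // block until the NameNode shuts down
    }
  }
}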