Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class SecondarySort, method main.
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: secondarysort <in> <out>");
    System.exit(2);
  }
  Job job = Job.getInstance(conf, "secondary sort");
  job.setJarByClass(SecondarySort.class);
  job.setMapperClass(MapClass.class);
  job.setReducerClass(Reduce.class);
  // Group and partition by the first int in the pair.
  job.setPartitionerClass(FirstPartitioner.class);
  job.setGroupingComparatorClass(FirstGroupingComparator.class);
  // The map output is IntPair, IntWritable.
  job.setMapOutputKeyClass(IntPair.class);
  job.setMapOutputValueClass(IntWritable.class);
  // The reduce output is Text, IntWritable.
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
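This is the canonical driver pattern: GenericOptionsParser consumes Hadoop's generic options (-conf, -D, -fs, -jt, -files, -libjars, -archives), applies them to the Configuration, and getRemainingArgs() hands back only the application's own arguments. A minimal standalone sketch of that behavior (class name and argument values are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class GenericOptionsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Simulates: secondarysort -D mapreduce.job.reduces=2 /in /out
    String[] argv = {"-D", "mapreduce.job.reduces=2", "/in", "/out"};
    String[] remaining = new GenericOptionsParser(conf, argv).getRemainingArgs();
    // The -D pair was consumed and applied to conf; only the paths remain.
    System.out.println(conf.get("mapreduce.job.reduces")); // 2
    System.out.println(String.join(" ", remaining));       // /in /out
  }
}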
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class DFSZKFailoverController, method main.
public static void main(String[] args) throws Exception {
  StringUtils.startupShutdownMessage(DFSZKFailoverController.class, args, LOG);
  if (DFSUtil.parseHelpArgument(args, ZKFailoverController.USAGE, System.out, true)) {
    System.exit(0);
  }
  GenericOptionsParser parser =
      new GenericOptionsParser(new HdfsConfiguration(), args);
  DFSZKFailoverController zkfc =
      DFSZKFailoverController.create(parser.getConfiguration());
  int retCode = 0;
  try {
    retCode = zkfc.run(parser.getRemainingArgs());
  } catch (Throwable t) {
    LOG.fatal("Got a fatal error, exiting now", t);
  }
  System.exit(retCode);
}
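Unlike the previous example, this one keeps the parser instance around and calls getConfiguration(), which returns the Configuration after the generic options have been applied to it; anything the parser did not recognize (such as ZKFC's own -formatZK flag) stays in getRemainingArgs() and is forwarded to zkfc.run(). A minimal sketch of those two accessors, with an illustrative quorum value:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.GenericOptionsParser;

public class GetConfigurationDemo {
  public static void main(String[] args) throws Exception {
    GenericOptionsParser parser = new GenericOptionsParser(
        new HdfsConfiguration(),
        new String[] {"-D", "ha.zookeeper.quorum=zk1:2181", "-formatZK"});
    Configuration conf = parser.getConfiguration();
    System.out.println(conf.get("ha.zookeeper.quorum")); // zk1:2181
    // -formatZK is not a generic option, so it survives for zkfc.run(...):
    System.out.println(String.join(" ", parser.getRemainingArgs())); // -formatZK
  }
}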
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
The class DelegationTokenFetcher, method main.
/**
 * Command-line interface.
 */
public static void main(final String[] args) throws Exception {
  final Configuration conf = new HdfsConfiguration();
  Options fetcherOptions = new Options();
  fetcherOptions
      .addOption(WEBSERVICE, true, "HTTP url to reach the NameNode at")
      .addOption(RENEWER, true, "Name of the delegation token renewer")
      .addOption(CANCEL, false, "cancel the token")
      .addOption(RENEW, false, "renew the token")
      .addOption(PRINT, false, "print the token")
      .addOption(VERBOSE, false, "print verbose output")
      .addOption(HELP_SHORT, HELP, false, "print out help information");
  GenericOptionsParser parser = new GenericOptionsParser(conf, fetcherOptions, args);
  CommandLine cmd = parser.getCommandLine();
  final String webUrl = cmd.hasOption(WEBSERVICE) ? cmd.getOptionValue(WEBSERVICE) : null;
  final String renewer = cmd.hasOption(RENEWER) ? cmd.getOptionValue(RENEWER) : null;
  final boolean cancel = cmd.hasOption(CANCEL);
  final boolean renew = cmd.hasOption(RENEW);
  final boolean print = cmd.hasOption(PRINT);
  final boolean verbose = cmd.hasOption(VERBOSE);
  final boolean help = cmd.hasOption(HELP);
  String[] remaining = parser.getRemainingArgs();
  // Check option validity.
  if (help) {
    printUsage(System.out);
    System.exit(0);
  }
  int commandCount = (cancel ? 1 : 0) + (renew ? 1 : 0) + (print ? 1 : 0);
  if (commandCount > 1) {
    System.err.println("ERROR: Only specify cancel, renew or print.");
    printUsage(System.err);
  }
  if (remaining.length != 1 || remaining[0].charAt(0) == '-') {
    System.err.println("ERROR: Must specify exactly one token file");
    printUsage(System.err);
  }
  // Default to using the local file system.
  FileSystem local = FileSystem.getLocal(conf);
  final Path tokenFile = new Path(local.getWorkingDirectory(), remaining[0]);
  // Login the current user and run as them.
  UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      if (print) {
        printTokens(conf, tokenFile, verbose);
      } else if (cancel) {
        cancelTokens(conf, tokenFile);
      } else if (renew) {
        renewTokens(conf, tokenFile);
      } else {
        // Otherwise we are fetching.
        FileSystem fs = getFileSystem(conf, webUrl);
        saveDelegationToken(conf, fs, renewer, tokenFile);
      }
      return null;
    }
  });
}
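Note the three-argument constructor: the fetcher's own Options are parsed in the same pass as the generic Hadoop options, getCommandLine() exposes the application side, and getRemainingArgs() keeps the positional arguments (here, the token file). A minimal sketch of that combined-options pattern, with an illustrative option, property, and file name:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class CombinedOptionsDemo {
  public static void main(String[] args) throws Exception {
    // Application-specific options, parsed alongside -D/-conf/-fs etc.
    Options appOpts = new Options();
    appOpts.addOption("print", false, "print the token");
    String[] argv = {"-D", "hadoop.security.authentication=kerberos",
                     "-print", "token.bin"};
    GenericOptionsParser parser =
        new GenericOptionsParser(new Configuration(), appOpts, argv);
    CommandLine cmd = parser.getCommandLine(); // application options live here
    System.out.println(cmd.hasOption("print"));                      // true
    System.out.println(String.join(" ", parser.getRemainingArgs())); // token.bin
  }
}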
Use of org.apache.hadoop.util.GenericOptionsParser in project hive by apache.
The class DelegationTokenTool, method readArgs.
private void readArgs(String[] args) throws Exception {
  args = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  Options options = new Options();
  options.addOption(new Option("confLocation", true, "Location of HCat/Hive Server's hive-site."));
  options.addOption(new Option("delete", false, "Delete delegation token."));
  options.addOption(new Option("list", false, "List delegation tokens."));
  options.addOption(new Option("olderThan", true, "Filter for token's issue-date. (e.g. 3d, 1h or 4m)."));
  options.addOption(new Option("expired", false, "Select expired delegation tokens for listing/deletion."));
  options.addOption(new Option("dryRun", false, "Don't actually delete delegation tokens."));
  options.addOption(new Option("batchSize", true, "Number of tokens to drop between sleep intervals."));
  options.addOption(new Option("sleepTime", true, "Sleep-time in seconds, between batches of dropped delegation tokens."));
  options.addOption(new Option("serverMode", true, "The service from which to read delegation tokens. Should be either of [METASTORE, HIVESERVER2]."));
  // stopAtNonOption == false: fail on options that don't exist.
  CommandLine commandLine = new GnuParser().parse(options, args, false);
  if (commandLine.hasOption("confLocation")) {
    confLocation = commandLine.getOptionValue("confLocation");
  }
  if (commandLine.hasOption("list")) {
    opType = OpType.LIST;
  } else if (commandLine.hasOption("delete")) {
    opType = OpType.DELETE;
  } else {
    throw new IllegalArgumentException("Operation must be delete, list or get!");
  }
  isDryRun = commandLine.hasOption("dryRun");
  if (commandLine.hasOption("expired")) {
    LOG.info("Working on expired delegation tokens!");
    timeLimitMillis = System.currentTimeMillis();
    selectForDeletion = new Predicate<DelegationTokenIdentifier>() {
      public boolean apply(DelegationTokenIdentifier input) {
        return timeLimitMillis > input.getMaxDate();
      }
    };
  } else if (commandLine.hasOption("olderThan")) {
    String olderThanLimitString = commandLine.getOptionValue("olderThan");
    // The leading long literals (24L, 60L) force long arithmetic; the original
    // all-int products overflow for spans over roughly 24 days.
    switch (olderThanLimitString.charAt(olderThanLimitString.length() - 1)) {
      case 'd':
      case 'D':
        timeLimitMillis = System.currentTimeMillis()
            - 24L * 60 * 60 * 1000 * Integer.parseInt(olderThanLimitString.substring(0, olderThanLimitString.length() - 1));
        break;
      case 'h':
      case 'H':
        timeLimitMillis = System.currentTimeMillis()
            - 60L * 60 * 1000 * Integer.parseInt(olderThanLimitString.substring(0, olderThanLimitString.length() - 1));
        break;
      case 'm':
      case 'M':
        timeLimitMillis = System.currentTimeMillis()
            - 60L * 1000 * Integer.parseInt(olderThanLimitString.substring(0, olderThanLimitString.length() - 1));
        break;
      default:
        throw new IllegalArgumentException("Unsupported time-limit: " + olderThanLimitString);
    }
    LOG.info("Working on delegation tokens older than current-time (" + timeLimitMillis + ").");
    selectForDeletion = new Predicate<DelegationTokenIdentifier>() {
      public boolean apply(DelegationTokenIdentifier input) {
        return timeLimitMillis > input.getIssueDate();
      }
    };
  } else {
    // Neither "expired" nor "olderThan" criteria selected; this had better
    // not be an attempt to delete tokens.
    if (opType == OpType.DELETE) {
      throw new IllegalArgumentException("Attempting to delete tokens. Specify deletion criteria (either expired or time-range).");
    }
  }
  if (commandLine.hasOption("batchSize")) {
    String batchSizeString = commandLine.getOptionValue("batchSize");
    batchSize = Integer.parseInt(batchSizeString);
    if (batchSize < 1) {
      LOG.warn("Invalid batch-size! (" + batchSize + ") Resetting to defaults.");
      batchSize = BATCH_SIZE_DEFAULT;
    }
    LOG.info("Batch-size for drop == " + batchSize);
  }
  if (commandLine.hasOption("sleepTime")) {
    String sleepTimeString = commandLine.getOptionValue("sleepTime");
    sleepTimeMillis = 1000 * Integer.parseInt(sleepTimeString);
    if (sleepTimeMillis <= 0) {
      LOG.warn("Invalid sleep-time! (" + sleepTimeMillis + ") Resetting to defaults.");
      sleepTimeMillis = SLEEP_TIME_MILLIS_DEFAULT;
    }
    LOG.info("Sleep between drop-batches: " + sleepTimeMillis + " milliseconds.");
  }
  if (commandLine.hasOption("serverMode")) {
    String serverModeString = commandLine.getOptionValue("serverMode").toLowerCase();
    switch (serverModeString) {
      case "metastore":
        serverMode = HadoopThriftAuthBridge.Server.ServerMode.METASTORE;
        break;
      case "hiveserver2":
        serverMode = HadoopThriftAuthBridge.Server.ServerMode.HIVESERVER2;
        break;
      default:
        throw new IllegalArgumentException("Invalid value for serverMode (" + serverModeString + "). Should be either \"METASTORE\" or \"HIVESERVER2\".");
    }
  }
  LOG.info("Running with serverMode == " + serverMode);
}
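The -olderThan suffix parsing is easy to get wrong: computed entirely in int, the day branch overflows once the limit exceeds about 24 days (24 * 60 * 60 * 1000 * 25 is larger than Integer.MAX_VALUE), which is why the listing above uses 24L/60L literals. A hypothetical standalone helper (not part of DelegationTokenTool) that does the same conversion via TimeUnit:

import java.util.concurrent.TimeUnit;

public class OlderThanDemo {
  // Converts "3d", "1h" or "4m" into milliseconds, long arithmetic throughout.
  static long parseLimitMillis(String limit) {
    long n = Long.parseLong(limit.substring(0, limit.length() - 1));
    switch (Character.toLowerCase(limit.charAt(limit.length() - 1))) {
      case 'd': return TimeUnit.DAYS.toMillis(n);
      case 'h': return TimeUnit.HOURS.toMillis(n);
      case 'm': return TimeUnit.MINUTES.toMillis(n);
      default: throw new IllegalArgumentException("Unsupported time-limit: " + limit);
    }
  }

  public static void main(String[] args) {
    // "3d" -> 259,200,000 ms; the cutoff is "now" minus that span.
    System.out.println(System.currentTimeMillis() - parseLimitMillis("3d"));
  }
}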
Use of org.apache.hadoop.util.GenericOptionsParser in project nutch by apache.
The class ParseData, method main.
public static void main(String[] argv) throws Exception {
  String usage = "ParseData (-local | -dfs <namenode:port>) recno segment";
  if (argv.length < 3) {
    System.out.println("usage: " + usage);
    return;
  }
  Options opts = new Options();
  Configuration conf = NutchConfiguration.create();
  GenericOptionsParser parser = new GenericOptionsParser(conf, opts, argv);
  String[] remainingArgs = parser.getRemainingArgs();
  try (FileSystem fs = FileSystem.get(conf)) {
    int recno = Integer.parseInt(remainingArgs[0]);
    String segment = remainingArgs[1];
    Path file = new Path(segment, DIR_NAME);
    System.out.println("Reading from file: " + file);
    ArrayFile.Reader parses = new ArrayFile.Reader(fs, file.toString(), conf);
    ParseData parseDatum = new ParseData();
    parses.get(recno, parseDatum);
    System.out.println("Retrieved " + recno + " from file " + file);
    System.out.println(parseDatum);
    parses.close();
  }
}
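The usage string still advertises -local and -dfs flags that nothing in this method actually handles; with GenericOptionsParser in play, the filesystem is instead selected by the generic -fs option, which sets fs.defaultFS on the Configuration before FileSystem.get(conf) resolves it. A minimal sketch (URI and paths are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class FsOptionDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] argv = {"-fs", "hdfs://namenode:8020", "0", "crawl/segments/20240101"};
    String[] remaining = new GenericOptionsParser(conf, argv).getRemainingArgs();
    // -fs was consumed and recorded as the default filesystem.
    System.out.println(conf.get("fs.defaultFS"));    // hdfs://namenode:8020
    System.out.println(String.join(" ", remaining)); // 0 crawl/segments/20240101
  }
}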