Use of org.apache.hadoop.util.GenericOptionsParser in the Apache HBase project:
the run method of class SyncTable.
/**
 * Tool entry point: parses generic Hadoop options, validates the remaining
 * command-line arguments, and runs the sync job to completion.
 *
 * @param args raw command-line arguments (may include generic Hadoop options)
 * @return 0 on success, 1 on bad arguments or job failure
 */
@Override
public int run(String[] args) throws Exception {
  // Strip generic Hadoop options (-D, -conf, ...) before tool-specific parsing.
  final String[] remaining = new GenericOptionsParser(getConf(), args).getRemainingArgs();
  if (!doCommandLine(remaining)) {
    return 1;
  }
  final Job job = createSubmittableJob(remaining);
  final boolean succeeded = job.waitForCompletion(true);
  if (!succeeded) {
    LOG.info("Map-reduce job failed!");
    return 1;
  }
  // Expose the job counters to callers (e.g. for result verification).
  counters = job.getCounters();
  return 0;
}
Use of org.apache.hadoop.util.GenericOptionsParser in the Apache HBase project:
the run method of class Export.
/**
 * Parses the command line and delegates to the typed {@code run} overload.
 *
 * @param conf cluster configuration
 * @param args raw command-line arguments (may include generic Hadoop options)
 * @return per-region responses from the export, or {@code null} when the
 *         arguments are invalid (usage has already been printed)
 * @throws Throwable propagated from the underlying export
 */
@InterfaceAudience.Private
static Map<byte[], Response> run(final Configuration conf, final String[] args) throws Throwable {
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  // Fix: validate the parsed remaining args, not the raw args — the raw array
  // may still contain generic options (-D key=value), which would make the
  // count check disagree with the otherArgs length reported in the usage message.
  if (!ExportUtils.isValidArguements(otherArgs)) {
    ExportUtils.usage("Wrong number of arguments: " + ArrayUtils.getLength(otherArgs));
    return null;
  }
  Triple<TableName, Scan, Path> arguments = ExportUtils.getArgumentsFromCommandLine(conf, otherArgs);
  return run(conf, arguments.getFirst(), arguments.getSecond(), arguments.getThird());
}
Use of org.apache.hadoop.util.GenericOptionsParser in the Cloud9 project by lintool:
the main method of class ClueWebAnchorTextForwardIndexHttpServer.
/**
 * Launches the forward-index HTTP server as a zero-reduce, single-map
 * MapReduce job. The server mapper writes its hostname to a temp file on
 * HDFS, which this launcher polls as a rendezvous point.
 *
 * @param args [index-file] [docno-mapping-data-files] [clue-forward-index-root]
 */
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 3) {
    System.out.println("usage: [index-file] [docno-mapping-data-files] [clue-forward-index-root]");
    // [clue-forward-index-root: /shared/ClueWeb09/collection.compressed.block/
    System.exit(-1);
  }
  String indexFile = otherArgs[0];
  String mappingFile = otherArgs[1];
  String clueIndexRoot = otherArgs[2].endsWith("/") ? otherArgs[2] : otherArgs[2] + "/";

  // Build the SEPARATOR-delimited list findex.en.01.dat .. findex.en.10.dat.
  // Use StringBuilder instead of repeated String concatenation in the loop.
  StringBuilder cluewebForwardIndex = new StringBuilder();
  for (int i = 1; i < 10; i++) {
    cluewebForwardIndex.append(clueIndexRoot).append("findex.en.0").append(i)
        .append(".dat").append(SEPARATOR).append(" ");
  }
  cluewebForwardIndex.append(clueIndexRoot).append("findex.en.10.dat");

  LOG.info("Launching DocumentForwardIndexHttpServer");
  LOG.info(" - index file: " + indexFile);
  LOG.info(" - docno mapping data file: " + mappingFile);
  LOG.info(" - ClueWeb09 index root:" + clueIndexRoot);

  FileSystem fs = FileSystem.get(conf);
  Random rand = new Random();
  int r = rand.nextInt();
  // This tmp file acts as a rendezvous point: the server mapper creates it
  // once it is up, and writes its host name into it.
  Path tmpPath = new Path("/tmp/" + r);
  if (fs.exists(tmpPath)) {
    fs.delete(tmpPath, true);
  }

  JobConf job = new JobConf(conf, ClueWebAnchorTextForwardIndexHttpServer.class);
  job.setJobName("ForwardIndexServer:" + indexFile);
  job.set("mapred.child.java.opts", "-Xmx2048m");
  // One long-running map task hosts the HTTP server; no reduce phase.
  job.setNumMapTasks(1);
  job.setNumReduceTasks(0);
  job.setInputFormat(NullInputFormat.class);
  job.setOutputFormat(NullOutputFormat.class);
  job.setMapperClass(ServerMapper.class);
  job.set("IndexFile", indexFile);
  job.set("DocnoMappingDataFile", mappingFile);
  job.set("TmpPath", tmpPath.toString());
  job.set("ClueWebIndexFiles", cluewebForwardIndex.toString());

  JobClient client = new JobClient(job);
  // Submit asynchronously; the job never "completes" — it serves HTTP.
  client.submitJob(job);
  LOG.info("Waiting for server to start up...");
  while (!fs.exists(tmpPath)) {
    Thread.sleep(50000);
    LOG.info("...");
  }

  // Read the server's host name; try-with-resources closes the stream even
  // if readUTF throws (the original leaked the stream on exception).
  String host;
  try (FSDataInputStream in = fs.open(tmpPath)) {
    host = in.readUTF();
  }
  LOG.info("host: " + host);
  LOG.info("port: 8888");
}
Use of org.apache.hadoop.util.GenericOptionsParser in the elephant-bird project by Twitter:
the main method of class DeprecatedWrapperWordCount.
/**
 * Word-count example using the deprecated mapred API via elephant-bird's
 * input/output format wrappers around the new-API text formats.
 *
 * @param args &lt;input dir&gt; &lt;output dir&gt; (after generic Hadoop options)
 */
public static void main(String[] args) throws Exception {
  // Fix: "CLASSPATH" is an environment variable, not a system property, so
  // System.getProperty("CLASSPATH") always returned null. The JVM exposes the
  // effective classpath as the "java.class.path" property.
  System.out.println("CLASSPATH: " + System.getProperty("java.class.path"));
  GenericOptionsParser options = new GenericOptionsParser(args);
  args = options.getRemainingArgs();
  if (args.length != 2) {
    System.err.println("Usage: hadoop jar path/to/this.jar " + DeprecatedWrapperWordCount.class + " <input dir> <output dir>");
    System.exit(1);
  }
  JobConf job = new JobConf(options.getConfiguration());
  job.setJobName("Deprecated Wrapper Word Count");
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(LongWritable.class);
  job.setJarByClass(DeprecatedWrapperWordCount.class);
  job.setMapperClass(WordCountMapper.class);
  job.setCombinerClass(WordCountReducer.class);
  job.setReducerClass(WordCountReducer.class);
  // Wrap the new-API formats so they work with the old mapred JobConf API.
  job.setInputFormat(DeprecatedInputFormatWrapper.class);
  DeprecatedInputFormatWrapper.setInputFormat(TextInputFormat.class, job);
  job.setOutputFormat(DeprecatedOutputFormatWrapper.class);
  DeprecatedOutputFormatWrapper.setOutputFormat(TextOutputFormat.class, job);
  FileInputFormat.setInputPaths(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));
  // JobClient.runJob already blocks until the job finishes (and throws on
  // failure), so the extra waitForCompletion() call was redundant.
  JobClient.runJob(job);
}
Use of org.apache.hadoop.util.GenericOptionsParser in the elephant-bird project by Twitter:
the main method of class ProtobufMRExample.
/**
 * Dispatches to one of three protobuf MR examples based on the
 * {@code proto.test} configuration property (default {@code "lzoIn"}).
 *
 * @param args &lt;input dir&gt; &lt;output dir&gt; (after generic Hadoop options)
 */
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  args = new GenericOptionsParser(conf, args).getRemainingArgs();
  ProtobufMRExample runner = new ProtobufMRExample();
  if (args.length != 2) {
    System.out.println("Usage: hadoop jar path/to/this.jar " + runner.getClass() + " <input dir> <output dir>");
    System.exit(1);
  }
  String test = conf.get("proto.test", "lzoIn");
  switch (test) {
    case "lzoIn":
      System.exit(runner.runLzoToText(args, conf));
      break;
    case "lzoOut":
      System.exit(runner.runTextToLzo(args, conf));
      break;
    case "sort":
      System.exit(runner.runSorter(args, conf));
      break;
    default:
      // Fix: an unrecognized proto.test value used to fall off the end of
      // main and exit 0 silently; report it and fail instead.
      System.err.println("Unknown proto.test value: " + test);
      System.exit(1);
  }
}
Aggregations