Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop-book by elephantscale.
Class SecondarySort, method main.
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: secondarysort <in> <out>");
    System.exit(2);
  }
  // Job(Configuration, String) is deprecated in Hadoop 2+; Job.getInstance(conf, name) is preferred
  Job job = new Job(conf, "secondary sort");
  job.setJarByClass(SecondarySort.class);
  job.setMapperClass(MapClass.class);
  job.setReducerClass(Reduce.class);
  // group and partition by the first int in the pair
  job.setPartitionerClass(FirstPartitioner.class);
  job.setGroupingComparatorClass(FirstGroupingComparator.class);
  // the map output is IntPair, IntWritable
  job.setMapOutputKeyClass(IntPair.class);
  job.setMapOutputValueClass(IntWritable.class);
  // the reduce output is Text, IntWritable
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
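The driver above delegates the actual secondary-sort mechanics to FirstPartitioner and FirstGroupingComparator, which are not shown. A minimal sketch of what such classes typically look like, assuming IntPair is a WritableComparable exposing a getFirst() accessor (the accessor name is an assumption for illustration):

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Partitioner;

// Partition solely on the first int so all pairs sharing it land in one reducer.
public static class FirstPartitioner extends Partitioner<IntPair, IntWritable> {
  @Override
  public int getPartition(IntPair key, IntWritable value, int numPartitions) {
    return Math.abs(key.getFirst() * 127) % numPartitions;
  }
}

// Group solely on the first int so one reduce() call sees all values for it,
// while the full IntPair sort order still arranges them by the second int.
public static class FirstGroupingComparator extends WritableComparator {
  protected FirstGroupingComparator() {
    super(IntPair.class, true); // true: instantiate keys for object comparison
  }
  @Override
  public int compare(WritableComparable a, WritableComparable b) {
    return Integer.compare(((IntPair) a).getFirst(), ((IntPair) b).getFirst());
  }
}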
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
Class WordCount, method main.
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length < 2) {
    System.err.println("Usage: wordcount <in> [<in>...] <out>");
    System.exit(2);
  }
  Job job = Job.getInstance(conf, "word count");
  job.setJarByClass(WordCount.class);
  job.setMapperClass(TokenizerMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  for (int i = 0; i < otherArgs.length - 1; ++i) {
    FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
  }
  FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
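Every driver here follows the same pattern: GenericOptionsParser consumes Hadoop's generic options (-D key=value, -conf, -fs, -files, and so on) into the Configuration and returns only the application-specific arguments. A minimal self-contained sketch of that behavior (the property name is chosen for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;

public class ParserDemo {
  public static void main(String[] args) throws Exception {
    // e.g. invoked with: -D mapreduce.job.reduces=2 in out
    Configuration conf = new Configuration();
    String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();
    // conf.get("mapreduce.job.reduces") now returns "2";
    // remaining holds only {"in", "out"}
    for (String arg : remaining) {
      System.out.println(arg);
    }
  }
}

This is why WordCount can treat everything in otherArgs as input and output paths: the generic flags have already been stripped.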
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
Class NodeManager, method main.
public static void main(String[] args) throws IOException {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(NodeManager.class, args, LOG);
  @SuppressWarnings("resource")
  NodeManager nodeManager = new NodeManager();
  Configuration conf = new YarnConfiguration();
  // constructed purely for its side effect: generic options (-D, -conf, ...) are folded into conf
  new GenericOptionsParser(conf, args);
  nodeManager.initAndStartNodeManager(conf, false);
}
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
Class JobHistoryServer, method launchJobHistoryServer.
static JobHistoryServer launchJobHistoryServer(String[] args) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
  JobHistoryServer jobHistoryServer = null;
  try {
    jobHistoryServer = new JobHistoryServer();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(jobHistoryServer), SHUTDOWN_HOOK_PRIORITY);
    YarnConfiguration conf = new YarnConfiguration(new JobConf());
    // as in NodeManager, the parser is used only to apply generic options to conf
    new GenericOptionsParser(conf, args);
    jobHistoryServer.init(conf);
    jobHistoryServer.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting JobHistoryServer", t);
    ExitUtil.terminate(-1, "Error starting JobHistoryServer");
  }
  return jobHistoryServer;
}
Use of org.apache.hadoop.util.GenericOptionsParser in project hadoop by apache.
Class TimelineSchemaCreator, method main.
public static void main(String[] args) throws Exception {
  Configuration hbaseConf = HBaseConfiguration.create();
  // Grab input args and allow for -Dxyz style arguments
  String[] otherArgs = new GenericOptionsParser(hbaseConf, args).getRemainingArgs();
  // Grab the arguments we're looking for.
  CommandLine commandLine = parseArgs(otherArgs);
  // Grab the entityTableName argument
  String entityTableName = commandLine.getOptionValue(ENTITY_TABLE_NAME_SHORT);
  if (StringUtils.isNotBlank(entityTableName)) {
    hbaseConf.set(EntityTable.TABLE_NAME_CONF_NAME, entityTableName);
  }
  String entityTableTTLMetrics = commandLine.getOptionValue(TTL_OPTION_SHORT);
  if (StringUtils.isNotBlank(entityTableTTLMetrics)) {
    int metricsTTL = Integer.parseInt(entityTableTTLMetrics);
    new EntityTable().setMetricsTTL(metricsTTL, hbaseConf);
  }
  // Grab the appToflowTableName argument
  String appToflowTableName = commandLine.getOptionValue(APP_TO_FLOW_TABLE_NAME_SHORT);
  if (StringUtils.isNotBlank(appToflowTableName)) {
    hbaseConf.set(AppToFlowTable.TABLE_NAME_CONF_NAME, appToflowTableName);
  }
  // Grab the applicationTableName argument
  String applicationTableName = commandLine.getOptionValue(APP_TABLE_NAME_SHORT);
  if (StringUtils.isNotBlank(applicationTableName)) {
    hbaseConf.set(ApplicationTable.TABLE_NAME_CONF_NAME, applicationTableName);
  }
  List<Exception> exceptions = new ArrayList<>();
  try {
    boolean skipExisting = commandLine.hasOption(SKIP_EXISTING_TABLE_OPTION_SHORT);
    if (skipExisting) {
      LOG.info("Will skip existing tables and continue on htable creation exceptions!");
    }
    createAllTables(hbaseConf, skipExisting);
    LOG.info("Successfully created HBase schema.");
  } catch (IOException e) {
    LOG.error("Error in creating hbase tables: " + e.getMessage());
    exceptions.add(e);
  }
  if (exceptions.size() > 0) {
    LOG.warn("Schema creation finished with the following exceptions");
    for (Exception e : exceptions) {
      LOG.warn(e.getMessage());
    }
    System.exit(-1);
  } else {
    LOG.info("Schema creation finished successfully");
  }
}
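The parseArgs helper is not shown above; the CommandLine type it returns comes from Apache Commons CLI. A plausible sketch under that assumption, with option letters and descriptions invented for illustration (the project's actual constants may differ):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

// Hypothetical sketch: the short option names here are assumptions, not the real constants.
private static CommandLine parseArgs(String[] args) throws ParseException {
  Options options = new Options();
  options.addOption("e", "entityTableName", true, "entity table name");
  options.addOption("m", "metricsTTL", true, "TTL for the metrics column family");
  options.addOption("f", "appToflowTableName", true, "app-to-flow table name");
  options.addOption("a", "applicationTableName", true, "application table name");
  options.addOption("s", "skipExistingTable", false, "skip existing tables");
  CommandLineParser parser = new GnuParser();
  return parser.parse(options, args);
}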