Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class HSAdmin, method main.
public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf();
    int result = ToolRunner.run(new HSAdmin(conf), args);
    System.exit(result);
}
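This is the standard Tool/ToolRunner bootstrap: ToolRunner parses the generic Hadoop options and then invokes the tool's run() method. Below is a minimal sketch of the same pattern for a user-defined tool; MyTool and its job name are hypothetical, not part of the Hadoop source above.

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class MyTool extends Configured implements Tool {

    @Override
    public int run(String[] args) throws Exception {
        // ToolRunner has already applied generic options (-D, -conf, -fs, ...)
        // to the Configuration returned by getConf().
        JobConf job = new JobConf(getConf(), MyTool.class);
        job.setJobName("my-tool");
        // ... configure mapper, reducer, and input/output paths here ...
        return 0;
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new MyTool(), args));
    }
}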
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class JobHistoryServer, method launchJobHistoryServer.
static JobHistoryServer launchJobHistoryServer(String[] args) {
    Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
    StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
    JobHistoryServer jobHistoryServer = null;
    try {
        jobHistoryServer = new JobHistoryServer();
        ShutdownHookManager.get().addShutdownHook(
            new CompositeServiceShutdownHook(jobHistoryServer),
            SHUTDOWN_HOOK_PRIORITY);
        YarnConfiguration conf = new YarnConfiguration(new JobConf());
        new GenericOptionsParser(conf, args);
        jobHistoryServer.init(conf);
        jobHistoryServer.start();
    } catch (Throwable t) {
        LOG.fatal("Error starting JobHistoryServer", t);
        ExitUtil.terminate(-1, "Error starting JobHistoryServer");
    }
    return jobHistoryServer;
}
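Note that the GenericOptionsParser instance is deliberately discarded: the parser applies generic options directly to the Configuration it is given as a side effect. A minimal sketch of that behavior follows; the standalone class and main() are ours, not Hadoop's.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.GenericOptionsParser;

public class ParseGenericOptions {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(new JobConf());
        // e.g. args = { "-D", "mapreduce.jobhistory.address=host:10020" }
        GenericOptionsParser parser = new GenericOptionsParser(conf, args);
        // The -D key=value pair is now visible on conf itself.
        System.out.println(conf.get("mapreduce.jobhistory.address"));
        System.out.println("non-generic args: " + parser.getRemainingArgs().length);
    }
}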
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class MapReduceTrackingUriPlugin, method setConf.
@Override
public void setConf(Configuration conf) {
    Configuration jobConf = null;
    // Force loading of mapred configuration.
    if (conf != null) {
        jobConf = new JobConf(conf);
    } else {
        jobConf = new JobConf();
    }
    super.setConf(jobConf);
}
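The wrapping works because JobConf's static initializer registers mapred-default.xml and mapred-site.xml as default resources, so mapred keys resolve on the wrapped configuration. A minimal sketch, assuming a hypothetical main(); mapreduce.job.maps is a real key whose shipped default is 2.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;

public class ForceMapredLoad {
    public static void main(String[] args) {
        // Constructing a JobConf triggers loading of the mapred-*.xml resources.
        Configuration jobConf = new JobConf(new Configuration());
        System.out.println(jobConf.get("mapreduce.job.maps")); // prints "2"
    }
}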
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class TestFetcher, method setup.
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
    LOG.info(">>>> " + name.getMethodName());
    job = new JobConf();
    job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
    jobWithRetry = new JobConf();
    jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
    id = TaskAttemptID.forName("attempt_0_1_r_1_1");
    ss = mock(ShuffleSchedulerImpl.class);
    mm = mock(MergeManagerImpl.class);
    r = mock(Reporter.class);
    metrics = mock(ShuffleClientMetrics.class);
    except = mock(ExceptionReporter.class);
    key = JobTokenSecretManager.createSecretKey(new byte[] { 0, 0, 0, 0 });
    connection = mock(HttpURLConnection.class);
    allErrs = mock(Counters.Counter.class);
    when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
    ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
    maps.add(map1ID);
    maps.add(map2ID);
    when(ss.getMapsForHost(host)).thenReturn(maps);
}
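The two JobConf instances differ only in the shuffle-retry flag, which the code under test reads back through the Configuration API. A minimal, self-contained sketch of that round trip; the class name is ours, while the key is the real MRJobConfig constant.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class ShuffleRetryFlag {
    public static void main(String[] args) {
        JobConf job = new JobConf();
        job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);

        JobConf jobWithRetry = new JobConf();
        jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);

        // getBoolean's second argument is only a default for unset keys.
        System.out.println(job.getBoolean(
            MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true));   // false
        System.out.println(jobWithRetry.getBoolean(
            MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false));  // true
    }
}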
Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.
The class TestInputPath, method testInputPath.
@Test
public void testInputPath() throws Exception {
    JobConf jobConf = new JobConf();
    Path workingDir = jobConf.getWorkingDirectory();

    // A glob pattern containing a comma round-trips as a single input path.
    Path path = new Path(workingDir, "xx{y" + StringUtils.COMMA_STR + "z}");
    FileInputFormat.setInputPaths(jobConf, path);
    Path[] paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(1, paths.length);
    assertEquals(path.toString(), paths[0].toString());

    // Escape characters and commas in a path name also survive the round trip.
    StringBuilder pathStr = new StringBuilder();
    pathStr.append(StringUtils.ESCAPE_CHAR);
    pathStr.append(StringUtils.ESCAPE_CHAR);
    pathStr.append(StringUtils.COMMA);
    pathStr.append(StringUtils.COMMA);
    pathStr.append('a');
    path = new Path(workingDir, pathStr.toString());
    FileInputFormat.setInputPaths(jobConf, path);
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(1, paths.length);
    assertEquals(path.toString(), paths[0].toString());

    // addInputPath appends to the existing input-path list.
    pathStr.setLength(0);
    pathStr.append(StringUtils.ESCAPE_CHAR);
    pathStr.append("xx");
    pathStr.append(StringUtils.ESCAPE_CHAR);
    path = new Path(workingDir, pathStr.toString());
    Path path1 = new Path(workingDir, "yy" + StringUtils.COMMA_STR + "zz");
    FileInputFormat.setInputPaths(jobConf, path);
    FileInputFormat.addInputPath(jobConf, path1);
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(2, paths.length);
    assertEquals(path.toString(), paths[0].toString());
    assertEquals(path1.toString(), paths[1].toString());

    // setInputPaths with varargs replaces the list in one call ...
    FileInputFormat.setInputPaths(jobConf, path, path1);
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(2, paths.length);
    assertEquals(path.toString(), paths[0].toString());
    assertEquals(path1.toString(), paths[1].toString());

    // ... and accepts an explicit Path[] the same way.
    Path[] input = new Path[] { path, path1 };
    FileInputFormat.setInputPaths(jobConf, input);
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(2, paths.length);
    assertEquals(path.toString(), paths[0].toString());
    assertEquals(path1.toString(), paths[1].toString());

    // A comma-separated string splits into multiple input paths;
    // commas inside curly-brace globs do not split.
    pathStr.setLength(0);
    String str1 = "{a{b,c},de}";
    String str2 = "xyz";
    String str3 = "x{y,z}";
    pathStr.append(str1);
    pathStr.append(StringUtils.COMMA);
    pathStr.append(str2);
    pathStr.append(StringUtils.COMMA);
    pathStr.append(str3);
    FileInputFormat.setInputPaths(jobConf, pathStr.toString());
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(3, paths.length);
    assertEquals(new Path(workingDir, str1).toString(), paths[0].toString());
    assertEquals(new Path(workingDir, str2).toString(), paths[1].toString());
    assertEquals(new Path(workingDir, str3).toString(), paths[2].toString());

    // addInputPaths appends every path in a comma-separated string.
    pathStr.setLength(0);
    String str4 = "abc";
    String str5 = "pq{r,s}";
    pathStr.append(str4);
    pathStr.append(StringUtils.COMMA);
    pathStr.append(str5);
    FileInputFormat.addInputPaths(jobConf, pathStr.toString());
    paths = FileInputFormat.getInputPaths(jobConf);
    assertEquals(5, paths.length);
    assertEquals(new Path(workingDir, str1).toString(), paths[0].toString());
    assertEquals(new Path(workingDir, str2).toString(), paths[1].toString());
    assertEquals(new Path(workingDir, str3).toString(), paths[2].toString());
    assertEquals(new Path(workingDir, str4).toString(), paths[3].toString());
    assertEquals(new Path(workingDir, str5).toString(), paths[4].toString());
}
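The test turns on the difference between the two overloads: setInputPaths(JobConf, String) splits its argument on commas outside curly-brace globs, while setInputPaths(JobConf, Path...) escapes each path internally so an embedded comma stays literal. A minimal sketch of that contrast; the class name and relative paths are hypothetical.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;

public class CommaPaths {
    public static void main(String[] args) {
        JobConf jobConf = new JobConf();
        // The String overload splits on commas outside braces: three paths.
        FileInputFormat.setInputPaths(jobConf, "a,b,c");
        System.out.println(FileInputFormat.getInputPaths(jobConf).length); // 3
        // The Path overload escapes internally, so the comma is literal: one path.
        FileInputFormat.setInputPaths(jobConf, new Path("a,b,c"));
        System.out.println(FileInputFormat.getInputPaths(jobConf).length); // 1
    }
}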