Search in sources:

Example 36 with JobConf

Use of org.apache.hadoop.mapred.JobConf in the Apache Hadoop project.

From the class HSAdmin: the main method.

/**
 * Command-line entry point: runs the HSAdmin tool via ToolRunner with a
 * freshly loaded mapred configuration and exits with the tool's return code.
 */
public static void main(String[] args) throws Exception {
    final JobConf configuration = new JobConf();
    System.exit(ToolRunner.run(new HSAdmin(configuration), args));
}
Also used : JobConf(org.apache.hadoop.mapred.JobConf)

Example 37 with JobConf

Use of org.apache.hadoop.mapred.JobConf in the Apache Hadoop project.

From the class JobHistoryServer: the launchJobHistoryServer method.

/**
 * Creates, initializes, and starts a JobHistoryServer from the given
 * command-line arguments. Any startup failure is logged and terminates the
 * JVM with exit code -1; otherwise the started server is returned.
 */
static JobHistoryServer launchJobHistoryServer(String[] args) {
    Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
    StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
    JobHistoryServer server = null;
    try {
        server = new JobHistoryServer();
        // Stop the composite service cleanly when the JVM shuts down.
        ShutdownHookManager.get().addShutdownHook(
            new CompositeServiceShutdownHook(server), SHUTDOWN_HOOK_PRIORITY);
        // Layer the YARN configuration on top of the mapred defaults.
        YarnConfiguration yarnConf = new YarnConfiguration(new JobConf());
        // Constructor side effect: applies generic options (-D, -conf, ...) to yarnConf.
        new GenericOptionsParser(yarnConf, args);
        server.init(yarnConf);
        server.start();
    } catch (Throwable t) {
        LOG.fatal("Error starting JobHistoryServer", t);
        ExitUtil.terminate(-1, "Error starting JobHistoryServer");
    }
    return server;
}
Also used : YarnUncaughtExceptionHandler(org.apache.hadoop.yarn.YarnUncaughtExceptionHandler) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) JobConf(org.apache.hadoop.mapred.JobConf) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)

Example 38 with JobConf

Use of org.apache.hadoop.mapred.JobConf in the Apache Hadoop project.

From the class MapReduceTrackingUriPlugin: the setConf method.

@Override
public void setConf(Configuration conf) {
    // Wrap the incoming configuration in a JobConf (or build a fresh one when
    // none is supplied) so the mapred configuration resources are loaded
    // before delegating to the superclass.
    final Configuration mapredConf = (conf == null) ? new JobConf() : new JobConf(conf);
    super.setConf(mapredConf);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) JobConf(org.apache.hadoop.mapred.JobConf)

Example 39 with JobConf

Use of org.apache.hadoop.mapred.JobConf in the Apache Hadoop project.

From the class TestFetcher: the setup method.

@Before
// mocked generics
@SuppressWarnings("unchecked")
public void setup() {
    LOG.info(">>>> " + name.getMethodName());
    // Two job configurations: shuffle fetch retry disabled vs. enabled.
    job = new JobConf();
    job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
    jobWithRetry = new JobConf();
    jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
    id = TaskAttemptID.forName("attempt_0_1_r_1_1");
    // All collaborators of the Fetcher under test are mocked out.
    ss = mock(ShuffleSchedulerImpl.class);
    mm = mock(MergeManagerImpl.class);
    r = mock(Reporter.class);
    metrics = mock(ShuffleClientMetrics.class);
    except = mock(ExceptionReporter.class);
    connection = mock(HttpURLConnection.class);
    allErrs = mock(Counters.Counter.class);
    key = JobTokenSecretManager.createSecretKey(new byte[] { 0, 0, 0, 0 });
    // Any counter lookup on the reporter yields the shared error counter.
    when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
    // The scheduler reports two map attempts pending for the test host.
    ArrayList<TaskAttemptID> mapAttempts = new ArrayList<>(2);
    mapAttempts.add(map1ID);
    mapAttempts.add(map2ID);
    when(ss.getMapsForHost(host)).thenReturn(mapAttempts);
}
Also used : HttpURLConnection(java.net.HttpURLConnection) TaskAttemptID(org.apache.hadoop.mapreduce.TaskAttemptID) Reporter(org.apache.hadoop.mapred.Reporter) ArrayList(java.util.ArrayList) JobConf(org.apache.hadoop.mapred.JobConf) Before(org.junit.Before)

Example 40 with JobConf

Use of org.apache.hadoop.mapred.JobConf in the Apache Hadoop project.

From the class TestInputPath: the testInputPath method.

@Test
public void testInputPath() throws Exception {
    JobConf conf = new JobConf();
    Path workingDir = conf.getWorkingDirectory();

    // A comma inside a brace glob must not split the path into two.
    Path globPath = new Path(workingDir, "xx{y" + StringUtils.COMMA_STR + "z}");
    FileInputFormat.setInputPaths(conf, globPath);
    assertInputPaths(conf, globPath);

    // Escaped commas remain part of a single path name.
    String escapedCommas = "" + StringUtils.ESCAPE_CHAR + StringUtils.ESCAPE_CHAR
        + StringUtils.COMMA + StringUtils.COMMA + 'a';
    Path escapedPath = new Path(workingDir, escapedCommas);
    FileInputFormat.setInputPaths(conf, escapedPath);
    assertInputPaths(conf, escapedPath);

    // addInputPath appends to what setInputPaths established.
    Path firstPath = new Path(workingDir,
        StringUtils.ESCAPE_CHAR + "xx" + StringUtils.ESCAPE_CHAR);
    Path secondPath = new Path(workingDir, "yy" + StringUtils.COMMA_STR + "zz");
    FileInputFormat.setInputPaths(conf, firstPath);
    FileInputFormat.addInputPath(conf, secondPath);
    assertInputPaths(conf, firstPath, secondPath);

    // Varargs and explicit-array forms of setInputPaths behave identically.
    FileInputFormat.setInputPaths(conf, firstPath, secondPath);
    assertInputPaths(conf, firstPath, secondPath);
    FileInputFormat.setInputPaths(conf, new Path[] { firstPath, secondPath });
    assertInputPaths(conf, firstPath, secondPath);

    // A comma-separated string splits only on unescaped, un-braced commas.
    String str1 = "{a{b,c},de}";
    String str2 = "xyz";
    String str3 = "x{y,z}";
    FileInputFormat.setInputPaths(conf,
        str1 + StringUtils.COMMA + str2 + StringUtils.COMMA + str3);
    assertInputPaths(conf,
        new Path(workingDir, str1),
        new Path(workingDir, str2),
        new Path(workingDir, str3));

    // addInputPaths appends every path parsed from the comma-separated string.
    String str4 = "abc";
    String str5 = "pq{r,s}";
    FileInputFormat.addInputPaths(conf, str4 + StringUtils.COMMA + str5);
    assertInputPaths(conf,
        new Path(workingDir, str1),
        new Path(workingDir, str2),
        new Path(workingDir, str3),
        new Path(workingDir, str4),
        new Path(workingDir, str5));
}

/** Asserts the conf's input paths match {@code expected} in order, comparing as strings. */
private static void assertInputPaths(JobConf conf, Path... expected) {
    Path[] actual = FileInputFormat.getInputPaths(conf);
    assertEquals(expected.length, actual.length);
    for (int i = 0; i < expected.length; i++) {
        assertEquals(expected[i].toString(), actual[i].toString());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) JobConf(org.apache.hadoop.mapred.JobConf) Test(org.junit.Test)

Aggregations

JobConf (org.apache.hadoop.mapred.JobConf)1037 Path (org.apache.hadoop.fs.Path)510 Test (org.junit.Test)317 FileSystem (org.apache.hadoop.fs.FileSystem)264 IOException (java.io.IOException)204 Configuration (org.apache.hadoop.conf.Configuration)163 InputSplit (org.apache.hadoop.mapred.InputSplit)110 ArrayList (java.util.ArrayList)89 Text (org.apache.hadoop.io.Text)82 File (java.io.File)81 RunningJob (org.apache.hadoop.mapred.RunningJob)67 Properties (java.util.Properties)58 List (java.util.List)49 HashMap (java.util.HashMap)47 DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)47 SequenceFile (org.apache.hadoop.io.SequenceFile)45 TextInputFormat (org.apache.hadoop.mapred.TextInputFormat)44 Map (java.util.Map)42 Job (org.apache.hadoop.mapreduce.Job)42 LongWritable (org.apache.hadoop.io.LongWritable)41