
Example 76 with JobConf

Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

The class TestHSAdminServer, method init.

@Before
public void init() throws HadoopIllegalArgumentException, IOException {
    conf = new JobConf();
    // Bind the admin server to an ephemeral port; the actual bound
    // address is published back into the conf below.
    conf.set(JHAdminConfig.JHS_ADMIN_ADDRESS, "0.0.0.0:0");
    conf.setClass("hadoop.security.group.mapping", MockUnixGroupsMapping.class, GroupMappingServiceProvider.class);
    conf.setLong("hadoop.security.groups.cache.secs", groupRefreshTimeoutSec);
    conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, securityEnabled);
    Groups.getUserToGroupsMappingService(conf);
    jobHistoryService = mock(JobHistory.class);
    alds = mock(AggregatedLogDeletionService.class);
    hsAdminServer = new HSAdminServer(alds, jobHistoryService) {

        @Override
        protected Configuration createConf() {
            return conf;
        }
    };
    hsAdminServer.init(conf);
    hsAdminServer.start();
    conf.setSocketAddr(JHAdminConfig.JHS_ADMIN_ADDRESS, hsAdminServer.clientRpcServer.getListenerAddress());
    hsAdminClient = new HSAdmin(conf);
}
Also used: HSAdmin (org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin), Configuration (org.apache.hadoop.conf.Configuration), JobHistory (org.apache.hadoop.mapreduce.v2.hs.JobHistory), AggregatedLogDeletionService (org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService), JobConf (org.apache.hadoop.mapred.JobConf), Before (org.junit.Before)
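The pattern above, binding to "0.0.0.0:0" and then reading the bound address back, is how the test avoids hard-coded ports. A minimal sketch of the same JobConf idiom, assuming a hypothetical config key and a stand-in address (this is not the HSAdminServer API):

import java.net.InetSocketAddress;
import org.apache.hadoop.mapred.JobConf;

public class EphemeralAddressSketch {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Port 0 asks the OS for any free port.
        conf.set("my.service.address", "0.0.0.0:0"); // hypothetical key
        // Stand-in for what a started server would report, e.g.
        // clientRpcServer.getListenerAddress() in the test above.
        InetSocketAddress bound = new InetSocketAddress("127.0.0.1", 54321);
        conf.setSocketAddr("my.service.address", bound);
        // Clients now read the concrete host:port instead of the wildcard.
        System.out.println(conf.get("my.service.address"));
    }
}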

Example 77 with JobConf

Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

The class TestHsWebServicesAcls, method setup.

@Before
public void setup() throws IOException {
    this.conf = new JobConf();
    this.conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, NullGroupsProvider.class.getName());
    this.conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
    Groups.getUserToGroupsMappingService(conf);
    this.ctx = buildHistoryContext(this.conf);
    WebApp webApp = mock(HsWebApp.class);
    when(webApp.name()).thenReturn("hsmockwebapp");
    this.hsWebServices = new HsWebServices(ctx, conf, webApp);
    this.hsWebServices.setResponse(mock(HttpServletResponse.class));
    Job job = ctx.getAllJobs().values().iterator().next();
    this.jobIdStr = job.getID().toString();
    Task task = job.getTasks().values().iterator().next();
    this.taskIdStr = task.getID().toString();
    this.taskAttemptIdStr = task.getAttempts().keySet().iterator().next().toString();
}
Also used: Task (org.apache.hadoop.mapreduce.v2.app.job.Task), HttpServletResponse (javax.servlet.http.HttpServletResponse), Job (org.apache.hadoop.mapreduce.v2.app.job.Job), JobConf (org.apache.hadoop.mapred.JobConf), WebApp (org.apache.hadoop.yarn.webapp.WebApp), Before (org.junit.Before)
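A side note on the conf calls above: JobConf inherits Configuration's typed accessors, so boolean flags such as MRConfig.MR_ACLS_ENABLED round-trip as strings. A minimal sketch (the main() wrapper is illustrative, not part of the test):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.MRConfig;

public class TypedConfSketch {
    public static void main(String[] args) {
        JobConf conf = new JobConf();
        conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
        // Stored internally as a string; the typed getter parses it back.
        System.out.println(conf.getBoolean(MRConfig.MR_ACLS_ENABLED, false)); // true
        System.out.println(conf.get(MRConfig.MR_ACLS_ENABLED));
    }
}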

Example 78 with JobConf

Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

The class TestChainMapReduce, method testChain.

@Test
public void testChain() throws Exception {
    Path inDir = new Path("testing/chain/input");
    Path outDir = new Path("testing/chain/output");
    // Hack for local FS that does not have the concept of a 'mounting point'
    if (isLocalFS()) {
        String localPathRoot = System.getProperty("test.build.data", "/tmp").replace(' ', '+');
        inDir = new Path(localPathRoot, inDir);
        outDir = new Path(localPathRoot, outDir);
    }
    JobConf conf = createJobConf();
    conf.setBoolean("localFS", isLocalFS());
    conf.setInt("mapreduce.job.maps", 1);
    cleanFlags(conf);
    FileSystem fs = FileSystem.get(conf);
    fs.delete(outDir, true);
    if (!fs.mkdirs(inDir)) {
        throw new IOException("Mkdirs failed to create " + inDir.toString());
    }
    DataOutputStream file = fs.create(new Path(inDir, "part-0"));
    file.writeBytes("1\n2\n");
    file.close();
    conf.setJobName("chain");
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.set("a", "X");
    // JobConf(false) skips loading default resources; this private conf
    // applies only to AMap within the chain.
    JobConf mapAConf = new JobConf(false);
    mapAConf.set("a", "A");
    ChainMapper.addMapper(conf, AMap.class, LongWritable.class, Text.class, LongWritable.class, Text.class, true, mapAConf);
    ChainMapper.addMapper(conf, BMap.class, LongWritable.class, Text.class, LongWritable.class, Text.class, false, null);
    JobConf reduceConf = new JobConf(false);
    reduceConf.set("a", "C");
    ChainReducer.setReducer(conf, CReduce.class, LongWritable.class, Text.class, LongWritable.class, Text.class, true, reduceConf);
    ChainReducer.addMapper(conf, DMap.class, LongWritable.class, Text.class, LongWritable.class, Text.class, false, null);
    JobConf mapEConf = new JobConf(false);
    mapEConf.set("a", "E");
    ChainReducer.addMapper(conf, EMap.class, LongWritable.class, Text.class, LongWritable.class, Text.class, true, mapEConf);
    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    JobClient jc = new JobClient(conf);
    RunningJob job = jc.submitJob(conf);
    while (!job.isComplete()) {
        Thread.sleep(100);
    }
    assertTrue(getFlag(conf, "configure.A"));
    assertTrue(getFlag(conf, "configure.B"));
    assertTrue(getFlag(conf, "configure.C"));
    assertTrue(getFlag(conf, "configure.D"));
    assertTrue(getFlag(conf, "configure.E"));
    assertTrue(getFlag(conf, "map.A.value.1"));
    assertTrue(getFlag(conf, "map.A.value.2"));
    assertTrue(getFlag(conf, "map.B.value.1"));
    assertTrue(getFlag(conf, "map.B.value.2"));
    assertTrue(getFlag(conf, "reduce.C.value.2"));
    assertTrue(getFlag(conf, "reduce.C.value.1"));
    assertTrue(getFlag(conf, "map.D.value.1"));
    assertTrue(getFlag(conf, "map.D.value.2"));
    assertTrue(getFlag(conf, "map.E.value.1"));
    assertTrue(getFlag(conf, "map.E.value.2"));
    assertTrue(getFlag(conf, "close.A"));
    assertTrue(getFlag(conf, "close.B"));
    assertTrue(getFlag(conf, "close.C"));
    assertTrue(getFlag(conf, "close.D"));
    assertTrue(getFlag(conf, "close.E"));
}
Also used: Path (org.apache.hadoop.fs.Path), DataOutputStream (java.io.DataOutputStream), FileSystem (org.apache.hadoop.fs.FileSystem), RunningJob (org.apache.hadoop.mapred.RunningJob), IOException (java.io.IOException), JobConf (org.apache.hadoop.mapred.JobConf), JobClient (org.apache.hadoop.mapred.JobClient), Test (org.junit.Test)
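The submit-and-poll loop above works, but JobClient also offers a blocking call. A hedged sketch of that alternative (not part of the original test):

import java.io.IOException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class RunJobSketch {
    // runJob submits the conf and blocks until the job finishes,
    // throwing IOException if the job fails.
    public static void runBlocking(JobConf conf) throws IOException {
        RunningJob job = JobClient.runJob(conf);
        if (!job.isSuccessful()) {
            throw new IOException("Job failed: " + job.getID());
        }
    }
}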

Example 79 with JobConf

Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

The class TestDelegatingInputFormat, method testSplitting.

@Test
public void testSplitting() throws Exception {
    JobConf conf = new JobConf();
    MiniDFSCluster dfs = null;
    try {
        dfs = new MiniDFSCluster.Builder(conf).numDataNodes(4).racks(new String[] { "/rack0", "/rack0", "/rack1", "/rack1" }).hosts(new String[] { "host0", "host1", "host2", "host3" }).build();
        FileSystem fs = dfs.getFileSystem();
        Path path = getPath("/foo/bar", fs);
        Path path2 = getPath("/foo/baz", fs);
        Path path3 = getPath("/bar/bar", fs);
        Path path4 = getPath("/bar/baz", fs);
        final int numSplits = 100;
        MultipleInputs.addInputPath(conf, path, TextInputFormat.class, MapClass.class);
        MultipleInputs.addInputPath(conf, path2, TextInputFormat.class, MapClass2.class);
        MultipleInputs.addInputPath(conf, path3, KeyValueTextInputFormat.class, MapClass.class);
        MultipleInputs.addInputPath(conf, path4, TextInputFormat.class, MapClass2.class);
        DelegatingInputFormat inFormat = new DelegatingInputFormat();
        InputSplit[] splits = inFormat.getSplits(conf, numSplits);
        int[] bins = new int[3];
        for (InputSplit split : splits) {
            assertTrue(split instanceof TaggedInputSplit);
            final TaggedInputSplit tis = (TaggedInputSplit) split;
            int index = -1;
            if (tis.getInputFormatClass().equals(KeyValueTextInputFormat.class)) {
                // path3
                index = 0;
            } else if (tis.getMapperClass().equals(MapClass.class)) {
                // path
                index = 1;
            } else {
                // path2 and path4
                index = 2;
            }
            bins[index]++;
        }
        // Each bin should receive exactly numSplits splits, regardless of
        // the number of paths that use that Mapper/InputFormat combination.
        for (int count : bins) {
            assertEquals(numSplits, count);
        }
    } finally {
        if (dfs != null) {
            dfs.shutdown();
        }
    }
}
Also used: Path (org.apache.hadoop.fs.Path), MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster), FileSystem (org.apache.hadoop.fs.FileSystem), JobConf (org.apache.hadoop.mapred.JobConf), InputSplit (org.apache.hadoop.mapred.InputSplit), Test (org.junit.Test)
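The helper getPath(String, FileSystem) is not shown in this excerpt. A plausible sketch of what such a helper might do, writing a file whose small block size forces several splits; the signature, data, and block-size choice are assumptions, not the real Hadoop helper:

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class GetPathSketch {
    // Hypothetical stand-in for the test's getPath helper.
    static Path getPath(String location, FileSystem fs) throws IOException {
        Path path = new Path(location);
        // Tiny block size (512 bytes) so even modest data spans many
        // blocks, and therefore many input splits.
        FSDataOutputStream out = fs.create(path, true, 4096, (short) 2, 512L);
        for (int i = 0; i < 1000; i++) {
            out.writeBytes("key" + i + "\tvalue" + i + "\n");
        }
        out.close();
        return path;
    }
}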

Example 80 with JobConf

Use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

The class TestMultipleInputs, method testAddInputPathWithMapper.

@Test
public void testAddInputPathWithMapper() {
    final JobConf conf = new JobConf();
    MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class, MapClass.class);
    MultipleInputs.addInputPath(conf, new Path("/bar"), KeyValueTextInputFormat.class, MapClass2.class);
    final Map<Path, InputFormat> inputs = MultipleInputs.getInputFormatMap(conf);
    final Map<Path, Class<? extends Mapper>> maps = MultipleInputs.getMapperTypeMap(conf);
    assertEquals(TextInputFormat.class, inputs.get(new Path("/foo")).getClass());
    assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar")).getClass());
    assertEquals(MapClass.class, maps.get(new Path("/foo")));
    assertEquals(MapClass2.class, maps.get(new Path("/bar")));
}
Also used: Path (org.apache.hadoop.fs.Path), Mapper (org.apache.hadoop.mapred.Mapper), TextInputFormat (org.apache.hadoop.mapred.TextInputFormat), InputFormat (org.apache.hadoop.mapred.InputFormat), KeyValueTextInputFormat (org.apache.hadoop.mapred.KeyValueTextInputFormat), JobConf (org.apache.hadoop.mapred.JobConf), Test (org.junit.Test)
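To place Example 80 in context, here is a hedged sketch of wiring MultipleInputs into a complete JobConf; the paths, job name, and identity reducer are illustrative choices, not taken from the test:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.MultipleInputs;

public class MultipleInputsJobSketch {
    public static JobConf configure(Class<? extends Mapper> textMapper,
                                    Class<? extends Mapper> kvMapper) {
        JobConf conf = new JobConf();
        conf.setJobName("multiple-inputs-sketch");
        // Each input directory gets its own InputFormat/Mapper pairing;
        // DelegatingInputFormat (see Example 79) routes splits accordingly.
        MultipleInputs.addInputPath(conf, new Path("/in/text"), TextInputFormat.class, textMapper);
        MultipleInputs.addInputPath(conf, new Path("/in/kv"), KeyValueTextInputFormat.class, kvMapper);
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        conf.setReducerClass(IdentityReducer.class);
        FileOutputFormat.setOutputPath(conf, new Path("/out"));
        return conf;
    }
}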

Aggregations

JobConf (org.apache.hadoop.mapred.JobConf): 1037
Path (org.apache.hadoop.fs.Path): 510
Test (org.junit.Test): 317
FileSystem (org.apache.hadoop.fs.FileSystem): 264
IOException (java.io.IOException): 204
Configuration (org.apache.hadoop.conf.Configuration): 163
InputSplit (org.apache.hadoop.mapred.InputSplit): 110
ArrayList (java.util.ArrayList): 89
Text (org.apache.hadoop.io.Text): 82
File (java.io.File): 81
RunningJob (org.apache.hadoop.mapred.RunningJob): 67
Properties (java.util.Properties): 58
List (java.util.List): 49
HashMap (java.util.HashMap): 47
DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException): 47
SequenceFile (org.apache.hadoop.io.SequenceFile): 45
TextInputFormat (org.apache.hadoop.mapred.TextInputFormat): 44
Map (java.util.Map): 42
Job (org.apache.hadoop.mapreduce.Job): 42
LongWritable (org.apache.hadoop.io.LongWritable): 41