Search in sources :

Example 56 with JobConf

use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

the class TestDBInputFormat method testSetInput.

/**
 * Tests DB configuration: setInput/configureDB should populate the expected
 * DBConfiguration.* parameters in the JobConf.
 */
@Test(timeout = 5000)
public void testSetInput() {
    // Full setInput variant: table / conditions / orderBy / field names.
    JobConf conf = new JobConf();
    String[] fieldNames = { "field1", "field2" };
    DBInputFormat.setInput(conf, NullDBWritable.class, "table", "conditions", "orderBy", fieldNames);
    assertEquals("org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable",
        conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, null).getName());
    assertEquals("table", conf.get(DBConfiguration.INPUT_TABLE_NAME_PROPERTY, null));
    String[] storedFields = conf.getStrings(DBConfiguration.INPUT_FIELD_NAMES_PROPERTY);
    assertEquals("field1", storedFields[0]);
    assertEquals("field2", storedFields[1]);
    assertEquals("conditions", conf.get(DBConfiguration.INPUT_CONDITIONS_PROPERTY, null));
    assertEquals("orderBy", conf.get(DBConfiguration.INPUT_ORDER_BY_PROPERTY, null));

    // Query-based setInput variant.
    conf = new JobConf();
    DBInputFormat.setInput(conf, NullDBWritable.class, "query", "countQuery");
    assertEquals("query", conf.get(DBConfiguration.INPUT_QUERY, null));
    assertEquals("countQuery", conf.get(DBConfiguration.INPUT_COUNT_QUERY, null));

    // configureDB with credentials stores all four connection properties.
    JobConf dbConf = new JobConf();
    DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl", "user", "password");
    assertEquals("driverClass", dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
    assertEquals("user", dbConf.get(DBConfiguration.USERNAME_PROPERTY));
    assertEquals("password", dbConf.get(DBConfiguration.PASSWORD_PROPERTY));

    // configureDB without credentials leaves username/password unset.
    dbConf = new JobConf();
    DBConfiguration.configureDB(dbConf, "driverClass", "dbUrl");
    assertEquals("driverClass", dbConf.get(DBConfiguration.DRIVER_CLASS_PROPERTY));
    assertEquals("dbUrl", dbConf.get(DBConfiguration.URL_PROPERTY));
    assertNull(dbConf.get(DBConfiguration.USERNAME_PROPERTY));
    assertNull(dbConf.get(DBConfiguration.PASSWORD_PROPERTY));
}
Also used : JobConf(org.apache.hadoop.mapred.JobConf) DriverForTest(org.apache.hadoop.mapreduce.lib.db.DriverForTest) Test(org.junit.Test)

Example 57 with JobConf

use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

the class TestDBInputFormat method testDBRecordReader.

/**
 * Tests DBRecordReader: a fresh reader should create keys and values and
 * report its position as 0 before any rows have been consumed.
 */
@SuppressWarnings("unchecked")
@Test(timeout = 5000)
public void testDBRecordReader() throws Exception {
    JobConf job = mock(JobConf.class);
    DBConfiguration dbConfig = mock(DBConfiguration.class);
    // NOTE(review): "filed2" looks like a typo for "field2"; harmless here since
    // the values are only used as opaque column-name test data.
    String[] fields = { "field1", "filed2" };
    // DBRecordReader is a non-static inner class of DBInputFormat, hence the
    // outer.new Inner(...) instantiation syntax.
    @SuppressWarnings("rawtypes") DBRecordReader reader = new DBInputFormat<NullDBWritable>().new DBRecordReader(new DBInputSplit(), NullDBWritable.class, job, DriverForTest.getConnection(), dbConfig, "condition", fields, "table");
    // A newly created key starts at 0.
    LongWritable key = reader.createKey();
    assertEquals(0, key.get());
    // createValue() yields an instance of the writable class passed above.
    DBWritable value = reader.createValue();
    assertEquals("org.apache.hadoop.mapred.lib.db.DBInputFormat$NullDBWritable", value.getClass().getName());
    // No rows consumed yet: position is 0 and next() returns false for the test connection.
    assertEquals(0, reader.getPos());
    assertFalse(reader.next(key, value));
}
Also used : DBConfiguration(org.apache.hadoop.mapred.lib.db.DBConfiguration) DBRecordReader(org.apache.hadoop.mapred.lib.db.DBInputFormat.DBRecordReader) NullDBWritable(org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable) DBInputSplit(org.apache.hadoop.mapred.lib.db.DBInputFormat.DBInputSplit) LongWritable(org.apache.hadoop.io.LongWritable) JobConf(org.apache.hadoop.mapred.JobConf) NullDBWritable(org.apache.hadoop.mapred.lib.db.DBInputFormat.NullDBWritable) DriverForTest(org.apache.hadoop.mapreduce.lib.db.DriverForTest) Test(org.junit.Test)

Example 58 with JobConf

use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

the class TestShufflePlugin method testProviderApi.

/**
 * A testing method verifying availability and accessibility of API needed for
 * AuxiliaryService(s) which are "Shuffle-Providers" (ShuffleHandler and 3rd party plugins).
 */
@Test
public void testProviderApi() throws Exception {
    LocalDirAllocator mockLocalDirAllocator = mock(LocalDirAllocator.class);
    JobConf mockJobConf = mock(JobConf.class);
    // Any exception thrown here propagates and fails the test with its full stack
    // trace. The previous version caught Exception and called
    // assertTrue("Threw exception:" + e, false), which swallowed the trace and
    // reported only the exception's toString().
    mockLocalDirAllocator.getLocalPathToRead("", mockJobConf);
}
Also used : LocalDirAllocator(org.apache.hadoop.fs.LocalDirAllocator) JobConf(org.apache.hadoop.mapred.JobConf) Test(org.junit.Test)

Example 59 with JobConf

use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

the class TestNoDefaultsJobConf method testNoDefaults.

/**
 * Verifies that a JobConf created with loadDefaults=false carries no default
 * resources, and that such a minimal configuration is still sufficient to run
 * an identity MapReduce job end to end.
 */
@Test
public void testNoDefaults() throws Exception {
    // A default-loading JobConf has hadoop.tmp.dir; a no-defaults one does not.
    JobConf configuration = new JobConf();
    assertTrue(configuration.get("hadoop.tmp.dir", null) != null);
    configuration = new JobConf(false);
    assertTrue(configuration.get("hadoop.tmp.dir", null) == null);

    Path inDir = new Path("testing/jobconf/input");
    Path outDir = new Path("testing/jobconf/output");

    // Write two input lines. try-with-resources guarantees the writer (and the
    // underlying stream) is closed even if a write fails; the original leaked it.
    OutputStream os = getFileSystem().create(new Path(inDir, "text.txt"));
    try (Writer wr = new OutputStreamWriter(os)) {
        wr.write("hello\n");
        wr.write("hello\n");
    }

    // Build the job from a no-defaults conf; only fs.defaultFS is copied in.
    JobConf conf = new JobConf(false);
    conf.set("fs.defaultFS", createJobConf().get("fs.defaultFS"));
    conf.setJobName("mr");
    conf.setInputFormat(TextInputFormat.class);
    conf.setMapOutputKeyClass(LongWritable.class);
    conf.setMapOutputValueClass(Text.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
    FileInputFormat.setInputPaths(conf, inDir);
    FileOutputFormat.setOutputPath(conf, outDir);
    JobClient.runJob(conf);

    Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(outDir, new Utils.OutputFileUtils.OutputFilesFilter()));
    // The original silently passed when no output files existed; make that a failure.
    assertTrue("Expected at least one output file", outputFiles.length > 0);
    InputStream is = getFileSystem().open(outputFiles[0]);
    // try-with-resources closes the reader even when an assertion throws.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(is))) {
        String line = reader.readLine();
        int counter = 0;
        while (line != null) {
            counter++;
            assertTrue(line.contains("hello"));
            line = reader.readLine();
        }
        // The two "hello" input lines pass through the identity map/reduce unchanged.
        assertEquals(2, counter);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) Utils(org.apache.hadoop.mapred.Utils) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) JobConf(org.apache.hadoop.mapred.JobConf) Writer(java.io.Writer) OutputStreamWriter(java.io.OutputStreamWriter) Test(org.junit.Test)

Example 60 with JobConf

use of org.apache.hadoop.mapred.JobConf in project hadoop by apache.

the class TestDFSIO method runIOTest.

/**
 * Configures and runs a single-reducer MapReduce job with the given I/O mapper,
 * reading its inputs from the control directory and writing to outputDir.
 *
 * @param mapperClass mapper performing the I/O measurement for this test
 * @param outputDir   destination directory for the job output
 * @throws IOException if the job fails to run
 */
private void runIOTest(Class<? extends Mapper<Text, LongWritable, Text, Text>> mapperClass, Path outputDir) throws IOException {
    JobConf ioJob = new JobConf(config, TestDFSIO.class);

    // Input: sequence files from the control directory drive the test.
    FileInputFormat.setInputPaths(ioJob, getControlDir(config));
    ioJob.setInputFormat(SequenceFileInputFormat.class);

    // Processing: the supplied mapper does the work; one reducer accumulates results.
    ioJob.setMapperClass(mapperClass);
    ioJob.setReducerClass(AccumulatingReducer.class);
    ioJob.setNumReduceTasks(1);

    // Output: Text key/value results under outputDir.
    FileOutputFormat.setOutputPath(ioJob, outputDir);
    ioJob.setOutputKeyClass(Text.class);
    ioJob.setOutputValueClass(Text.class);

    JobClient.runJob(ioJob);
}
Also used : JobConf(org.apache.hadoop.mapred.JobConf)

Aggregations

JobConf (org.apache.hadoop.mapred.JobConf)1037 Path (org.apache.hadoop.fs.Path)510 Test (org.junit.Test)317 FileSystem (org.apache.hadoop.fs.FileSystem)264 IOException (java.io.IOException)204 Configuration (org.apache.hadoop.conf.Configuration)163 InputSplit (org.apache.hadoop.mapred.InputSplit)110 ArrayList (java.util.ArrayList)89 Text (org.apache.hadoop.io.Text)82 File (java.io.File)81 RunningJob (org.apache.hadoop.mapred.RunningJob)67 Properties (java.util.Properties)58 List (java.util.List)49 HashMap (java.util.HashMap)47 DMLRuntimeException (org.apache.sysml.runtime.DMLRuntimeException)47 SequenceFile (org.apache.hadoop.io.SequenceFile)45 TextInputFormat (org.apache.hadoop.mapred.TextInputFormat)44 Map (java.util.Map)42 Job (org.apache.hadoop.mapreduce.Job)42 LongWritable (org.apache.hadoop.io.LongWritable)41