
Example 6 with Option

Use of org.apache.hadoop.io.MapFile.Writer.Option in project nutch by apache.

The class TestLinkDbMerger, method createLinkDb:

private void createLinkDb(Configuration config, FileSystem fs, Path linkdb, TreeMap<String, String[]> init) throws Exception {
    LOG.debug("* creating linkdb: " + linkdb);
    Path dir = new Path(linkdb, LinkDb.CURRENT_NAME);
    // Key and value classes are passed as writer Options instead of the
    // deprecated (keyClass, valueClass) constructor arguments.
    Option wKeyOpt = MapFile.Writer.keyClass(Text.class);
    org.apache.hadoop.io.SequenceFile.Writer.Option wValueOpt = SequenceFile.Writer.valueClass(Inlinks.class);
    MapFile.Writer writer = new MapFile.Writer(config, new Path(dir, "part-00000"), wKeyOpt, wValueOpt);
    Iterator<String> it = init.keySet().iterator();
    while (it.hasNext()) {
        String key = it.next();
        // Build one Inlinks value per key from the entries in the init map.
        Inlinks inlinks = new Inlinks();
        String[] vals = init.get(key);
        for (int i = 0; i < vals.length; i++) {
            Inlink in = new Inlink(vals[i], vals[i]);
            inlinks.add(in);
        }
        writer.append(new Text(key), inlinks);
    }
    writer.close();
}
Also used : Path (org.apache.hadoop.fs.Path), MapFile (org.apache.hadoop.io.MapFile), Text (org.apache.hadoop.io.Text), Option (org.apache.hadoop.io.MapFile.Writer.Option)
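
The test above only writes the MapFile; the following is a minimal read-back sketch, assuming Hadoop's MapFile.Reader(Path, Configuration, ...) constructor and the Nutch Inlinks class are on the classpath. The part path is a hypothetical literal, not taken from the test.

// Hedged sketch: reading back a linkdb part written as above.
// The path literal is a placeholder; in the test it would be
// new Path(new Path(linkdb, LinkDb.CURRENT_NAME), "part-00000").
Configuration conf = new Configuration();
Path part = new Path("linkdb/current/part-00000");
MapFile.Reader reader = new MapFile.Reader(part, conf);
try {
    Text key = new Text();
    Inlinks value = new Inlinks();
    // MapFile keeps keys sorted, so this iterates entries in key order.
    while (reader.next(key, value)) {
        System.out.println(key + " -> " + value);
    }
} finally {
    reader.close();
}

Also used : Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), MapFile (org.apache.hadoop.io.MapFile), Text (org.apache.hadoop.io.Text), Inlinks (org.apache.nutch.crawl.Inlinks)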

Example 7 with Option

Use of org.apache.hadoop.io.MapFile.Writer.Option in project nutch by apache.

The class FetcherOutputFormat, method getRecordWriter:

@Override
public RecordWriter<Text, NutchWritable> getRecordWriter(TaskAttemptContext context) throws IOException {
    Configuration conf = context.getConfiguration();
    String name = getUniqueFile(context, "part", "");
    Path out = FileOutputFormat.getOutputPath(context);
    // Output subdirectories for fetch status (CrawlDatum) and raw fetched content.
    final Path fetch = new Path(new Path(out, CrawlDatum.FETCH_DIR_NAME), name);
    final Path content = new Path(new Path(out, Content.DIR_NAME), name);
    final CompressionType compType = SequenceFileOutputFormat.getOutputCompressionType(context);
    // The key class is a MapFile.Writer.Option; the value class, progressable and
    // compression options come from SequenceFile.Writer and are accepted by the
    // MapFile.Writer constructor alongside it.
    Option fKeyClassOpt = MapFile.Writer.keyClass(Text.class);
    org.apache.hadoop.io.SequenceFile.Writer.Option fValClassOpt = SequenceFile.Writer.valueClass(CrawlDatum.class);
    org.apache.hadoop.io.SequenceFile.Writer.Option fProgressOpt = SequenceFile.Writer.progressable((Progressable) context);
    org.apache.hadoop.io.SequenceFile.Writer.Option fCompOpt = SequenceFile.Writer.compression(compType);
    final MapFile.Writer fetchOut = new MapFile.Writer(conf, fetch, fKeyClassOpt, fValClassOpt, fCompOpt, fProgressOpt);
    return new RecordWriter<Text, NutchWritable>() {

        private MapFile.Writer contentOut;

        private RecordWriter<Text, Parse> parseOut;

        // Instance initializer: content and parse writers are created only when
        // the corresponding features are enabled in the configuration.
        {
            if (Fetcher.isStoringContent(conf)) {
                Option cKeyClassOpt = MapFile.Writer.keyClass(Text.class);
                org.apache.hadoop.io.SequenceFile.Writer.Option cValClassOpt = SequenceFile.Writer.valueClass(Content.class);
                org.apache.hadoop.io.SequenceFile.Writer.Option cProgressOpt = SequenceFile.Writer.progressable((Progressable) context);
                org.apache.hadoop.io.SequenceFile.Writer.Option cCompOpt = SequenceFile.Writer.compression(compType);
                contentOut = new MapFile.Writer(conf, content, cKeyClassOpt, cValClassOpt, cCompOpt, cProgressOpt);
            }
            if (Fetcher.isParsing(conf)) {
                parseOut = new ParseOutputFormat().getRecordWriter(context);
            }
        }

        public void write(Text key, NutchWritable value) throws IOException, InterruptedException {
            // Dispatch each record to the writer matching its concrete Writable type.
            Writable w = value.get();
            if (w instanceof CrawlDatum)
                fetchOut.append(key, w);
            else if (w instanceof Content && contentOut != null)
                contentOut.append(key, w);
            else if (w instanceof Parse && parseOut != null)
                parseOut.write(key, (Parse) w);
        }

        public void close(TaskAttemptContext context) throws IOException, InterruptedException {
            fetchOut.close();
            if (contentOut != null) {
                contentOut.close();
            }
            if (parseOut != null) {
                parseOut.close(context);
            }
        }
    };
}
Also used : Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), Parse (org.apache.nutch.parse.Parse), MapFile (org.apache.hadoop.io.MapFile), Writable (org.apache.hadoop.io.Writable), NutchWritable (org.apache.nutch.crawl.NutchWritable), CrawlDatum (org.apache.nutch.crawl.CrawlDatum), Text (org.apache.hadoop.io.Text), TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext), RecordWriter (org.apache.hadoop.mapreduce.RecordWriter), Content (org.apache.nutch.protocol.Content), ParseOutputFormat (org.apache.nutch.parse.ParseOutputFormat), Option (org.apache.hadoop.io.MapFile.Writer.Option), CompressionType (org.apache.hadoop.io.SequenceFile.CompressionType)
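
getOutputCompressionType only reports what the driver configured on the job, so a minimal driver-side sketch is shown below. It assumes the standard org.apache.hadoop.mapreduce.lib.output helpers; the job name and output path are hypothetical placeholders, not taken from Nutch.

// Hedged sketch: job configuration that FetcherOutputFormat later reads back
// through SequenceFileOutputFormat.getOutputCompressionType(context).
// Job name and output path are hypothetical placeholders.
Configuration conf = new Configuration();
Job job = Job.getInstance(conf, "fetch");
job.setOutputFormatClass(FetcherOutputFormat.class);
// Turn on output compression and choose the CompressionType the writers will use.
FileOutputFormat.setCompressOutput(job, true);
SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
FileOutputFormat.setOutputPath(job, new Path("crawl/segments/20240101000000"));

Also used : Job (org.apache.hadoop.mapreduce.Job), FileOutputFormat (org.apache.hadoop.mapreduce.lib.output.FileOutputFormat), SequenceFileOutputFormat (org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat), CompressionType (org.apache.hadoop.io.SequenceFile.CompressionType)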

Aggregations

Path (org.apache.hadoop.fs.Path): 7
MapFile (org.apache.hadoop.io.MapFile): 7
Option (org.apache.hadoop.io.MapFile.Writer.Option): 7
Text (org.apache.hadoop.io.Text): 7
CrawlDatum (org.apache.nutch.crawl.CrawlDatum): 3
Configuration (org.apache.hadoop.conf.Configuration): 2
CompressionType (org.apache.hadoop.io.SequenceFile.CompressionType): 2
RecordWriter (org.apache.hadoop.mapreduce.RecordWriter): 2
TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext): 2
IOException (java.io.IOException): 1
MalformedURLException (java.net.MalformedURLException): 1
URL (java.net.URL): 1
DecimalFormat (java.text.DecimalFormat): 1
ArrayList (java.util.ArrayList): 1
Entry (java.util.Map.Entry): 1
FileStatus (org.apache.hadoop.fs.FileStatus): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
MapWritable (org.apache.hadoop.io.MapWritable): 1
SequenceFile (org.apache.hadoop.io.SequenceFile): 1
Metadata (org.apache.hadoop.io.SequenceFile.Metadata): 1