
Example 16 with WritableComparable

use of org.apache.hadoop.io.WritableComparable in project hive by apache.

the class CommonMergeJoinOperator method compareKeysMany.

@SuppressWarnings("rawtypes")
private int compareKeysMany(WritableComparator[] comparators, final List<Object> k1, final List<Object> k2) {
    // invariant: k1.size == k2.size
    int ret = 0;
    final int size = k1.size();
    for (int i = 0; i < size; i++) {
        WritableComparable key_1 = (WritableComparable) k1.get(i);
        WritableComparable key_2 = (WritableComparable) k2.get(i);
        ret = compareKey(comparators, i, key_1, key_2, nullsafes != null ? nullsafes[i] : false);
        if (ret != 0) {
            return ret;
        }
    }
    return ret;
}
Also used : WritableComparable(org.apache.hadoop.io.WritableComparable)
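
The per-column delegation happens in compareKey, which is not shown above. Below is a minimal sketch of what such a helper might look like, using only org.apache.hadoop.io.WritableComparator and WritableComparable; the null handling is illustrative, not Hive's exact semantics.

@SuppressWarnings({ "rawtypes", "unchecked" })
private int compareKeySketch(WritableComparator comparator, WritableComparable key1, WritableComparable key2, boolean nullsafe) {
    // Nulls are handled explicitly; only a null-safe comparison treats two nulls as equal.
    if (key1 == null && key2 == null) {
        return nullsafe ? 0 : -1;
    }
    if (key1 == null) {
        return -1;
    }
    if (key2 == null) {
        return 1;
    }
    // Use the column-specific comparator when one is configured,
    // otherwise fall back to the keys' natural ordering.
    return comparator != null ? comparator.compare(key1, key2) : key1.compareTo(key2);
}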

Example 17 with WritableComparable

use of org.apache.hadoop.io.WritableComparable in project hive by apache.

the class TypedBytesWritableInput method readSortedMap.

public SortedMapWritable readSortedMap(SortedMapWritable mw) throws IOException {
    if (mw == null) {
        mw = new SortedMapWritable();
    }
    int length = in.readMapHeader();
    for (int i = 0; i < length; i++) {
        WritableComparable key = (WritableComparable) read();
        Writable value = read();
        mw.put(key, value);
    }
    return mw;
}
Also used : WritableComparable(org.apache.hadoop.io.WritableComparable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) NullWritable(org.apache.hadoop.io.NullWritable) VLongWritable(org.apache.hadoop.io.VLongWritable) Writable(org.apache.hadoop.io.Writable) MapWritable(org.apache.hadoop.io.MapWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) SortedMapWritable(org.apache.hadoop.io.SortedMapWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) VIntWritable(org.apache.hadoop.io.VIntWritable) FloatWritable(org.apache.hadoop.io.FloatWritable)
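
The cast to WritableComparable is required because SortedMapWritable keeps its entries ordered by key. A minimal, self-contained sketch of that contract, independent of the TypedBytes reader:

// Minimal sketch (not part of TypedBytesWritableInput): SortedMapWritable requires
// WritableComparable keys so it can keep entries sorted, while values only need
// to be Writable.
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SortedMapWritable;
import org.apache.hadoop.io.Text;

public class SortedMapWritableSketch {
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public static void main(String[] args) {
        SortedMapWritable map = new SortedMapWritable();
        map.put(new IntWritable(2), new Text("two"));
        map.put(new IntWritable(1), new Text("one"));
        // Entries are kept in key order, so the smallest key comes first.
        System.out.println(map.firstKey()); // prints 1
        System.out.println(map.size());     // prints 2
    }
}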

Example 18 with WritableComparable

use of org.apache.hadoop.io.WritableComparable in project hadoop-book by elephantscale.

the class Sort method run.

/**
   * The main driver for the sort program.
   * Invoke this method to submit the map/reduce job.
   * @throws IOException When there are communication problems with the
   *                     job tracker.
   */
public int run(String[] args) throws Exception {
    JobConf jobConf = new JobConf(getConf(), Sort.class);
    jobConf.setJobName("sorter");
    jobConf.setMapperClass(IdentityMapper.class);
    jobConf.setReducerClass(IdentityReducer.class);
    JobClient client = new JobClient(jobConf);
    ClusterStatus cluster = client.getClusterStatus();
    int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
    String sort_reduces = jobConf.get("test.sort.reduces_per_host");
    if (sort_reduces != null) {
        num_reduces = cluster.getTaskTrackers() * Integer.parseInt(sort_reduces);
    }
    Class<? extends InputFormat> inputFormatClass = SequenceFileInputFormat.class;
    Class<? extends OutputFormat> outputFormatClass = SequenceFileOutputFormat.class;
    Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
    Class<? extends Writable> outputValueClass = BytesWritable.class;
    List<String> otherArgs = new ArrayList<String>();
    InputSampler.Sampler<K, V> sampler = null;
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                jobConf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                num_reduces = Integer.parseInt(args[++i]);
            } else if ("-inFormat".equals(args[i])) {
                inputFormatClass = Class.forName(args[++i]).asSubclass(InputFormat.class);
            } else if ("-outFormat".equals(args[i])) {
                outputFormatClass = Class.forName(args[++i]).asSubclass(OutputFormat.class);
            } else if ("-outKey".equals(args[i])) {
                outputKeyClass = Class.forName(args[++i]).asSubclass(WritableComparable.class);
            } else if ("-outValue".equals(args[i])) {
                outputValueClass = Class.forName(args[++i]).asSubclass(Writable.class);
            } else if ("-totalOrder".equals(args[i])) {
                double pcnt = Double.parseDouble(args[++i]);
                int numSamples = Integer.parseInt(args[++i]);
                int maxSplits = Integer.parseInt(args[++i]);
                if (0 >= maxSplits)
                    maxSplits = Integer.MAX_VALUE;
                sampler = new InputSampler.RandomSampler<K, V>(pcnt, numSamples, maxSplits);
            } else {
                otherArgs.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            // exits
            return printUsage();
        }
    }
    // Set user-supplied (possibly default) job configs
    jobConf.setNumReduceTasks(num_reduces);
    jobConf.setInputFormat(inputFormatClass);
    jobConf.setOutputFormat(outputFormatClass);
    jobConf.setOutputKeyClass(outputKeyClass);
    jobConf.setOutputValueClass(outputValueClass);
    // Make sure there are exactly 2 parameters left.
    if (otherArgs.size() != 2) {
        System.out.println("ERROR: Wrong number of parameters: " + otherArgs.size() + " instead of 2.");
        return printUsage();
    }
    FileInputFormat.setInputPaths(jobConf, otherArgs.get(0));
    FileOutputFormat.setOutputPath(jobConf, new Path(otherArgs.get(1)));
    if (sampler != null) {
        System.out.println("Sampling input to effect total-order sort...");
        jobConf.setPartitionerClass(TotalOrderPartitioner.class);
        Path inputDir = FileInputFormat.getInputPaths(jobConf)[0];
        inputDir = inputDir.makeQualified(inputDir.getFileSystem(jobConf));
        Path partitionFile = new Path(inputDir, "_sortPartitioning");
        TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
        InputSampler.<K, V>writePartitionFile(jobConf, sampler);
        URI partitionUri = new URI(partitionFile.toString() + "#" + "_sortPartitioning");
        DistributedCache.addCacheFile(partitionUri, jobConf);
        DistributedCache.createSymlink(jobConf);
    }
    System.out.println("Running on " + cluster.getTaskTrackers() + " nodes to sort from " + FileInputFormat.getInputPaths(jobConf)[0] + " into " + FileOutputFormat.getOutputPath(jobConf) + " with " + num_reduces + " reduces.");
    Date startTime = new Date();
    System.out.println("Job started: " + startTime);
    jobResult = JobClient.runJob(jobConf);
    Date end_time = new Date();
    System.out.println("Job ended: " + end_time);
    System.out.println("The job took " + (end_time.getTime() - startTime.getTime()) / 1000 + " seconds.");
    return 0;
}
Also used : InputSampler(org.apache.hadoop.mapred.lib.InputSampler) ArrayList(java.util.ArrayList) URI(java.net.URI) Path(org.apache.hadoop.fs.Path) BytesWritable(org.apache.hadoop.io.BytesWritable) Date(java.util.Date) WritableComparable(org.apache.hadoop.io.WritableComparable)
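
Drivers like this are typically launched through ToolRunner, which strips generic Hadoop options (such as -D settings) before passing the remaining arguments to run(). A minimal launcher sketch, assuming Sort extends Configured and implements Tool as in the standard Hadoop examples; paths and argument values are hypothetical:

// Minimal launcher sketch (assumed wrapper, not part of the hadoop-book source).
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class SortLauncher {
    public static void main(String[] args) throws Exception {
        // Example arguments (hypothetical paths): sort SequenceFiles of BytesWritable
        // pairs with 8 reducers and a sampled total-order partitioner.
        // args = { "-r", "8", "-totalOrder", "0.1", "10000", "10", "/in", "/out" }
        int exitCode = ToolRunner.run(new Configuration(), new Sort(), args);
        System.exit(exitCode);
    }
}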

Example 19 with WritableComparable

use of org.apache.hadoop.io.WritableComparable in project hadoop-book by elephantscale.

the class Join method run.

/**
     * The main driver for the join program. Invoke this method to submit the
     * map/reduce job.
     *
     * @throws IOException When there are communication problems with the job
     * tracker.
     */
@Override
public int run(String[] args) throws Exception {
    JobConf jobConf = new JobConf(getConf(), Sort.class);
    jobConf.setJobName("join");
    jobConf.setMapperClass(IdentityMapper.class);
    jobConf.setReducerClass(IdentityReducer.class);
    JobClient client = new JobClient(jobConf);
    ClusterStatus cluster = client.getClusterStatus();
    int num_maps = cluster.getTaskTrackers() * jobConf.getInt("test.sort.maps_per_host", 10);
    int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
    String sort_reduces = jobConf.get("test.sort.reduces_per_host");
    if (sort_reduces != null) {
        num_reduces = cluster.getTaskTrackers() * Integer.parseInt(sort_reduces);
    }
    Class<? extends InputFormat> inputFormatClass = SequenceFileInputFormat.class;
    Class<? extends OutputFormat> outputFormatClass = SequenceFileOutputFormat.class;
    Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
    Class<? extends Writable> outputValueClass = TupleWritable.class;
    String op = "inner";
    List<String> otherArgs = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                num_maps = Integer.parseInt(args[++i]);
            } else if ("-r".equals(args[i])) {
                num_reduces = Integer.parseInt(args[++i]);
            } else if ("-inFormat".equals(args[i])) {
                inputFormatClass = Class.forName(args[++i]).asSubclass(InputFormat.class);
            } else if ("-outFormat".equals(args[i])) {
                outputFormatClass = Class.forName(args[++i]).asSubclass(OutputFormat.class);
            } else if ("-outKey".equals(args[i])) {
                outputKeyClass = Class.forName(args[++i]).asSubclass(WritableComparable.class);
            } else if ("-outValue".equals(args[i])) {
                outputValueClass = Class.forName(args[++i]).asSubclass(Writable.class);
            } else if ("-joinOp".equals(args[i])) {
                op = args[++i];
            } else {
                otherArgs.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            // exits
            return printUsage();
        }
    }
    // Set user-supplied (possibly default) job configs
    jobConf.setNumMapTasks(num_maps);
    jobConf.setNumReduceTasks(num_reduces);
    if (otherArgs.size() < 2) {
        System.out.println("ERROR: Wrong number of parameters: ");
        return printUsage();
    }
    FileOutputFormat.setOutputPath(jobConf, new Path(otherArgs.remove(otherArgs.size() - 1)));
    List<Path> plist = new ArrayList<Path>(otherArgs.size());
    for (String s : otherArgs) {
        plist.add(new Path(s));
    }
    jobConf.setInputFormat(CompositeInputFormat.class);
    jobConf.set("mapred.join.expr", CompositeInputFormat.compose(op, inputFormatClass, plist.toArray(new Path[0])));
    jobConf.setOutputFormat(outputFormatClass);
    jobConf.setOutputKeyClass(outputKeyClass);
    jobConf.setOutputValueClass(outputValueClass);
    Date startTime = new Date();
    System.out.println("Job started: " + startTime);
    JobClient.runJob(jobConf);
    Date end_time = new Date();
    System.out.println("Job ended: " + end_time);
    System.out.println("The job took " + (end_time.getTime() - startTime.getTime()) / 1000 + " seconds.");
    return 0;
}
Also used : Path(org.apache.hadoop.fs.Path) ArrayList(java.util.ArrayList) BytesWritable(org.apache.hadoop.io.BytesWritable) TupleWritable(org.apache.hadoop.mapred.join.TupleWritable) Date(java.util.Date) WritableComparable(org.apache.hadoop.io.WritableComparable) CompositeInputFormat(org.apache.hadoop.mapred.join.CompositeInputFormat)
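
The key piece is the mapred.join.expr string produced by CompositeInputFormat.compose; all inputs must be sorted and identically partitioned for the map-side join to work. A minimal sketch with hypothetical paths showing what that expression looks like:

// Minimal sketch (hypothetical input paths): building the join expression that
// CompositeInputFormat evaluates at split and record-reader time.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.join.CompositeInputFormat;

public class JoinExprSketch {
    public static void main(String[] args) {
        String expr = CompositeInputFormat.compose(
                "inner",                            // join op: e.g. inner, outer, override
                SequenceFileInputFormat.class,
                new Path("/data/left"),             // hypothetical inputs
                new Path("/data/right"));
        // Prints something like:
        // inner(tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/left"),
        //       tbl(org.apache.hadoop.mapred.SequenceFileInputFormat,"/data/right"))
        System.out.println(expr);
    }
}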

Example 20 with WritableComparable

use of org.apache.hadoop.io.WritableComparable in project SQLWindowing by hbutani.

the class CompositeDataType method define.

public static CompositeDataType define(StructObjectInspector OI) throws WindowingException {
    List<? extends StructField> fields = OI.getAllStructFieldRefs();
    @SuppressWarnings("unchecked") DataType<? extends WritableComparable>[] elementTypes = (DataType<? extends WritableComparable>[]) new DataType[fields.size()];
    int i = 0;
    for (StructField f : fields) {
        ObjectInspector fOI = f.getFieldObjectInspector();
        if (fOI.getCategory() != Category.PRIMITIVE) {
            throw new WindowingException("Cannot handle non primitve fields for partitioning/sorting");
        }
        PrimitiveObjectInspector pOI = (PrimitiveObjectInspector) fOI;
        switch(pOI.getPrimitiveCategory()) {
            case BOOLEAN:
                elementTypes[i] = BOOLEAN;
                break;
            case DOUBLE:
                elementTypes[i] = DOUBLE;
                break;
            case BYTE:
                elementTypes[i] = BYTE;
                break;
            case FLOAT:
                elementTypes[i] = FLOAT;
                break;
            case INT:
                elementTypes[i] = INT;
                break;
            case LONG:
                elementTypes[i] = LONG;
                break;
            case SHORT:
                elementTypes[i] = SHORT;
                break;
            case STRING:
                elementTypes[i] = TEXT;
                break;
            default:
                throw new WindowingException(Utils.sprintf("Cannot handle datatype %s for partitioning/sorting", pOI.toString()));
        }
        i++;
    }
    return new CompositeDataType(",", elementTypes);
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) WritableComparable(org.apache.hadoop.io.WritableComparable) WindowingException(com.sap.hadoop.windowing.WindowingException)
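
DataType and CompositeDataType are SQLWindowing classes, but the pattern they lean on is the standard Hadoop one: a composite WritableComparable whose ordering is decided field by field. A generic sketch of that pattern (not SQLWindowing code), using Text and IntWritable as example fields:

// Minimal sketch of a composite key: serialization and ordering go field by field.
// A real key used for partitioning should also override hashCode() and equals().
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;

public class CompositeKey implements WritableComparable<CompositeKey> {
    private final Text first = new Text();
    private final IntWritable second = new IntWritable();

    @Override
    public void write(DataOutput out) throws IOException {
        first.write(out);
        second.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        first.readFields(in);
        second.readFields(in);
    }

    @Override
    public int compareTo(CompositeKey o) {
        // The first non-equal field decides the order.
        int cmp = first.compareTo(o.first);
        return cmp != 0 ? cmp : second.compareTo(o.second);
    }
}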

Aggregations

WritableComparable (org.apache.hadoop.io.WritableComparable): 34 usages
IOException (java.io.IOException): 14 usages
Writable (org.apache.hadoop.io.Writable): 14 usages
Path (org.apache.hadoop.fs.Path): 13 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 11 usages
JobConf (org.apache.hadoop.mapred.JobConf): 6 usages
CompressionCodec (org.apache.hadoop.io.compress.CompressionCodec): 5 usages
ArrayList (java.util.ArrayList): 4 usages
IntWritable (org.apache.hadoop.io.IntWritable): 4 usages
NullWritable (org.apache.hadoop.io.NullWritable): 4 usages
SequenceFile (org.apache.hadoop.io.SequenceFile): 4 usages
TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext): 4 usages
PCollection (com.tdunning.plume.PCollection): 3 usages
OutputChannel (com.tdunning.plume.local.lazy.MSCR.OutputChannel): 3 usages
PlumeObject (com.tdunning.plume.local.lazy.MapRedExecutor.PlumeObject): 3 usages
HashMap (java.util.HashMap): 3 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 3 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 3 usages
HCatRecord (org.apache.hive.hcatalog.data.HCatRecord): 3 usages
DoFn (com.tdunning.plume.DoFn): 2 usages