use of org.apache.hadoop.mapreduce.OutputFormat in project hadoop by apache.
the class TestRecovery method writeOutput.
private void writeOutput(TaskAttempt attempt, Configuration conf) throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
      TypeConverter.fromYarn(attempt.getID()));
  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);
  NullWritable nullWritable = NullWritable.get();
  try {
    // Exercise every combination of real keys/values, nulls and NullWritable.
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(tContext);
  }
  // Commit the attempt's output through the committer of the configured output format.
  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
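The unconditional commitTask call works here because the test knows the attempt produced output. The framework proper normally consults the committer first; a minimal sketch of that guard, reusing the outputFormat and tContext from the snippet above:

// Sketch only: ask the committer whether this attempt actually has output to promote.
OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
if (committer.needsTaskCommit(tContext)) {
  committer.commitTask(tContext); // move attempt output to the task's committed location
}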
use of org.apache.hadoop.mapreduce.OutputFormat in project hadoop by apache.
the class Join method run.
/**
 * The main driver for the join program.
 * Invoke this method to submit the map/reduce job.
 * @throws IOException When there are communication problems with the
 *                     job tracker.
 */
@SuppressWarnings("unchecked")
public int run(String[] args) throws Exception {
Configuration conf = getConf();
JobClient client = new JobClient(conf);
ClusterStatus cluster = client.getClusterStatus();
int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
String join_reduces = conf.get(REDUCES_PER_HOST);
if (join_reduces != null) {
num_reduces = cluster.getTaskTrackers() * Integer.parseInt(join_reduces);
}
Job job = Job.getInstance(conf);
job.setJobName("join");
job.setJarByClass(Sort.class);
job.setMapperClass(Mapper.class);
job.setReducerClass(Reducer.class);
Class<? extends InputFormat> inputFormatClass = SequenceFileInputFormat.class;
Class<? extends OutputFormat> outputFormatClass = SequenceFileOutputFormat.class;
Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
Class<? extends Writable> outputValueClass = TupleWritable.class;
String op = "inner";
List<String> otherArgs = new ArrayList<String>();
for (int i = 0; i < args.length; ++i) {
try {
if ("-r".equals(args[i])) {
num_reduces = Integer.parseInt(args[++i]);
} else if ("-inFormat".equals(args[i])) {
inputFormatClass = Class.forName(args[++i]).asSubclass(InputFormat.class);
} else if ("-outFormat".equals(args[i])) {
outputFormatClass = Class.forName(args[++i]).asSubclass(OutputFormat.class);
} else if ("-outKey".equals(args[i])) {
outputKeyClass = Class.forName(args[++i]).asSubclass(WritableComparable.class);
} else if ("-outValue".equals(args[i])) {
outputValueClass = Class.forName(args[++i]).asSubclass(Writable.class);
} else if ("-joinOp".equals(args[i])) {
op = args[++i];
} else {
otherArgs.add(args[i]);
}
} catch (NumberFormatException except) {
System.out.println("ERROR: Integer expected instead of " + args[i]);
return printUsage();
} catch (ArrayIndexOutOfBoundsException except) {
System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
// exits
return printUsage();
}
}
// Set user-supplied (possibly default) job configs
job.setNumReduceTasks(num_reduces);
if (otherArgs.size() < 2) {
System.out.println("ERROR: Wrong number of parameters: ");
return printUsage();
}
FileOutputFormat.setOutputPath(job, new Path(otherArgs.remove(otherArgs.size() - 1)));
List<Path> plist = new ArrayList<Path>(otherArgs.size());
for (String s : otherArgs) {
plist.add(new Path(s));
}
job.setInputFormatClass(CompositeInputFormat.class);
job.getConfiguration().set(CompositeInputFormat.JOIN_EXPR, CompositeInputFormat.compose(op, inputFormatClass, plist.toArray(new Path[0])));
job.setOutputFormatClass(outputFormatClass);
job.setOutputKeyClass(outputKeyClass);
job.setOutputValueClass(outputValueClass);
Date startTime = new Date();
System.out.println("Job started: " + startTime);
int ret = job.waitForCompletion(true) ? 0 : 1;
Date end_time = new Date();
System.out.println("Job ended: " + end_time);
System.out.println("The job took " + (end_time.getTime() - startTime.getTime()) / 1000 + " seconds.");
return ret;
}
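The compose call is what ties the join together: it serializes the operation, the input format and the input paths into a single expression string stored under JOIN_EXPR. A hedged illustration of its output for a two-table inner join (the paths here are made up):

// Illustrative only; /data/a and /data/b are hypothetical input paths.
String expr = CompositeInputFormat.compose("inner",
    SequenceFileInputFormat.class, new Path("/data/a"), new Path("/data/b"));
// expr now looks roughly like:
//   inner(tbl(org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat,"/data/a"),
//         tbl(org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat,"/data/b"))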
use of org.apache.hadoop.mapreduce.OutputFormat in project ignite by apache.
the class HadoopV2SetupTask method run0.
/** {@inheritDoc} */
@SuppressWarnings("ConstantConditions")
@Override protected void run0(HadoopV2TaskContext taskCtx) throws IgniteCheckedException {
  try {
    JobContextImpl jobCtx = taskCtx.jobContext();
    OutputFormat outputFormat = getOutputFormat(jobCtx);
    outputFormat.checkOutputSpecs(jobCtx);
    OutputCommitter committer = outputFormat.getOutputCommitter(hadoopContext());
    if (committer != null)
      committer.setupJob(jobCtx);
  }
  catch (ClassNotFoundException | IOException e) {
    throw new IgniteCheckedException(e);
  }
  catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IgniteInterruptedCheckedException(e);
  }
}
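setupJob is only the first half of the job-level protocol: a cleanup step on the other end is expected to finalize or abort the same output. A minimal sketch of that counterpart using the standard OutputCommitter calls (this mirrors the setup task above; it is not the Ignite source, and succeeded is a hypothetical flag):

// Sketch: job-level finalization mirroring setupJob above.
if (succeeded)
  committer.commitJob(jobCtx); // promote all committed task output
else
  committer.abortJob(jobCtx, JobStatus.State.FAILED); // clean up partial output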
use of org.apache.hadoop.mapreduce.OutputFormat in project ignite by apache.
the class HadoopV2Task method prepareWriter.
/**
 * Puts the record writer into the Hadoop context and returns the associated
 * output format instance.
 *
 * @param jobCtx Job context.
 * @return Output format.
 * @throws IgniteCheckedException In case of Grid exception.
 * @throws InterruptedException In case of interrupt.
 */
protected OutputFormat prepareWriter(JobContext jobCtx) throws IgniteCheckedException, InterruptedException {
  try {
    OutputFormat outputFormat = getOutputFormat(jobCtx);
    assert outputFormat != null;
    OutputCommitter outCommitter = outputFormat.getOutputCommitter(hadoopCtx);
    if (outCommitter != null)
      outCommitter.setupTask(hadoopCtx);
    RecordWriter writer = outputFormat.getRecordWriter(hadoopCtx);
    hadoopCtx.writer(writer);
    return outputFormat;
  }
  catch (IOException | ClassNotFoundException e) {
    throw new IgniteCheckedException(e);
  }
}
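The output format is returned so the caller can later drive the matching teardown. A hedged sketch of what that teardown typically looks like; the names follow the prepareWriter code above, and hadoopCtx.writer() as a getter is an assumption, not verified Ignite API:

// Sketch: close the writer opened in prepareWriter, then commit if the committer asks for it.
hadoopCtx.writer().close(hadoopCtx); // writer() as getter is assumed here
OutputCommitter outCommitter = outputFormat.getOutputCommitter(hadoopCtx);
if (outCommitter.needsTaskCommit(hadoopCtx))
  outCommitter.commitTask(hadoopCtx);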
use of org.apache.hadoop.mapreduce.OutputFormat in project cdap by caskdata.
the class MultipleOutputsMainOutputWrapper method getRootOutputFormat.
// the root OutputFormat is used only for writing, not for checking output specs or committing
// the output, because the root is also among the delegates, which check the output spec and
// commit the output.
private OutputFormat<K, V> getRootOutputFormat(JobContext context) {
  if (innerFormat == null) {
    Configuration conf = context.getConfiguration();
    @SuppressWarnings("unchecked")
    Class<OutputFormat<K, V>> c =
        (Class<OutputFormat<K, V>>) conf.getClass(ROOT_OUTPUT_FORMAT, FileOutputFormat.class);
    innerFormat = ReflectionUtils.newInstance(c, conf);
  }
  return innerFormat;
}
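The lazily-instantiated, configuration-driven pattern above is easy to reuse on its own. A minimal sketch, assuming a made-up key name "example.root.output.format" (the actual value of the ROOT_OUTPUT_FORMAT constant is defined elsewhere in cdap):

// Sketch: resolve an OutputFormat class from the job configuration, falling back to
// TextOutputFormat, then instantiate it with ReflectionUtils so it sees the Configuration.
Class<? extends OutputFormat> cls = conf.getClass(
    "example.root.output.format", // hypothetical key name
    TextOutputFormat.class, OutputFormat.class);
OutputFormat<?, ?> format = ReflectionUtils.newInstance(cls, conf);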