Use of org.apache.hadoop.mapreduce.TaskAttemptContext in project mongo-hadoop by mongodb.
From the class GridFSInputFormatTest, method testReadWholeFileNoDelimiter.
@Test
public void testReadWholeFileNoDelimiter() throws IOException, InterruptedException {
    Configuration conf = getConfiguration();
    MongoConfigUtil.setGridFSWholeFileSplit(conf, true);
    JobContext jobContext = mockJobContext(conf);
    List<InputSplit> splits = inputFormat.getSplits(jobContext);
    // Empty delimiter == no delimiter.
    MongoConfigUtil.setGridFSDelimiterPattern(conf, "");
    TaskAttemptContext context = mockTaskAttemptContext(conf);
    assertEquals(1, splits.size());
    String fileText = null;
    for (InputSplit split : splits) {
        GridFSInputFormat.GridFSTextRecordReader reader = new GridFSInputFormat.GridFSTextRecordReader();
        reader.initialize(split, context);
        int i;
        for (i = 0; reader.nextKeyValue(); ++i) {
            fileText = reader.getCurrentValue().toString();
        }
        // With no delimiter, the whole file comes back as a single record.
        assertEquals(1, i);
    }
    assertEquals(fileContents.toString(), fileText);
}
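Both this test and the next rely on mockJobContext and mockTaskAttemptContext helpers from the surrounding test class. A minimal sketch of one such helper, assuming it does nothing more than stub getConfiguration() with Mockito (the Mockito approach is an assumption, not necessarily mongo-hadoop's actual implementation):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Hypothetical helper: a TaskAttemptContext whose getConfiguration()
// returns the supplied Configuration, which is all the record reader needs here.
private static TaskAttemptContext mockTaskAttemptContext(final Configuration conf) {
    TaskAttemptContext context = mock(TaskAttemptContext.class);
    when(context.getConfiguration()).thenReturn(conf);
    return context;
}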
Use of org.apache.hadoop.mapreduce.TaskAttemptContext in project mongo-hadoop by mongodb.
From the class GridFSInputFormatTest, method testRecordReaderNoDelimiter.
@Test
public void testRecordReaderNoDelimiter() throws IOException, InterruptedException {
    List<InputSplit> splits = getSplits();
    Configuration conf = getConfiguration();
    // Empty delimiter == no delimiter.
    MongoConfigUtil.setGridFSDelimiterPattern(conf, "");
    TaskAttemptContext context = mockTaskAttemptContext(conf);
    StringBuilder fileText = new StringBuilder();
    for (InputSplit split : splits) {
        GridFSInputFormat.GridFSTextRecordReader reader = new GridFSInputFormat.GridFSTextRecordReader();
        reader.initialize(split, context);
        while (reader.nextKeyValue()) {
            fileText.append(reader.getCurrentValue().toString());
        }
    }
    assertEquals(fileContents.toString(), fileText.toString());
}
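getSplits() here is presumably a convenience wrapper in the test class; a plausible sketch, assuming it simply repeats the mocked-JobContext setup from the previous test:

// Hypothetical wrapper, mirroring the previous test's split setup.
private List<InputSplit> getSplits() throws IOException, InterruptedException {
    return inputFormat.getSplits(mockJobContext(getConfiguration()));
}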
Use of org.apache.hadoop.mapreduce.TaskAttemptContext in project cassandra by apache.
From the class CqlInputFormat, method getSplits.
// Old Hadoop API
public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
    TaskAttemptContext tac = HadoopCompat.newTaskAttemptContext(jobConf, new TaskAttemptID());
    List<org.apache.hadoop.mapreduce.InputSplit> newInputSplits = this.getSplits(tac);
    InputSplit[] oldInputSplits = new InputSplit[newInputSplits.size()];
    for (int i = 0; i < newInputSplits.size(); i++) {
        oldInputSplits[i] = (ColumnFamilySplit) newInputSplits.get(i);
    }
    return oldInputSplits;
}
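An illustrative call site for this old-API bridge, assuming a configured JobConf (the Cassandra connection setup is elided; only the names from the snippet above are from the source):

import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;

// Illustrative call site; configuration of jobConf is elided.
public InputSplit[] fetchSplits() throws IOException {
    JobConf jobConf = new JobConf();
    // ... Cassandra connection/keyspace settings would go on jobConf here ...
    CqlInputFormat inputFormat = new CqlInputFormat();
    // The numSplits hint is unused: the method delegates to the new-API
    // getSplits(TaskAttemptContext) and merely downcasts each result.
    return inputFormat.getSplits(jobConf, 0);
}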
Use of org.apache.hadoop.mapreduce.TaskAttemptContext in project flink by apache.
From the class HadoopOutputFormatBase, method finalizeGlobal.
@Override
public void finalizeGlobal(int parallelism) throws IOException {
    JobContext jobContext;
    TaskAttemptContext taskContext;
    try {
        // Zero-pads the task number (1) to six digits, yielding the
        // fixed attempt id "attempt__0000_r_000001_0".
        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
            + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0")
            + Integer.toString(1) + "_0");
        jobContext = HadoopUtils.instantiateJobContext(this.configuration, new JobID());
        taskContext = HadoopUtils.instantiateTaskAttemptContext(this.configuration, taskAttemptID);
        this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    jobContext.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobContext.getCredentials().addAll(currentUserCreds);
    }
    // finalize HDFS output format
    if (this.outputCommitter != null) {
        this.outputCommitter.commitJob(jobContext);
    }
}
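The padding expression above always resolves to the same literal; a worked sketch, derived by hand rather than taken from Flink's sources:

String n = Integer.toString(1);                                  // "1"
String zeros = String.format("%" + (6 - n.length()) + "s", " ")  // "     " (width 5)
        .replace(" ", "0");                                      // "00000"
// The forName argument is therefore always "attempt__0000_r_000001_0".
String attemptId = "attempt__0000_r_" + zeros + n + "_0";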
Use of org.apache.hadoop.mapreduce.TaskAttemptContext in project flink by apache.
From the class HadoopUtils, method instantiateTaskAttemptContext.
public static TaskAttemptContext instantiateTaskAttemptContext(Configuration configuration, TaskAttemptID taskAttemptID) throws Exception {
    try {
        Class<?> clazz;
        // In Hadoop 2.x, JobContext is an interface and the concrete
        // implementation lives in the task package.
        if (JobContext.class.isInterface()) {
            clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
        } else {
            // In Hadoop 1.x, TaskAttemptContext is itself a concrete class.
            clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
        }
        Constructor<?> constructor = clazz.getConstructor(Configuration.class, TaskAttemptID.class);
        TaskAttemptContext context = (TaskAttemptContext) constructor.newInstance(configuration, taskAttemptID);
        return context;
    } catch (Exception e) {
        // Chain the cause so the original failure is not lost.
        throw new Exception("Could not create instance of TaskAttemptContext.", e);
    }
}
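A minimal usage sketch for this helper; the attempt id literal is illustrative, not taken from Flink:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;

// Illustrative call; instantiateTaskAttemptContext declares throws Exception,
// so the caller must handle or propagate it.
Configuration conf = new Configuration();
TaskAttemptID id = TaskAttemptID.forName("attempt_201401011200_0001_r_000001_0");
TaskAttemptContext context = HadoopUtils.instantiateTaskAttemptContext(conf, id);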