Use of org.apache.hadoop.mapreduce.JobContext in project hive by apache.
The class MultiOutputFormat, method checkOutputSpecs.
@Override
public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.debug("Calling checkOutputSpecs for alias: " + alias);
    JobContext aliasContext = getJobContext(alias, context);
    OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext);
    outputFormat.checkOutputSpecs(aliasContext);
    // Copy credentials and any new config added back to JobContext
    context.getCredentials().addAll(aliasContext.getCredentials());
    setAliasConf(alias, context, aliasContext);
  }
}
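For context, the aliases iterated above have to be registered on the job first. A minimal sketch, assuming the JobConfigurer companion API described in MultiOutputFormat's javadoc; the alias names, output formats, and key/value classes are illustrative:

Job job = Job.getInstance(new Configuration());
job.setOutputFormatClass(MultiOutputFormat.class);
// Assumed API: createConfigurer/addOutputFormat/configure; aliases "text" and "seq" are made up.
JobConfigurer configurer = MultiOutputFormat.createConfigurer(job);
configurer.addOutputFormat("text", TextOutputFormat.class, LongWritable.class, Text.class);
configurer.addOutputFormat("seq", SequenceFileOutputFormat.class, LongWritable.class, Text.class);
// Serializes each alias's OutputFormat configuration into the shared job config,
// which is what getOutputFormatAliases() and getJobContext() read back above.
configurer.configure();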
Use of org.apache.hadoop.mapreduce.JobContext in project hive by apache.
The class MultiOutputFormat, method getJobContext.
/**
 * Get the JobContext with the related OutputFormat configuration populated, given the alias
 * and the actual JobContext.
 * @param alias the name given to the OutputFormat configuration
 * @param context the JobContext
 * @return a copy of the JobContext with the alias configuration populated
 */
public static JobContext getJobContext(String alias, JobContext context) {
  String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
  JobContext aliasContext = ShimLoader.getHadoopShims().getHCatShim()
      .createJobContext(context.getConfiguration(), context.getJobID());
  addToConfig(aliasConf, aliasContext.getConfiguration());
  return aliasContext;
}
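A hedged usage sketch: the returned per-alias context behaves like any other JobContext; the alias "text" is hypothetical, and the property read is the standard Hadoop 2 output-directory key, shown only for illustration:

// "text" is a hypothetical alias registered earlier via the alias config mechanism.
JobContext textContext = MultiOutputFormat.getJobContext("text", context);
Configuration aliasConf = textContext.getConfiguration();
// Reads a standard Hadoop key from the alias-scoped copy of the configuration.
String outDir = aliasConf.get("mapreduce.output.fileoutputformat.outputdir");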
Use of org.apache.hadoop.mapreduce.JobContext in project gora by apache.
The class GoraMapReduceUtils, method getSplits.
public static List<InputSplit> getSplits(Configuration conf, String inputPath) throws IOException {
  JobContext context = createJobContext(conf, inputPath);
  HelperInputFormat<?, ?> inputFormat = new HelperInputFormat<Object, Object>();
  return inputFormat.getSplits(context);
}
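A hedged usage sketch of the helper; the input path is a placeholder and the printing is purely illustrative:

public static void printSplits(Configuration conf) throws IOException, InterruptedException {
  // "/tmp/gora-input" is a placeholder path.
  for (InputSplit split : GoraMapReduceUtils.getSplits(conf, "/tmp/gora-input")) {
    // getLength()/getLocations() may throw, hence the throws clause above.
    System.out.println(split.getLength() + " bytes on " + Arrays.toString(split.getLocations()));
  }
}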
Use of org.apache.hadoop.mapreduce.JobContext in project flink by apache.
The class HadoopUtils, method instantiateJobContext.
public static JobContext instantiateJobContext(Configuration configuration, JobID jobId) throws Exception {
  try {
    Class<?> clazz;
    if (JobContext.class.isInterface()) {
      // Hadoop 2.x: JobContext is an interface, so instantiate the JobContextImpl class
      clazz = Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl", true,
          Thread.currentThread().getContextClassLoader());
    } else {
      // Hadoop 1.x: JobContext is a concrete class and can be instantiated directly
      clazz = Class.forName("org.apache.hadoop.mapreduce.JobContext", true,
          Thread.currentThread().getContextClassLoader());
    }
    Constructor<?> constructor = clazz.getConstructor(Configuration.class, JobID.class);
    return (JobContext) constructor.newInstance(configuration, jobId);
  } catch (Exception e) {
    // Preserve the underlying cause instead of swallowing it
    throw new Exception("Could not create instance of JobContext.", e);
  }
}
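A hedged usage sketch: the reflective factory lets the same calling code run against both Hadoop major versions; the JobID arguments and the InputFormat variable below are placeholders:

Configuration conf = new Configuration();
// "example" / 1 are placeholder JobID constructor arguments.
JobContext context = HadoopUtils.instantiateJobContext(conf, new JobID("example", 1));
// hadoopInputFormat stands in for any org.apache.hadoop.mapreduce.InputFormat instance.
List<InputSplit> splits = hadoopInputFormat.getSplits(context);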
Use of org.apache.hadoop.mapreduce.JobContext in project druid by druid-io.
The class DatasourceInputFormatTest, method testGetSplitsUsingDefaultSupplier.
@Test
public void testGetSplitsUsingDefaultSupplier() throws Exception {
  // Use the built-in supplier, reading from the local filesystem, rather than testFormatter.
  final File tmpFile = temporaryFolder.newFile("something:with:colons");
  Files.write("dummy", tmpFile, Charsets.UTF_8);
  final ImmutableList<WindowedDataSegment> mySegments = ImmutableList.of(
      WindowedDataSegment.of(
          new DataSegment(
              "test1",
              Interval.parse("2000/3000"),
              "ver",
              ImmutableMap.<String, Object>of("type", "local", "path", tmpFile.getPath()),
              ImmutableList.of("host"),
              ImmutableList.of("visited_sum", "unique_hosts"),
              NoneShardSpec.instance(),
              9,
              2
          )
      )
  );
  final JobConf myConfig = new JobConf();
  myConfig.set(
      DatasourceInputFormat.CONF_INPUT_SEGMENTS,
      new DefaultObjectMapper().writeValueAsString(mySegments)
  );
  final JobContext myContext = EasyMock.createMock(JobContext.class);
  EasyMock.expect(myContext.getConfiguration()).andReturn(myConfig);
  EasyMock.replay(myContext);
  final List<InputSplit> splits = new DatasourceInputFormat().getSplits(myContext);
  Assert.assertEquals(1, splits.size());
  final DatasourceInputSplit theSplit = (DatasourceInputSplit) Iterables.getOnlyElement(splits);
  Assert.assertEquals(mySegments.get(0).getSegment().getSize(), theSplit.getLength());
  Assert.assertEquals(mySegments, theSplit.getSegments());
  Assert.assertArrayEquals(new String[] { "localhost" }, theSplit.getLocations());
}
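The mocking pattern in this test generalizes: any code path that only calls JobContext.getConfiguration() can be exercised against a hand-built configuration. A minimal sketch; the key and value are illustrative:

JobConf conf = new JobConf();
conf.set("example.key", "example.value"); // illustrative property
JobContext context = EasyMock.createMock(JobContext.class);
// anyTimes() relaxes the single-call expectation used in the test above.
EasyMock.expect(context.getConfiguration()).andReturn(conf).anyTimes();
EasyMock.replay(context);
// ... run the code under test against 'context' ...
EasyMock.verify(context);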