Use of org.apache.hadoop.mapreduce.Reducer in project cdap by caskdata.
From the class ReducerWrapper, the method run:
@SuppressWarnings("unchecked")
@Override
public void run(Context context) throws IOException, InterruptedException {
  MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(context.getConfiguration());
  ClassLoader weakReferenceClassLoader = new WeakReferenceDelegatorClassLoader(classLoader);

  BasicMapReduceTaskContext basicMapReduceContext = classLoader.getTaskContextProvider().get(context);
  long metricsReportInterval = basicMapReduceContext.getMetricsReportIntervalMillis();
  final ReduceTaskMetricsWriter reduceTaskMetricsWriter =
    new ReduceTaskMetricsWriter(basicMapReduceContext.getProgramMetrics(), context);

  // this is a hook for periodic flushing of changes buffered by datasets (to avoid OOME)
  WrappedReducer.Context flushingContext =
    createAutoFlushingContext(context, basicMapReduceContext, reduceTaskMetricsWriter);
  basicMapReduceContext.setHadoopContext(flushingContext);

  String userReducer = context.getConfiguration().get(ATTR_REDUCER_CLASS);
  ClassLoader programClassLoader = classLoader.getProgramClassLoader();
  Reducer delegate = createReducerInstance(programClassLoader, userReducer);

  // injecting runtime components, like datasets, etc.
  try {
    Reflections.visit(delegate, delegate.getClass(),
                      new PropertyFieldSetter(basicMapReduceContext.getSpecification().getProperties()),
                      new MetricsFieldSetter(basicMapReduceContext.getMetrics()),
                      new DataSetFieldSetter(basicMapReduceContext));
  } catch (Throwable t) {
    LOG.error("Failed to inject fields to {}.", delegate.getClass(), t);
    throw Throwables.propagate(t);
  }

  ClassLoader oldClassLoader;
  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle) delegate).initialize(new MapReduceLifecycleContext(basicMapReduceContext));
    } catch (Exception e) {
      LOG.error("Failed to initialize reducer with {}", basicMapReduceContext, e);
      throw Throwables.propagate(e);
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
  try {
    delegate.run(flushingContext);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
  // the transaction is not finished yet, but we want all operations dispatched
  // (some could still be buffered in memory by the tx agent)
  try {
    basicMapReduceContext.flushOperations();
  } catch (Exception e) {
    LOG.error("Failed to flush operations at the end of reducer of " + basicMapReduceContext, e);
    throw Throwables.propagate(e);
  }

  // Close all writers created by MultipleOutputs
  basicMapReduceContext.closeMultiOutputs();

  if (delegate instanceof ProgramLifecycle) {
    oldClassLoader = ClassLoaders.setContextClassLoader(weakReferenceClassLoader);
    try {
      ((ProgramLifecycle<? extends RuntimeContext>) delegate).destroy();
    } catch (Exception e) {
      LOG.error("Error during destroy of reducer {}", basicMapReduceContext, e);
      // Do nothing, try to finish
    } finally {
      ClassLoaders.setContextClassLoader(oldClassLoader);
    }
  }

  reduceTaskMetricsWriter.reportMetrics();
}
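
ReducerWrapper only runs the initialize/destroy hooks when the delegate opts into CDAP's lifecycle. A minimal sketch of such a delegate, assuming the co.cask.cdap-era ProgramLifecycle and MapReduceTaskContext APIs; the class itself and its word-count logic are hypothetical, not taken from the CDAP sources:

import java.io.IOException;

import co.cask.cdap.api.ProgramLifecycle;
import co.cask.cdap.api.mapreduce.MapReduceTaskContext;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Hypothetical user reducer: ReducerWrapper instantiates it from the program
// classloader, injects fields into it, and brackets run() with initialize()
// and destroy() because it implements ProgramLifecycle.
public class WordCountReducer extends Reducer<Text, LongWritable, Text, LongWritable>
    implements ProgramLifecycle<MapReduceTaskContext> {

  @Override
  public void initialize(MapReduceTaskContext context) throws Exception {
    // called by the wrapper before any reduce() call, with the weak-reference
    // classloader installed as the thread context classloader
  }

  @Override
  protected void reduce(Text word, Iterable<LongWritable> counts, Context context)
      throws IOException, InterruptedException {
    long sum = 0;
    for (LongWritable count : counts) {
      sum += count.get();
    }
    context.write(word, new LongWritable(sum));
  }

  @Override
  public void destroy() {
    // called after all input is consumed; the wrapper logs (but does not
    // rethrow) exceptions thrown here, so the task can still finish
  }
}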
Use of org.apache.hadoop.mapreduce.Reducer in project Gaffer by gchq.
From the class AccumuloKeyValueReducerTest, the method shouldGetGroupFromElementConverter:
@Test
public void shouldGetGroupFromElementConverter() throws IOException, InterruptedException {
  // Given
  MockAccumuloElementConverter.mock = mock(AccumuloElementConverter.class);
  final Key key = mock(Key.class);
  final List<Value> values = Arrays.asList(mock(Value.class), mock(Value.class));
  final Reducer.Context context = mock(Reducer.Context.class);
  final Configuration conf = mock(Configuration.class);
  final Schema schema = new Schema.Builder()
      .edge(TestGroups.ENTITY, new SchemaEdgeDefinition())
      .build();
  final ByteSequence colFamData = mock(ByteSequence.class);
  final byte[] colFam = StringUtil.toBytes(TestGroups.ENTITY);

  given(context.nextKey()).willReturn(true, false);
  given(context.getCurrentKey()).willReturn(key);
  given(context.getValues()).willReturn(values);
  given(context.getConfiguration()).willReturn(conf);
  given(context.getCounter(any(), any())).willReturn(mock(Counter.class));
  given(conf.get(SCHEMA)).willReturn(StringUtil.toString(schema.toCompactJson()));
  given(conf.get(AccumuloStoreConstants.ACCUMULO_ELEMENT_CONVERTER_CLASS))
      .willReturn(MockAccumuloElementConverter.class.getName());
  given(colFamData.getBackingArray()).willReturn(colFam);
  given(key.getColumnFamilyData()).willReturn(colFamData);
  given(MockAccumuloElementConverter.mock.getGroupFromColumnFamily(colFam)).willReturn(TestGroups.ENTITY);

  final AccumuloKeyValueReducer reducer = new AccumuloKeyValueReducer();

  // When
  reducer.run(context);

  // Then
  verify(MockAccumuloElementConverter.mock, times(1)).getGroupFromColumnFamily(colFam);
}
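
What lets this test run a Reducer without a cluster is the context stubbing: nextKey() answers true once and then false, so Reducer.run() performs exactly one reduce pass over the mocked key and values before exiting. The same pattern in isolation, against Hadoop's built-in identity Reducer; the class name and the key/value data here are hypothetical:

import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.Arrays;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Drives Reducer.run() through exactly one key, entirely on mocks. Hadoop's
// base Reducer is an identity reducer, so each value is written straight
// back out against the current key.
public final class MockedReducerRunExample {

  @SuppressWarnings({"unchecked", "rawtypes"})
  public static void main(String[] args) throws Exception {
    final Reducer<Text, LongWritable, Text, LongWritable> reducer = new Reducer<>();
    final Reducer.Context context = mock(Reducer.Context.class);

    // true once, then false: the run() loop makes a single reduce() call
    given(context.nextKey()).willReturn(true, false);
    given(context.getCurrentKey()).willReturn(new Text("a"));
    given(context.getValues()).willReturn(Arrays.asList(new LongWritable(1), new LongWritable(2)));

    reducer.run(context);

    // the identity reduce() emitted each value under the current key
    verify(context).write(new Text("a"), new LongWritable(1));
    verify(context).write(new Text("a"), new LongWritable(2));
  }
}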