Use of com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration in project dr-elephant by LinkedIn.
From class ElephantContext, method loadFetchers:
/**
 * Loads all the fetchers configured in FetcherConf.xml and registers one
 * {@link ElephantFetcher} instance per application type in {@code _typeToFetcher}.
 *
 * <p>Each configured class is instantiated reflectively via its
 * {@code (FetcherConfigurationData)} constructor and must implement
 * {@link ElephantFetcher}. Only the first fetcher configured for a given
 * {@link ApplicationType} is registered; later duplicates are ignored with a
 * warning. Any reflection failure is wrapped in a {@link RuntimeException},
 * aborting startup.
 */
private void loadFetchers() {
Document document = Utils.loadXMLDoc(FETCHERS_CONF);
_fetchersConfData = new FetcherConfiguration(document.getDocumentElement()).getFetchersConfigurationData();
for (FetcherConfigurationData data : _fetchersConfData) {
try {
Class<?> fetcherClass = Class.forName(data.getClassName());
Object instance = fetcherClass.getConstructor(FetcherConfigurationData.class).newInstance(data);
if (!(instance instanceof ElephantFetcher)) {
throw new IllegalArgumentException("Class " + fetcherClass.getName() + " is not an implementation of " + ElephantFetcher.class.getName());
}
ApplicationType type = data.getAppType();
if (_typeToFetcher.get(type) == null) {
_typeToFetcher.put(type, (ElephantFetcher) instance);
} else {
// First fetcher configured for a type wins; warn instead of silently skipping.
logger.warn("Ignoring duplicate fetcher " + data.getClassName() + " for application type " + type);
}
logger.info("Load Fetcher : " + data.getClassName());
} catch (ClassNotFoundException e) {
throw new RuntimeException("Could not find class " + data.getClassName(), e);
} catch (InstantiationException e) {
throw new RuntimeException("Could not instantiate class " + data.getClassName(), e);
} catch (IllegalAccessException e) {
// Fixed: message previously read "...for class<ClassName>" with no separating space.
throw new RuntimeException("Could not access constructor for class " + data.getClassName(), e);
} catch (RuntimeException e) {
// Also re-wraps the IllegalArgumentException thrown above for non-fetcher classes.
throw new RuntimeException(data.getClassName() + " is not a valid Fetcher class.", e);
} catch (InvocationTargetException e) {
throw new RuntimeException("Could not invoke class " + data.getClassName(), e);
} catch (NoSuchMethodException e) {
throw new RuntimeException("Could not find constructor for class " + data.getClassName(), e);
}
}
}
Use of com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration in project dr-elephant by LinkedIn.
From class MapReduceFSFetcherHadoop2Test, method testFetcherConfig:
/**
 * Verifies that sampling, max log size, and time zone are read from the
 * fetcher configuration (document10), and that an over-sized task list is
 * sampled down to MAX_SAMPLE_SIZE when sampling is enabled.
 */
@Test
public void testFetcherConfig() {
FetcherConfiguration fetcherConf = new FetcherConfiguration(document10.getDocumentElement());
try {
MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2(fetcherConf.getFetchersConfigurationData().get(0));
Assert.assertTrue("Failed to enable sampling", fetcher.isSamplingEnabled());
Assert.assertEquals(200d, fetcher.getMaxLogSizeInMB(), 0.0001);
Assert.assertEquals(TimeZone.getTimeZone("PST"), fetcher.getTimeZone());
// Build a task list twice the sampling threshold to force sampling.
List<Object> list = new ArrayList<Object>();
int listLen = fetcher.MAX_SAMPLE_SIZE * 2;
for (int i = 0; i < listLen; i++) {
list.add(0);
}
Assert.assertEquals("Should sample task list when sampling is enabled", fetcher.MAX_SAMPLE_SIZE, fetcher.sampleAndGetSize("appId", list));
} catch (IOException e) {
// Fail explicitly rather than asserting that a caught exception is null.
Assert.fail("Failed to initialize FileSystem: " + e.getMessage());
}
}
Use of com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration in project dr-elephant by LinkedIn.
From class MapReduceFSFetcherHadoop2Test, method testGetTaskData:
/**
 * Verifies that getTaskData returns one entry per task (one succeeded, one
 * failed mock task) with no null elements, and that exactly one task reports
 * the SUCCEEDED state.
 */
@Test
public void testGetTaskData() {
FetcherConfiguration fetcherConf = new FetcherConfiguration(document9.getDocumentElement());
try {
MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2(fetcherConf.getFetchersConfigurationData().get(0));
String jobId = "job_14000_001";
List<JobHistoryParser.TaskInfo> infoList = new ArrayList<JobHistoryParser.TaskInfo>();
infoList.add(new MockTaskInfo(1, true));
infoList.add(new MockTaskInfo(2, false));
MapReduceTaskData[] taskList = fetcher.getTaskData(jobId, infoList);
Assert.assertNotNull("taskList should not be null.", taskList);
int succeededTaskCount = 0;
for (MapReduceTaskData task : taskList) {
Assert.assertNotNull("Null pointer in taskList.", task);
// Constant-first equals avoids an NPE if getState() returns null.
if ("SUCCEEDED".equals(task.getState())) {
succeededTaskCount++;
}
}
Assert.assertEquals("Should have total two tasks.", 2, taskList.length);
Assert.assertEquals("Should have only one succeeded task.", 1, succeededTaskCount);
} catch (IOException e) {
// Fail explicitly rather than asserting that a caught exception is null.
Assert.fail("Failed to initialize FileSystem: " + e.getMessage());
}
}
Use of com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration in project dr-elephant by LinkedIn.
From class MapReduceFSFetcherHadoop2Test, method testFetcherEmptyConf:
/**
 * Verifies the fetcher's defaults with an empty configuration (document11):
 * sampling disabled, default max log size, system default time zone, and no
 * down-sampling of an over-sized task list.
 */
@Test
public void testFetcherEmptyConf() {
FetcherConfiguration fetcherConf = new FetcherConfiguration(document11.getDocumentElement());
try {
MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2(fetcherConf.getFetchersConfigurationData().get(0));
Assert.assertFalse("Sampling should be disabled in default", fetcher.isSamplingEnabled());
Assert.assertEquals(fetcher.DEFALUT_MAX_LOG_SIZE_IN_MB, fetcher.getMaxLogSizeInMB(), 0.0001);
Assert.assertEquals(TimeZone.getDefault(), fetcher.getTimeZone());
// A list twice the sampling threshold must be returned at full size.
List<Object> list = new ArrayList<Object>();
int listLen = fetcher.MAX_SAMPLE_SIZE * 2;
for (int i = 0; i < listLen; i++) {
list.add(0);
}
Assert.assertEquals("Should not sample task list when sampling is disabled", listLen, fetcher.sampleAndGetSize("appId", list));
} catch (IOException e) {
// Fail explicitly rather than asserting that a caught exception is null.
Assert.fail("Failed to initialize FileSystem: " + e.getMessage());
}
}
Use of com.linkedin.drelephant.configurations.fetcher.FetcherConfiguration in project dr-elephant by LinkedIn.
From class MapReduceFSFetcherHadoop2Test, method testGetHistoryDir:
/**
 * Verifies that the job-history directory path is derived from the job's
 * finish time (YYYY/MM/DD) and a zero-padded serial bucket of the app id.
 */
@Test
public void testGetHistoryDir() {
FetcherConfiguration fetcherConf = new FetcherConfiguration(document9.getDocumentElement());
try {
MapReduceFSFetcherHadoop2 fetcher = new MapReduceFSFetcherHadoop2(fetcherConf.getFetchersConfigurationData().get(0));
Calendar timestamp = Calendar.getInstance();
timestamp.set(2016, Calendar.JULY, 30);
AnalyticJob job = new AnalyticJob().setAppId("application_1461566847127_84624").setFinishTime(timestamp.getTimeInMillis());
// Expected layout: <historyLocation>/2016/07/30/000084/ (trailing separator).
String expected = StringUtils.join(new String[] { fetcher.getHistoryLocation(), "2016", "07", "30", "000084", "" }, File.separator);
Assert.assertEquals("Error history directory", expected, fetcher.getHistoryDir(job));
} catch (IOException e) {
// Fail explicitly rather than asserting that a caught exception is null.
Assert.fail("Failed to initialize FileSystem: " + e.getMessage());
}
}
Aggregations