Usage of org.apache.hadoop.hdfs.DFSHedgedReadMetrics in the Apache HBase project.
From the class FSUtils, method getDFSHedgedReadMetrics.
/**
 * Returns the singleton {@link DFSHedgedReadMetrics} instance shared by DFSClients,
 * or null if the FileSystem for the given configuration is not HDFS or the metrics
 * cannot be reached via reflection.
 * @param c configuration used to obtain the FileSystem
 * @return the DFSClient DFSHedgedReadMetrics instance or null if it can't be found or not on hdfs
 * @throws IOException if the FileSystem cannot be obtained from the configuration
 */
public static DFSHedgedReadMetrics getDFSHedgedReadMetrics(final Configuration c) throws IOException {
  if (!isHDFS(c)) {
    return null;
  }
  // getHedgedReadMetrics is package private. Get the DFSClient instance that is internal
  // to the DFS FS instance and make the method getHedgedReadMetrics accessible, then invoke it
  // to get the singleton instance of DFSHedgedReadMetrics shared by DFSClients.
  final String name = "getHedgedReadMetrics";
  DFSClient dfsclient = ((DistributedFileSystem) FileSystem.get(c)).getClient();
  Method m;
  try {
    m = dfsclient.getClass().getDeclaredMethod(name);
  } catch (NoSuchMethodException | SecurityException e) {
    // Pass the Throwable to the logger so the stack trace is retained, not just the message.
    LOG.warn("Failed find method " + name + " in dfsclient; no hedged read metrics", e);
    return null;
  }
  m.setAccessible(true);
  try {
    return (DFSHedgedReadMetrics) m.invoke(dfsclient);
  } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
    LOG.warn("Failed invoking method " + name + " on dfsclient; no hedged read metrics", e);
    return null;
  }
}
Usage of org.apache.hadoop.hdfs.DFSHedgedReadMetrics in the Apache HBase project.
From the class TestFSUtils, method testDFSHedgedReadMetrics.
/**
 * Ugly test that ensures we can get at the hedged read counters in dfsclient.
 * Does a bit of preading with hedged reads enabled using code taken from hdfs TestPread.
 * @throws Exception if cluster setup, metrics retrieval, or the pread exercise fails
 */
@Test
public void testDFSHedgedReadMetrics() throws Exception {
  HBaseTestingUtility htu = new HBaseTestingUtility();
  // Enable hedged reads and set it so the threshold is really low.
  // Most of this test is taken from HDFS, from TestPread.
  Configuration conf = htu.getConfiguration();
  conf.setInt(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THREADPOOL_SIZE, 5);
  conf.setLong(DFSConfigKeys.DFS_DFSCLIENT_HEDGED_READ_THRESHOLD_MILLIS, 0);
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 4096);
  conf.setLong(DFSConfigKeys.DFS_CLIENT_READ_PREFETCH_SIZE_KEY, 4096);
  // Set short retry timeouts so this test runs faster
  conf.setInt(DFSConfigKeys.DFS_CLIENT_RETRY_WINDOW_BASE, 0);
  conf.setBoolean("dfs.datanode.transferTo.allowed", false);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
  // Everything after cluster start is inside try/finally so the cluster (and filesystem)
  // are always torn down, even if fetching the metrics or the filesystem throws.
  FileSystem fileSys = null;
  try {
    // Get the metrics. Should be empty.
    DFSHedgedReadMetrics metrics = FSUtils.getDFSHedgedReadMetrics(conf);
    assertEquals(0, metrics.getHedgedReadOps());
    fileSys = cluster.getFileSystem();
    Path p = new Path("preadtest.dat");
    // We need > 1 blocks to test out the hedged reads.
    DFSTestUtil.createFile(fileSys, p, 12 * blockSize, 12 * blockSize, blockSize, (short) 3, seed);
    pReadFile(fileSys, p);
    cleanupFile(fileSys, p);
    assertTrue(metrics.getHedgedReadOps() > 0);
  } finally {
    if (fileSys != null) {
      fileSys.close();
    }
    cluster.shutdown();
  }
}
Aggregations