Use of org.apache.hadoop.tools.rumen.ResourceUsageMetrics in project hadoop by apache.
From the class LoadSplit, the method readFields:
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  id = WritableUtils.readVInt(in);
  maps = WritableUtils.readVInt(in);
  inputRecords = WritableUtils.readVLong(in);
  outputBytes = WritableUtils.readVLong(in);
  outputRecords = WritableUtils.readVLong(in);
  maxMemory = WritableUtils.readVLong(in);
  reduces = WritableUtils.readVInt(in);
  // Reallocate the per-reduce arrays only when the existing ones are too small.
  if (reduceBytes.length < reduces) {
    reduceBytes = new double[reduces];
    reduceRecords = new double[reduces];
  }
  for (int i = 0; i < reduces; ++i) {
    reduceBytes[i] = in.readDouble();
    reduceRecords[i] = in.readDouble();
  }
  nSpec = WritableUtils.readVInt(in);
  if (reduceOutputBytes.length < nSpec) {
    reduceOutputBytes = new long[nSpec];
    reduceOutputRecords = new long[nSpec];
  }
  for (int i = 0; i < nSpec; ++i) {
    reduceOutputBytes[i] = WritableUtils.readVLong(in);
    reduceOutputRecords[i] = WritableUtils.readVLong(in);
  }
  // Deserialize the map-side metrics, then one metrics object per reduce.
  mapMetrics = new ResourceUsageMetrics();
  mapMetrics.readFields(in);
  int numReduceMetrics = WritableUtils.readVInt(in);
  reduceMetrics = new ResourceUsageMetrics[numReduceMetrics];
  for (int i = 0; i < numReduceMetrics; ++i) {
    reduceMetrics[i] = new ResourceUsageMetrics();
    reduceMetrics[i].readFields(in);
  }
}
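The inverse write method must emit fields in exactly the order read above. The following is a minimal sketch inferred from this readFields, assuming the same field names and imports as the snippet; it is not the actual Hadoop implementation:

@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  WritableUtils.writeVInt(out, id);
  WritableUtils.writeVInt(out, maps);
  WritableUtils.writeVLong(out, inputRecords);
  WritableUtils.writeVLong(out, outputBytes);
  WritableUtils.writeVLong(out, outputRecords);
  WritableUtils.writeVLong(out, maxMemory);
  WritableUtils.writeVInt(out, reduces);
  // Per-reduce doubles, interleaved exactly as readFields consumes them.
  for (int i = 0; i < reduces; ++i) {
    out.writeDouble(reduceBytes[i]);
    out.writeDouble(reduceRecords[i]);
  }
  WritableUtils.writeVInt(out, nSpec);
  for (int i = 0; i < nSpec; ++i) {
    WritableUtils.writeVLong(out, reduceOutputBytes[i]);
    WritableUtils.writeVLong(out, reduceOutputRecords[i]);
  }
  // Map-side metrics, then a count followed by one metrics object per reduce.
  mapMetrics.write(out);
  WritableUtils.writeVInt(out, reduceMetrics.length);
  for (ResourceUsageMetrics m : reduceMetrics) {
    m.write(out);
  }
}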
Use of org.apache.hadoop.tools.rumen.ResourceUsageMetrics in project hadoop by apache.
From the class TestResourceUsageEmulators, the method createMetrics:
// Creates a ResourceUsageMetrics object from the target usage
static ResourceUsageMetrics createMetrics(long target) {
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  metrics.setCumulativeCpuUsage(target);
  metrics.setVirtualMemoryUsage(target);
  metrics.setPhysicalMemoryUsage(target);
  metrics.setHeapUsage(target);
  return metrics;
}
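A hypothetical use of this helper: since every tracked dimension is set to the same target value, each standard ResourceUsageMetrics getter should return it back.

ResourceUsageMetrics metrics = createMetrics(100L);
// every dimension carries the same target value
assertEquals(100L, metrics.getCumulativeCpuUsage());
assertEquals(100L, metrics.getVirtualMemoryUsage());
assertEquals(100L, metrics.getPhysicalMemoryUsage());
assertEquals(100L, metrics.getHeapUsage());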
Use of org.apache.hadoop.tools.rumen.ResourceUsageMetrics in project hadoop by apache.
From the class TestGridMixClasses, the method testGridmixSplit:
/*
 * Simple GridmixSplit test: construct a split, write it out, read it back,
 * and check that the getters survive the round trip.
 */
@Test(timeout = 1000)
public void testGridmixSplit() throws Exception {
  Path[] files = { new Path("one"), new Path("two") };
  long[] start = { 1, 2 };
  long[] lengths = { 100, 200 };
  String[] locations = { "locOne", "loctwo" };
  CombineFileSplit cfSplit = new CombineFileSplit(files, start, lengths, locations);
  // Unlike the LoadSplit constructor used below, GridmixSplit does not take
  // resource metrics, so this object remains unused here.
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  metrics.setCumulativeCpuUsage(200);
  double[] reduceBytes = { 8.1d, 8.2d };
  double[] reduceRecords = { 9.1d, 9.2d };
  long[] reduceOutputBytes = { 101L, 102L };
  long[] reduceOutputRecords = { 111L, 112L };
  GridmixSplit test = new GridmixSplit(cfSplit, 2, 3, 4L, 5L, 6L, 7L, reduceBytes, reduceRecords,
      reduceOutputBytes, reduceOutputRecords);
  ByteArrayOutputStream data = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(data);
  test.write(out);
  GridmixSplit copy = new GridmixSplit();
  copy.readFields(new DataInputStream(new ByteArrayInputStream(data.toByteArray())));
  // the deserialized copy should match the original
  assertEquals(test.getId(), copy.getId());
  assertEquals(test.getMapCount(), copy.getMapCount());
  assertEquals(test.getInputRecords(), copy.getInputRecords());
  assertEquals(test.getOutputBytes()[0], copy.getOutputBytes()[0]);
  assertEquals(test.getOutputRecords()[0], copy.getOutputRecords()[0]);
  assertEquals(test.getReduceBytes(0), copy.getReduceBytes(0));
  assertEquals(test.getReduceRecords(0), copy.getReduceRecords(0));
}
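The buffer-and-reread steps above are the standard Writable round-trip pattern. A generic sketch of the same idea follows; the helper name roundTrip and its two-argument shape are assumptions, and it presumes org.apache.hadoop.io.Writable plus the java.io stream classes already imported here.

static <T extends Writable> T roundTrip(T source, T target) throws IOException {
  // serialize into an in-memory buffer ...
  ByteArrayOutputStream data = new ByteArrayOutputStream();
  source.write(new DataOutputStream(data));
  // ... and populate a fresh instance from the same bytes
  target.readFields(new DataInputStream(new ByteArrayInputStream(data.toByteArray())));
  return target;
}

With such a helper, GridmixSplit copy = roundTrip(test, new GridmixSplit()); would replace the four lines of stream plumbing in the test.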
Use of org.apache.hadoop.tools.rumen.ResourceUsageMetrics in project hadoop by apache.
From the class TestGridMixClasses, the method testLoadJobLoadRecordReader:
/*
 * test LoadRecordReader. The reader pulls its data from the files in the split.
 */
@Test(timeout = 3000)
public void testLoadJobLoadRecordReader() throws Exception {
  LoadJob.LoadRecordReader test = new LoadJob.LoadRecordReader();
  Configuration conf = new Configuration();
  FileSystem fs1 = mock(FileSystem.class);
  when(fs1.open((Path) anyObject())).thenReturn(new FakeFSDataInputStream(new FakeInputStream()));
  Path p1 = mock(Path.class);
  when(p1.getFileSystem((JobConf) anyObject())).thenReturn(fs1);
  FileSystem fs2 = mock(FileSystem.class);
  when(fs2.open((Path) anyObject())).thenReturn(new FakeFSDataInputStream(new FakeInputStream()));
  Path p2 = mock(Path.class);
  when(p2.getFileSystem((JobConf) anyObject())).thenReturn(fs2);
  Path[] paths = { p1, p2 };
  long[] start = { 0, 0 };
  long[] lengths = { 1000, 1000 };
  String[] locations = { "temp1", "temp2" };
  CombineFileSplit cfsplit = new CombineFileSplit(paths, start, lengths, locations);
  double[] reduceBytes = { 100, 100 };
  double[] reduceRecords = { 2, 2 };
  long[] reduceOutputBytes = { 500, 500 };
  long[] reduceOutputRecords = { 2, 2 };
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  ResourceUsageMetrics[] rMetrics = { new ResourceUsageMetrics(), new ResourceUsageMetrics() };
  LoadSplit input = new LoadSplit(cfsplit, 2, 3, 1500L, 2L, 3000L, 2L, reduceBytes, reduceRecords,
      reduceOutputBytes, reduceOutputRecords, metrics, rMetrics);
  TaskAttemptID taskId = new TaskAttemptID();
  TaskAttemptContext ctx = new TaskAttemptContextImpl(conf, taskId);
  test.initialize(input, ctx);
  GridmixRecord gr = test.getCurrentValue();
  int counter = 0;
  while (test.nextKeyValue()) {
    gr = test.getCurrentValue();
    if (counter == 0) {
      // first file read: 1000 of the split's 2000 bytes consumed
      assertEquals(0.5, test.getProgress(), 0.001);
    } else if (counter == 1) {
      // second file read: all 2000 bytes consumed
      assertEquals(1.0, test.getProgress(), 0.001);
    }
    assertEquals(1000, gr.getSize());
    counter++;
  }
  assertEquals(1000, gr.getSize());
  // both files have been read
  assertEquals(2, counter);
  test.close();
}
Use of org.apache.hadoop.tools.rumen.ResourceUsageMetrics in project hadoop by apache.
From the class TestGridMixClasses, the method getLoadSplit:
private LoadSplit getLoadSplit() throws Exception {
  Path[] files = { new Path("one"), new Path("two") };
  long[] start = { 1, 2 };
  long[] lengths = { 100, 200 };
  String[] locations = { "locOne", "loctwo" };
  CombineFileSplit cfSplit = new CombineFileSplit(files, start, lengths, locations);
  ResourceUsageMetrics metrics = new ResourceUsageMetrics();
  metrics.setCumulativeCpuUsage(200);
  ResourceUsageMetrics[] rMetrics = { metrics };
  double[] reduceBytes = { 8.1d, 8.2d };
  double[] reduceRecords = { 9.1d, 9.2d };
  long[] reduceOutputBytes = { 101L, 102L };
  long[] reduceOutputRecords = { 111L, 112L };
  return new LoadSplit(cfSplit, 2, 1, 4L, 5L, 6L, 7L, reduceBytes, reduceRecords,
      reduceOutputBytes, reduceOutputRecords, metrics, rMetrics);
}
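A hypothetical consumer of this fixture, mirroring the GridmixSplit round trip above; it assumes LoadSplit has a no-argument constructor (implied by the readFields usage earlier) and a getId accessor like GridmixSplit's.

LoadSplit original = getLoadSplit();
ByteArrayOutputStream data = new ByteArrayOutputStream();
original.write(new DataOutputStream(data));
// deserialize into a fresh split and spot-check one field
LoadSplit copy = new LoadSplit();
copy.readFields(new DataInputStream(new ByteArrayInputStream(data.toByteArray())));
assertEquals(original.getId(), copy.getId());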