Use of org.eclipse.test.internal.performance.data.Sample in project eclipse.platform.releng by eclipse.
Class Evaluator, method evaluate:
@Override
public void evaluate(PerformanceMeter performanceMeter) throws RuntimeException {
if (fCheckers == null)
// nothing to do
return;
// get reference build tag
Variations refKeys = PerformanceTestPlugin.getAssertAgainst();
String assertKey = System.getProperty(PerformanceTestPlugin.ECLIPSE_PERF_ASSERTAGAINST);
if (refKeys == null) {
// $NON-NLS-1$ //$NON-NLS-2$
PerformanceTestPlugin.logWarning("refkeys was null. " + PerformanceTestPlugin.ECLIPSE_PERF_ASSERTAGAINST + " was " + assertKey);
// nothing to do
return;
}
// else
// $NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
PerformanceTestPlugin.logInfo("refkeys was: " + refKeys.toString() + " \n\t based on " + PerformanceTestPlugin.ECLIPSE_PERF_ASSERTAGAINST + " being set to " + assertKey);
if (!(performanceMeter instanceof InternalPerformanceMeter))
// we cannot handle this.
return;
InternalPerformanceMeter ipm = (InternalPerformanceMeter) performanceMeter;
Sample session = ipm.getSample();
// $NON-NLS-1$
Assert.assertTrue("metering session is null", session != null);
String scenarioName = session.getScenarioID();
// determine all dimensions we need
HashSet<Dim> allDimensions = new HashSet<>();
for (int i = 0; i < fCheckers.length; i++) {
AssertChecker chk = fCheckers[i];
Dim[] dims = chk.getDimensions();
for (int j = 0; j < dims.length; j++) allDimensions.add(dims[j]);
}
// get data for this session
DataPoint[] sessionDatapoints;
Variations config = PerformanceTestPlugin.getVariations();
if (config != null)
sessionDatapoints = DB.queryDataPoints(config, scenarioName, allDimensions);
else
sessionDatapoints = session.getDataPoints();
if (sessionDatapoints == null || sessionDatapoints.length == 0) {
// $NON-NLS-1$ //$NON-NLS-2$
PerformanceTestPlugin.logWarning("no session data named '" + config + "' found");
return;
}
// get reference data
DataPoint[] datapoints = DB.queryDataPoints(refKeys, scenarioName, allDimensions);
if (datapoints == null || datapoints.length == 0) {
// $NON-NLS-1$ //$NON-NLS-2$
PerformanceTestPlugin.logWarning("no reference data named '" + refKeys + "' found");
return;
}
// calculate the average
StatisticsSession referenceStats = new StatisticsSession(datapoints);
StatisticsSession measuredStats = new StatisticsSession(sessionDatapoints);
// $NON-NLS-1$ //$NON-NLS-2$
StringBuffer failMesg = new StringBuffer("Performance criteria not met when compared to '" + refKeys + "':");
boolean pass = true;
for (int i = 0; i < fCheckers.length; i++) {
AssertChecker chk = fCheckers[i];
pass &= chk.test(referenceStats, measuredStats, failMesg);
}
if (!pass) {
if (config != null)
DB.markAsFailed(config, session, failMesg.toString());
// else
// Assert.assertTrue(failMesg.toString(), false);
}
}
Use of org.eclipse.test.internal.performance.data.Sample in project eclipse.platform.releng by eclipse.
Class OSPerformanceMeter, method getSample:
/**
 * Snapshots the data points collected so far into a {@link Sample},
 * tagged with the scenario name, the meter's start time, and the run
 * properties gathered by {@code collectRunInfo}.
 *
 * @return a new sample, or {@code null} if no data points were collected
 */
@Override
public Sample getSample() {
    if (fDataPoints == null) {
        return null;
    }
    // raw HashMap kept deliberately: the map's element types are dictated by
    // collectRunInfo/Sample, whose signatures are not visible here
    HashMap runProperties = new HashMap();
    collectRunInfo(runProperties);
    DataPoint[] points = fDataPoints.toArray(new DataPoint[fDataPoints.size()]);
    return new Sample(getScenarioName(), fStartTime, runProperties, points);
}
Use of org.eclipse.test.internal.performance.data.Sample in project eclipse.platform.releng by eclipse.
Class SystemTimePerformanceMeter, method getSample:
/**
 * Builds a {@link Sample} from the recorded system times. Every measured
 * interval contributes two consecutive data points: the start time tagged
 * {@code BEFORE} followed by the stop time tagged {@code AFTER}.
 *
 * @return a new sample covering all recorded intervals
 */
@Override
public Sample getSample() {
    Assert.isTrue(fStartTime.size() == fStopTime.size());
    // properties left empty; driver/hostname recording is disabled
    // (was present as commented-out code)
    Map properties = new HashMap();
    int intervalCount = fStartTime.size();
    DataPoint[] data = new DataPoint[2 * intervalCount];
    int slot = 0;
    for (int i = 0; i < intervalCount; i++) {
        data[slot++] = createDataPoint(BEFORE, InternalDimensions.SYSTEM_TIME, fStartTime.get(i).longValue());
        data[slot++] = createDataPoint(AFTER, InternalDimensions.SYSTEM_TIME, fStopTime.get(i).longValue());
    }
    return new Sample(getScenarioName(), fStartDate, properties, data);
}
Aggregations