Use of org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel in project elasticsearch by elastic.
The class MovAvgUnitTests, method testEWMAMovAvgModel.
public void testEWMAMovAvgModel() {
    double alpha = randomDouble();
    MovAvgModel model = new EwmaModel(alpha);

    int numValues = randomIntBetween(1, 100);
    int windowSize = randomIntBetween(1, 50);

    EvictingQueue<Double> window = new EvictingQueue<>(windowSize);
    for (int i = 0; i < numValues; i++) {
        double randValue = randomDouble();

        if (i == 0) {
            window.offer(randValue);
            continue;
        }

        // Compute the expected EWMA by hand over the current window contents
        double avg = 0;
        boolean first = true;
        for (double value : window) {
            if (first) {
                avg = value;
                first = false;
            } else {
                avg = (value * alpha) + (avg * (1 - alpha));
            }
        }

        double expected = avg;
        double actual = model.next(window);
        assertThat(Double.compare(expected, actual), equalTo(0));
        window.offer(randValue);
    }
}
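The hand-computed expectation in this test is just the exponentially weighted moving average recurrence, applied to the window contents x_1, ..., x_n in insertion order:

    s_1 = x_1, \qquad s_t = \alpha\, x_t + (1 - \alpha)\, s_{t-1} \quad (t = 2, \dots, n), \qquad \widehat{x}_{n+1} = s_n

so model.next(window) is checked against s_n, the smoothed value after folding in the newest element of the window.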
Use of org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel in project elasticsearch by elastic.
The class SimulatedAnealingMinimizer, method minimize.
/**
 * Runs the simulated annealing algorithm and produces a model with new coefficients that, theoretically,
 * fit the data better and generalize to future forecasts without overfitting.
 *
 * @param model The MovAvgModel whose coefficients will be optimized
 * @param train A training set provided to the model, from which predictions will be generated
 * @param test  A test set of data to compare the predictions against and derive a cost for the model
 * @return A new, minimized model that (theoretically) better fits the data
 */
public static MovAvgModel minimize(MovAvgModel model, EvictingQueue<Double> train, double[] test) {
    double temp = 1;
    double minTemp = 0.0001;
    int iterations = 100;
    double alpha = 0.9;

    MovAvgModel bestModel = model;
    MovAvgModel oldModel = model;
    double oldCost = cost(model, train, test);
    double bestCost = oldCost;

    while (temp > minTemp) {
        for (int i = 0; i < iterations; i++) {
            MovAvgModel newModel = oldModel.neighboringModel();
            double newCost = cost(newModel, train, test);

            double ap = acceptanceProbability(oldCost, newCost, temp);
            if (ap > Math.random()) {
                oldModel = newModel;
                oldCost = newCost;

                if (newCost < bestCost) {
                    bestCost = newCost;
                    bestModel = newModel;
                }
            }
        }
        temp *= alpha;
    }
    return bestModel;
}
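For orientation, a minimal sketch of how minimize might be invoked. The training window, test values, and starting alpha are illustrative only, and the sketch assumes the caller can see the class and that an EwmaModel is an acceptable seed model, as in the unit tests above; the cost and acceptanceProbability helpers referenced in the loop are internal to the class and not shown here.

// Hypothetical usage sketch -- values and setup are illustrative, not taken from the Elasticsearch test suite.
EvictingQueue<Double> train = new EvictingQueue<>(30);
for (double v : new double[] { 10.1, 10.4, 10.2, 10.9, 11.3, 11.0 }) {
    train.offer(v);
}
double[] test = new double[] { 11.4, 11.8 };   // held-out values used to score candidate models

MovAvgModel seed = new EwmaModel(0.5);         // starting coefficients for the annealing walk
MovAvgModel tuned = SimulatedAnealingMinimizer.minimize(seed, train, test);
// 'tuned' is the neighboring model with the lowest cost observed on 'test' during the walk.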
Use of org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel in project elasticsearch by elastic.
The class MovAvgPipelineAggregationBuilder, method parse.
public static MovAvgPipelineAggregationBuilder parse(ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageMdelParserRegistry,
        String pipelineAggregatorName, QueryParseContext context) throws IOException {
    XContentParser parser = context.parser();
    XContentParser.Token token;
    String currentFieldName = null;
    String[] bucketsPaths = null;
    String format = null;

    GapPolicy gapPolicy = null;
    Integer window = null;
    Map<String, Object> settings = null;
    String model = null;
    Integer predict = null;
    Boolean minimize = null;

    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.VALUE_NUMBER) {
            if (WINDOW.match(currentFieldName)) {
                window = parser.intValue();
                if (window <= 0) {
                    throw new ParsingException(parser.getTokenLocation(), "[" + currentFieldName + "] value must be a positive, "
                            + "non-zero integer. Value supplied was [" + window + "] in [" + pipelineAggregatorName + "].");
                }
            } else if (PREDICT.match(currentFieldName)) {
                predict = parser.intValue();
                if (predict <= 0) {
                    throw new ParsingException(parser.getTokenLocation(), "[" + currentFieldName + "] value must be a positive integer."
                            + " Value supplied was [" + predict + "] in [" + pipelineAggregatorName + "].");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
            }
        } else if (token == XContentParser.Token.VALUE_STRING) {
            if (FORMAT.match(currentFieldName)) {
                format = parser.text();
            } else if (BUCKETS_PATH.match(currentFieldName)) {
                bucketsPaths = new String[] { parser.text() };
            } else if (GAP_POLICY.match(currentFieldName)) {
                gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
            } else if (MODEL.match(currentFieldName)) {
                model = parser.text();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
            }
        } else if (token == XContentParser.Token.START_ARRAY) {
            if (BUCKETS_PATH.match(currentFieldName)) {
                List<String> paths = new ArrayList<>();
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    String path = parser.text();
                    paths.add(path);
                }
                bucketsPaths = paths.toArray(new String[paths.size()]);
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
            }
        } else if (token == XContentParser.Token.START_OBJECT) {
            if (SETTINGS.match(currentFieldName)) {
                settings = parser.map();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
            }
        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
            if (MINIMIZE.match(currentFieldName)) {
                minimize = parser.booleanValue();
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" + currentFieldName + "].");
            }
        } else {
            throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " in [" + pipelineAggregatorName + "].");
        }
    }

    if (bucketsPaths == null) {
        throw new ParsingException(parser.getTokenLocation(), "Missing required field [" + BUCKETS_PATH.getPreferredName()
                + "] for movingAvg aggregation [" + pipelineAggregatorName + "]");
    }

    MovAvgPipelineAggregationBuilder factory = new MovAvgPipelineAggregationBuilder(pipelineAggregatorName, bucketsPaths[0]);
    if (format != null) {
        factory.format(format);
    }
    if (gapPolicy != null) {
        factory.gapPolicy(gapPolicy);
    }
    if (window != null) {
        factory.window(window);
    }
    if (predict != null) {
        factory.predict(predict);
    }
    if (model != null) {
        MovAvgModel.AbstractModelParser modelParser = movingAverageMdelParserRegistry.lookup(model, parser.getTokenLocation());
        MovAvgModel movAvgModel;
        try {
            movAvgModel = modelParser.parse(settings, pipelineAggregatorName, factory.window());
        } catch (ParseException exception) {
            throw new ParsingException(parser.getTokenLocation(), "Could not parse settings for model [" + model + "].", exception);
        }
        factory.model(movAvgModel);
    }
    if (minimize != null) {
        factory.minimize(minimize);
    }
    return factory;
}
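Every key the parser accepts (buckets_path, format, gap_policy, window, predict, model, settings, minimize) is funneled into the fluent setters used at the end of the method, so an equivalent aggregation can be assembled programmatically. A minimal sketch, assuming only the setters visible above; the aggregation name, buckets path, and coefficient value are illustrative:

// Programmatic equivalent of a parsed moving_avg request (illustrative values only).
MovAvgPipelineAggregationBuilder movAvg =
        new MovAvgPipelineAggregationBuilder("the_movavg", "the_sum"); // aggregation name and buckets_path
movAvg.window(30);                  // must be a positive, non-zero integer
movAvg.model(new EwmaModel(0.3));   // stands in for the model + settings pair in the JSON form
movAvg.predict(5);                  // forecast buckets past the end of the series; must be positive
movAvg.minimize(true);              // request coefficient minimization (see SimulatedAnealingMinimizer above)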
Use of org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel in project elasticsearch by elastic.
The class MovAvgUnitTests, method testLinearPredictionModel.
public void testLinearPredictionModel() {
    MovAvgModel model = new LinearModel();

    int windowSize = randomIntBetween(1, 50);
    int numPredictions = randomIntBetween(1, 50);

    EvictingQueue<Double> window = new EvictingQueue<>(windowSize);
    for (int i = 0; i < windowSize; i++) {
        window.offer(randomDouble());
    }
    double[] actual = model.predict(window, numPredictions);
    double[] expected = new double[numPredictions];

    double avg = 0;
    long totalWeight = 1;
    long current = 1;

    for (double value : window) {
        avg += value * current;
        totalWeight += current;
        current += 1;
    }
    avg = avg / totalWeight;
    Arrays.fill(expected, avg);

    for (int i = 0; i < numPredictions; i++) {
        assertThat(Double.compare(expected[i], actual[i]), equalTo(0));
    }
}
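The expectation computed here is a linearly weighted average in which newer values carry larger weights; with window contents x_1, ..., x_n in insertion order, the loop evaluates

    \widehat{x} = \frac{\sum_{i=1}^{n} i\, x_i}{1 + \sum_{i=1}^{n} i}

The extra 1 in the denominator mirrors the initialization of totalWeight to 1, matching LinearModel's own arithmetic, and because the linear model forecasts a constant value, all numPredictions entries of the expected array are filled with the same number.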
Use of org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel in project elasticsearch by elastic.
The class MovAvgUnitTests, method testHoltWintersAdditiveModel.
public void testHoltWintersAdditiveModel() {
    double alpha = randomDouble();
    double beta = randomDouble();
    double gamma = randomDouble();
    int period = randomIntBetween(1, 10);
    MovAvgModel model = new HoltWintersModel(alpha, beta, gamma, period, HoltWintersModel.SeasonalityType.ADDITIVE, false);

    // HW requires at least two periods of data
    int windowSize = randomIntBetween(period * 2, 50);

    EvictingQueue<Double> window = new EvictingQueue<>(windowSize);
    for (int i = 0; i < windowSize; i++) {
        window.offer(randomDouble());
    }

    // Smoothed value
    double s = 0;
    double last_s = 0;

    // Trend value
    double b = 0;
    double last_b = 0;

    // Seasonal value
    double[] seasonal = new double[windowSize];

    int counter = 0;
    double[] vs = new double[windowSize];
    for (double v : window) {
        vs[counter] = v;
        counter += 1;
    }

    // Calculate the slopes between first and second season for each period
    for (int i = 0; i < period; i++) {
        s += vs[i];
        b += (vs[i + period] - vs[i]) / period;
    }
    s /= period;
    b /= period;
    last_s = s;

    // Calculate first seasonal
    if (Double.compare(s, 0.0) == 0 || Double.compare(s, -0.0) == 0) {
        Arrays.fill(seasonal, 0.0);
    } else {
        for (int i = 0; i < period; i++) {
            seasonal[i] = vs[i] / s;
        }
    }

    for (int i = period; i < vs.length; i++) {
        s = alpha * (vs[i] - seasonal[i - period]) + (1.0d - alpha) * (last_s + last_b);
        b = beta * (s - last_s) + (1 - beta) * last_b;

        seasonal[i] = gamma * (vs[i] - (last_s - last_b)) + (1 - gamma) * seasonal[i - period];
        last_s = s;
        last_b = b;
    }

    int idx = window.size() - period + (0 % period);
    double expected = s + (1 * b) + seasonal[idx];
    double actual = model.next(window);
    assertThat(Double.compare(expected, actual), equalTo(0));
}
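In equation form, the recurrences the main loop mirrors, with x_t the window values, p the period, level s_t, trend b_t, and seasonal term c_t, are

    s_t = \alpha\,(x_t - c_{t-p}) + (1 - \alpha)\,(s_{t-1} + b_{t-1})
    b_t = \beta\,(s_t - s_{t-1}) + (1 - \beta)\, b_{t-1}
    c_t = \gamma\,(x_t - (s_{t-1} - b_{t-1})) + (1 - \gamma)\, c_{t-p}

and the one-step-ahead value checked against model.next(window) is \widehat{x}_{n+1} = s_n + b_n + c_{n+1-p}, which is exactly the seasonal entry selected by the index expression window.size() - period + (0 % period).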