Example use of com.amazon.randomcutforest.parkservices.AnomalyDescriptor from the project random-cut-forest-by-aws (by AWS):
the method testRoundTripImpute of the class ThresholdedRandomCutForestMapperTest.
@ParameterizedTest
@MethodSource("args")
public void testRoundTripImpute(TransformMethod transformMethod, ImputationMethod imputationMethod) {
    // Two forests built with an identical configuration (same random seed) must
    // produce identical scores on the same stream; "second" is then serialized,
    // deserialized into "third", which must keep agreeing with "first".
    int sampleSize = 256;
    int baseDimensions = 1;
    int shingleSize = 8;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    // Both forests now use the same builder style; the original mixed
    // ThresholdedRandomCutForest.builder() with new Builder<>().
    ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
            .forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)
            .transformMethod(transformMethod).imputationMethod(imputationMethod)
            .fillValues(new double[] { 1.0 }).anomalyRate(0.01).build();
    ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
            .forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)
            .transformMethod(transformMethod).imputationMethod(imputationMethod)
            .fillValues(new double[] { 1.0 }).anomalyRate(0.01).build();
    Random r = new Random();
    long count = 0;
    MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
            seed, baseDimensions);
    for (double[] point : dataWithKeys.data) {
        // Drop roughly 10% of the points so STREAMING_IMPUTE mode has gaps to fill.
        if (r.nextDouble() > 0.1) {
            // Jitter the timestamp by +/- 5 around a regular 1000-tick cadence.
            long stamp = 1000 * count + r.nextInt(10) - 5;
            AnomalyDescriptor firstResult = first.process(point, stamp);
            AnomalyDescriptor secondResult = second.process(point, stamp);
            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
        }
        ++count;
    }
    // serialize + deserialize
    ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
    ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
    MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
            baseDimensions);
    // update the re-instantiated forest; "third" must track "first" exactly
    for (double[] point : testData.data) {
        long stamp = 1000 * count + r.nextInt(10) - 5;
        AnomalyDescriptor firstResult = first.process(point, stamp);
        AnomalyDescriptor thirdResult = third.process(point, stamp);
        assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
        ++count;
    }
}
Example use of com.amazon.randomcutforest.parkservices.AnomalyDescriptor from the project random-cut-forest-by-aws (by AWS):
the method testRoundTripStandardShingledInternal of the class ThresholdedRandomCutForestMapperTest.
@Test
public void testRoundTripStandardShingledInternal() {
    // A plain RandomCutForest and two thresholded forests share one seed; all
    // three must agree on scores, and "second" must survive a serialization
    // round trip ("third") without diverging from "first".
    int sampleSize = 256;
    int baseDimensions = 2;
    int shingleSize = 8;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)
            .precision(Precision.FLOAT_32).internalShinglingEnabled(true).shingleSize(shingleSize)
            .randomSeed(seed).build();
    // "first" disables the bounding box cache; scores must still match "second".
    ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
            .shingleSize(shingleSize).anomalyRate(0.01).adjustThreshold(true).boundingBoxCacheFraction(0).build();
    ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
            .shingleSize(shingleSize).anomalyRate(0.01).adjustThreshold(true).build();
    // Random lower threshold in [0.75, 1.25), applied identically to both forests.
    double value = 0.75 + 0.5 * new Random().nextDouble();
    first.setLowerThreshold(value);
    second.setLowerThreshold(value);
    Random r = new Random();
    MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,
            seed, baseDimensions);
    for (double[] point : dataWithKeys.data) {
        AnomalyDescriptor firstResult = first.process(point, 0L);
        AnomalyDescriptor secondResult = second.process(point, 0L);
        assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
        assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-4);
        if (firstResult.getAnomalyGrade() > 0) {
            assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
            // Was a bare Java "assert", which is silently skipped unless the JVM
            // runs with -ea; use a JUnit assertion so the check always executes.
            assertEquals(true, firstResult.getRCFScore() >= value);
        }
        forest.update(point);
    }
    // serialize + deserialize
    ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
    ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));
    MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,
            baseDimensions);
    // update re-instantiated forest
    for (double[] point : testData.data) {
        AnomalyDescriptor firstResult = first.process(point, 0L);
        AnomalyDescriptor secondResult = second.process(point, 0L);
        AnomalyDescriptor thirdResult = third.process(point, 0L);
        double score = forest.getAnomalyScore(point);
        assertEquals(score, firstResult.getRCFScore(), 1e-4);
        assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
        assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);
        assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);
        forest.update(point);
    }
}
Example use of com.amazon.randomcutforest.parkservices.AnomalyDescriptor from the project random-cut-forest-by-aws (by AWS):
the method testRoundTripAugmentedInitial of the class ThresholdedRandomCutForestMapperTest.
@ParameterizedTest
@EnumSource(value = TransformMethod.class)
public void testRoundTripAugmentedInitial(TransformMethod method) {
    // Two TIME_AUGMENTED forests with identical configuration must stay
    // score-identical while "second" is serialized and deserialized after
    // every single processed point.
    int sampleSize = 256;
    int baseDimensions = 2;
    int shingleSize = 8;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    // Lower threshold; an earlier variant used 0.25 * new Random().nextDouble().
    double value = 1.0;
    ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
            .forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
            .transformMethod(method).anomalyRate(0.01).adjustThreshold(true)
            .weights(new double[] { 1.0, 2.0 }).build();
    ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)
            .forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true).shingleSize(shingleSize)
            .transformMethod(method).anomalyRate(0.01).adjustThreshold(true)
            .weights(new double[] { 1.0, 2.0 }).build();
    first.setLowerThreshold(value);
    second.setLowerThreshold(value);
    // Created once instead of once per point; the mapper appears to hold no
    // per-model state (only toState/toModel are invoked) — confirm if that changes.
    ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
    MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
            baseDimensions);
    for (double[] point : dataWithKeys.data) {
        AnomalyDescriptor firstResult = first.process(point, 0L);
        AnomalyDescriptor secondResult = second.process(point, 0L);
        assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);
        assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);
        // serialize + deserialize before processing the next point
        second = mapper.toModel(mapper.toState(second));
    }
}
Example use of com.amazon.randomcutforest.parkservices.AnomalyDescriptor from the project random-cut-forest-by-aws (by AWS):
the method testRoundTripStandardInitial of the class ThresholdedRandomCutForestMapperTest.
@ParameterizedTest
@EnumSource(value = TransformMethod.class)
public void testRoundTripStandardInitial(TransformMethod method) {
    // A reference forest and a round-tripped forest share one random seed;
    // the round-tripped forest is serialized and deserialized after every
    // processed point and must never diverge from the reference.
    int sampleSize = 256;
    int baseDimensions = 2;
    int shingleSize = 8;
    int dimensions = baseDimensions * shingleSize;
    long seed = new Random().nextLong();
    ThresholdedRandomCutForest reference = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
            .shingleSize(shingleSize).anomalyRate(0.01).adjustThreshold(true).build();
    ThresholdedRandomCutForest roundTripped = new ThresholdedRandomCutForest.Builder<>().compact(true)
            .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)
            .shingleSize(shingleSize).anomalyRate(0.01).adjustThreshold(true).build();
    MultiDimDataWithKey trainingData = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,
            baseDimensions);
    for (double[] input : trainingData.data) {
        AnomalyDescriptor expected = reference.process(input, 0L);
        AnomalyDescriptor actual = roundTripped.process(input, 0L);
        assertEquals(expected.getRCFScore(), actual.getRCFScore(), 1e-10);
        assertEquals(actual.getAnomalyGrade(), expected.getAnomalyGrade(), 1e-10);
        // serialize + deserialize before the next point
        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();
        roundTripped = mapper.toModel(mapper.toState(roundTripped));
    }
}
Example use of com.amazon.randomcutforest.parkservices.AnomalyDescriptor from the project random-cut-forest-by-aws (by AWS):
the method run of the class Thresholded1DGaussianMix.
// Streams a 1-D Gaussian-mixture time series through a time-augmented
// ThresholdedRandomCutForest and prints (a) every generated regime change and
// (b) every point the forest grades as anomalous, together with the values the
// forest expected instead.
@Override
public void run() throws Exception {
// Create and populate a random cut forest
int shingleSize = 4;
int numberOfTrees = 50;
int sampleSize = 256;
Precision precision = Precision.FLOAT_32;
int dataSize = 4 * sampleSize;
// change this to try different number of attributes,
// this parameter is not expected to be larger than 5 for this example
int baseDimensions = 1;
// count doubles as the (regularly spaced) timestamp handed to process()
int count = 0;
int dimensions = baseDimensions * shingleSize;
ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true).dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.TIME_AUGMENTED).build();
long seed = new Random().nextLong();
System.out.println("Anomalies would correspond to a run, based on a change of state.");
System.out.println("Each change is normal <-> anomaly; so after the second change the data is normal");
// NOTE(review): the printed seed is never passed to the generator below
// (generateTestDataWithKey is called with literal seed 0), so runs are not
// reproducible from this value — confirm whether seed should be forwarded.
System.out.println("seed = " + seed);
NormalMixtureTestData normalMixtureTestData = new NormalMixtureTestData(10, 1.0, 50, 2.0, 0.01, 0.1);
MultiDimDataWithKey dataWithKeys = normalMixtureTestData.generateTestDataWithKey(dataSize, 1, 0);
// keyCounter walks the ordered list of indices where the generator switched state
int keyCounter = 0;
for (double[] point : dataWithKeys.data) {
AnomalyDescriptor result = forest.process(point, count);
// Announce a change when the forest's internal timestamp reaches the next change index ...
if (keyCounter < dataWithKeys.changeIndices.length && result.getInternalTimeStamp() == dataWithKeys.changeIndices[keyCounter]) {
System.out.println("timestamp " + (result.getInputTimestamp()) + " CHANGE");
++keyCounter;
}
// ... and also when the raw input count does; presumably the two can differ
// in TIME_AUGMENTED mode — TODO confirm both branches are intended.
if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {
System.out.println("timestamp " + (count) + " CHANGE ");
++keyCounter;
}
// Non-zero grade means the forest flags this point as anomalous.
if (result.getAnomalyGrade() != 0) {
System.out.print("timestamp " + (count) + " RESULT value ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getCurrentInput()[i] + ", ");
}
System.out.print("score " + result.getRCFScore() + ", grade " + result.getAnomalyGrade() + ", ");
if (result.isExpectedValuesPresent()) {
// A non-zero relative index attributes the anomaly to an earlier point in the shingle.
if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {
System.out.print(-result.getRelativeIndex() + " steps ago, instead of ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getPastValues()[i] + ", ");
}
System.out.print("expected ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getExpectedValuesList()[0][i] + ", ");
// exact double comparison here only suppresses printing a zero difference
if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {
System.out.print("( " + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
}
}
} else {
// Anomaly attributed to the current point: print expected vs. actual input.
System.out.print("expected ");
for (int i = 0; i < baseDimensions; i++) {
System.out.print(result.getExpectedValuesList()[0][i] + ", ");
if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {
System.out.print("( " + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + " ) ");
}
}
}
}
System.out.println();
}
++count;
}
}
Aggregations