use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.
From the class MonitoringWrapperLayer, method getMetrics:
@Nonnull
@Override
public Map<CharSequence, Object> getMetrics() {
  @Nonnull final HashMap<CharSequence, Object> map = new HashMap<>();
  map.put("class", getInner().getClass().getName());
  map.put("totalBatches", totalBatches);
  map.put("totalItems", totalItems);
  map.put("outputStatistics", forwardSignal.getMetrics());
  map.put("backpropStatistics", backwardSignal.getMetrics());
  if (verbose) {
    map.put("forwardPerformance", forwardPerformance.getMetrics());
    map.put("backwardPerformance", backwardPerformance.getMetrics());
  }
  final double batchesPerItem = totalBatches * 1.0 / totalItems;
  map.put("avgMsPerItem", 1000 * batchesPerItem * forwardPerformance.getMean());
  map.put("medianMsPerItem", 1000 * batchesPerItem * forwardPerformance.getPercentile(0.5));
  final double backpropMean = backwardPerformance.getMean();
  final double backpropMedian = backwardPerformance.getPercentile(0.5);
  map.put("avgMsPerItem_Backward", 1000 * batchesPerItem * backpropMean);
  map.put("medianMsPerItem_Backward", 1000 * batchesPerItem * backpropMedian);
  @Nullable final List<double[]> state = state();
  @Nonnull final ScalarStatistics statistics = new PercentileStatistics();
  // state() is annotated @Nullable, so guard before iterating.
  if (null != state) {
    for (@Nonnull final double[] s : state) {
      for (final double v : s) {
        statistics.add(v);
      }
    }
  }
  if (null != state && statistics.getCount() > 0) {
    @Nonnull final HashMap<CharSequence, Object> weightStats = new HashMap<>();
    weightStats.put("buffers", state.size());
    weightStats.putAll(statistics.getMetrics());
    map.put("weights", weightStats);
  }
  return map;
}
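A minimal standalone sketch of the accumulation pattern above, using only the ScalarStatistics calls that appear in this snippet (add, getCount, getMetrics). The weight values and the class name WeightStatsSketch are illustrative, and PercentileStatistics is assumed to live in the same com.simiacryptus.util.data package:

import com.simiacryptus.util.data.PercentileStatistics; // package assumed, same as ScalarStatistics
import com.simiacryptus.util.data.ScalarStatistics;

public class WeightStatsSketch {
  public static void main(String[] args) {
    // Accumulate every scalar in every state buffer, as getMetrics does above.
    final ScalarStatistics statistics = new PercentileStatistics();
    for (final double[] buffer : new double[][] { { 0.1, -0.2 }, { 0.3 } }) {
      for (final double v : buffer) {
        statistics.add(v);
      }
    }
    // Only report when at least one value was seen.
    if (statistics.getCount() > 0) {
      System.out.println(statistics.getMetrics());
    }
  }
}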
use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.
From the class BatchingTester, method test:
/**
 * Tests that batched evaluation agrees with one-at-a-time evaluation, within tolerance.
 *
 * @param reference      the reference layer to evaluate
 * @param inputPrototype the prototype tensors used to generate randomized batch inputs
 * @return the tolerance statistics comparing batched and singular evaluation
 */
@Nonnull
public ToleranceStatistics test(@Nullable final Layer reference, @Nonnull final Tensor[] inputPrototype) {
  if (null == reference)
    return new ToleranceStatistics();
  // Expand each prototype into a batch of randomized tensors.
  final TensorList[] inputTensorLists = Arrays.stream(inputPrototype)
    .map(t -> TensorArray.wrap(IntStream.range(0, getBatchSize())
      .mapToObj(i -> t.map(v -> getRandom()))
      .toArray(i -> new Tensor[i])))
    .toArray(i -> new TensorList[i]);
  @Nonnull final SimpleResult asABatch;
  final List<SimpleEval> oneAtATime;
  try {
    asABatch = SimpleListEval.run(reference, inputTensorLists);
    oneAtATime = IntStream.range(0, getBatchSize()).mapToObj(batch -> {
      Tensor[] inputTensors = IntStream.range(0, inputTensorLists.length)
        .mapToObj(i -> inputTensorLists[i].get(batch))
        .toArray(i -> new Tensor[i]);
      @Nonnull SimpleEval eval = SimpleEval.run(reference, inputTensors);
      for (@Nonnull Tensor tensor : inputTensors) {
        tensor.freeRef();
      }
      return eval;
    }).collect(Collectors.toList());
  } finally {
    for (@Nonnull TensorList tensorList : inputTensorLists) {
      tensorList.freeRef();
    }
  }
  try {
    TensorList batchOutput = asABatch.getOutput();
    // Compare the batched output at one batch index against the singular evaluation.
    @Nonnull IntFunction<ToleranceStatistics> toleranceStatisticsIntFunction = batch -> {
      @Nullable Tensor batchTensor = batchOutput.get(batch);
      @Nonnull ToleranceStatistics accumulate = new ToleranceStatistics()
        .accumulate(batchTensor.getData(), oneAtATime.get(batch).getOutput().getData());
      batchTensor.freeRef();
      return accumulate;
    };
    int batchLength = batchOutput.length();
    @Nonnull final ToleranceStatistics outputAgreement = IntStream.range(0, Math.min(getBatchSize(), batchLength))
      .mapToObj(toleranceStatisticsIntFunction)
      .reduce((a, b) -> a.combine(b)).get();
    if (!(outputAgreement.absoluteTol.getMax() < tolerance)) {
      logger.info("Batch Output: " + batchOutput.stream().map(x -> {
        String str = x.prettyPrint();
        x.freeRef();
        return str;
      }).collect(Collectors.toList()));
      logger.info("Singular Output: " + oneAtATime.stream().map(x -> x.getOutput().prettyPrint()).collect(Collectors.toList()));
      throw new AssertionError("Output Corrupt: " + outputAgreement);
    }
    ToleranceStatistics derivativeAgreement = IntStream.range(0, Math.min(getBatchSize(), batchLength)).mapToObj(batch -> {
      IntFunction<ToleranceStatistics> statisticsFunction = input -> {
        @Nullable Tensor a = asABatch.getInputDerivative()[input].get(batch);
        Tensor b = oneAtATime.get(batch).getDerivative()[input];
        @Nonnull Tensor diff = a.minus(b);
        logger.info("Error: " + diff.prettyPrint());
        logger.info("Scalar Statistics: " + new ScalarStatistics().add(diff.getData()).getMetrics());
        // Feeds the commented-out density analysis below.
        double[][] points = Arrays.stream(diff.getData()).mapToObj(x -> new double[] { x }).toArray(i -> new double[i][]);
        // logger.info("Density: " + new DensityTree("x").setMinSplitFract(1e-8).setSplitSizeThreshold(2).new Node(points));
        diff.freeRef();
        @Nonnull ToleranceStatistics toleranceStatistics = new ToleranceStatistics().accumulate(a.getData(), b.getData());
        a.freeRef();
        return toleranceStatistics;
      };
      return IntStream.range(0, Math.min(inputPrototype.length, batchLength))
        .mapToObj(statisticsFunction)
        .reduce((a, b) -> a.combine(b)).orElse(null);
    }).filter(x -> x != null).reduce((a, b) -> a.combine(b)).orElse(null);
    if (null != derivativeAgreement && !(derivativeAgreement.absoluteTol.getMax() < tolerance)) {
      throw new AssertionError("Derivatives Corrupt: " + derivativeAgreement);
    }
    return null != derivativeAgreement ? derivativeAgreement.combine(outputAgreement) : outputAgreement;
  } finally {
    asABatch.freeRef();
    oneAtATime.forEach(x -> x.freeRef());
  }
}
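The error-reporting idiom above chains add and getMetrics on a fresh ScalarStatistics, since add accepts a raw double[] and returns the same instance. A minimal sketch of that idiom in isolation; the error array and the class name ErrorSummarySketch are illustrative, standing in for diff.getData():

import com.simiacryptus.util.data.ScalarStatistics;

public class ErrorSummarySketch {
  public static void main(String[] args) {
    // Illustrative per-element differences between batched and singular outputs.
    final double[] error = { 1e-9, -2e-9, 5e-10, 0.0 };
    // add takes the whole array and chains, exactly as in the snippet above.
    System.out.println("Scalar Statistics: " + new ScalarStatistics().add(error).getMetrics());
  }
}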
use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.
From the class SingleDerivativeTester, method testFeedback:
/**
 * Tests that the implemented input feedback (backpropagation) gradient matches the
 * numerically measured gradient, within tolerance.
 *
 * @param statistics      the previously accumulated tolerance statistics
 * @param component       the layer under test
 * @param inputPrototype  the input prototype tensors
 * @param outputPrototype the output prototype tensor
 * @return the combined tolerance statistics
 */
@Nonnull
public ToleranceStatistics testFeedback(@Nonnull ToleranceStatistics statistics, @Nonnull Layer component, @Nonnull Tensor[] inputPrototype, @Nonnull Tensor outputPrototype) {
  Optional<ToleranceStatistics> optional = IntStream.range(0, inputPrototype.length).mapToObj(i -> {
    @Nullable final Tensor measuredGradient = !verify ? null : measureFeedbackGradient(component, i, outputPrototype, inputPrototype);
    @Nonnull final Tensor implementedGradient = getFeedbackGradient(component, i, outputPrototype, inputPrototype);
    // Guard against the verify=false case, where no gradient was measured.
    @Nullable final Tensor difference = null == measuredGradient ? null : measuredGradient.minus(implementedGradient);
    try {
      final ToleranceStatistics result = IntStream.range(0, null == measuredGradient ? 0 : measuredGradient.length()).mapToObj(i1 -> {
        return new ToleranceStatistics().accumulate(measuredGradient.getData()[i1], implementedGradient.getData()[i1]);
      }).reduce((a, b) -> a.combine(b)).orElse(new ToleranceStatistics());
      if (!(result.absoluteTol.getMax() < tolerance))
        throw new AssertionError(result.toString());
      // log.info(String.format("Component: %s", component));
      if (verbose) {
        log.info(String.format("Feedback for input %s", i));
        log.info(String.format("Inputs Values: %s", inputPrototype[i].prettyPrint()));
        log.info(String.format("Value Statistics: %s", new ScalarStatistics().add(inputPrototype[i].getData())));
        log.info(String.format("Implemented Feedback: %s", implementedGradient.prettyPrint()));
        log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
        if (null != measuredGradient) {
          log.info(String.format("Measured Feedback: %s", measuredGradient.prettyPrint()));
          log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
          log.info(String.format("Feedback Error: %s", difference.prettyPrint()));
          log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
        }
      }
      return result;
    } catch (@Nonnull final Throwable e) {
      // log.info(String.format("Component: %s", component));
      log.info(String.format("Feedback for input %s", i));
      log.info(String.format("Inputs Values: %s", inputPrototype[i].prettyPrint()));
      log.info(String.format("Value Statistics: %s", new ScalarStatistics().add(inputPrototype[i].getData())));
      log.info(String.format("Implemented Feedback: %s", implementedGradient.prettyPrint()));
      log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
      if (null != measuredGradient) {
        log.info(String.format("Measured: %s", measuredGradient.prettyPrint()));
        log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
        log.info(String.format("Feedback Error: %s", difference.prettyPrint()));
        log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
      }
      throw e;
    } finally {
      // Release in one place; difference and measuredGradient may be null when verify is off.
      if (null != difference) difference.freeRef();
      if (null != measuredGradient) measuredGradient.freeRef();
      implementedGradient.freeRef();
    }
  }).reduce((a, b) -> a.combine(b));
  if (!optional.isPresent())
    return statistics;
  return statistics.combine(optional.orElse(null));
}
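A minimal sketch of the per-element check at the heart of this method, using the ToleranceStatistics calls exactly as they appear above (accumulate, combine, absoluteTol.getMax()). The gradient arrays, the tolerance value, and the class name GradientCheckSketch are illustrative, and the ToleranceStatistics import path is assumed:

import com.simiacryptus.mindseye.test.ToleranceStatistics; // import path assumed; same class used throughout this page
import java.util.stream.IntStream;

public class GradientCheckSketch {
  public static void main(String[] args) {
    // Illustrative measured (finite-difference) and implemented gradients.
    final double[] measured = { 0.5000001, -0.2499999 };
    final double[] implemented = { 0.5, -0.25 };
    final double tolerance = 1e-4;
    final ToleranceStatistics result = IntStream.range(0, measured.length)
      .mapToObj(i -> new ToleranceStatistics().accumulate(measured[i], implemented[i]))
      .reduce((a, b) -> a.combine(b)).orElse(new ToleranceStatistics());
    // The testers assert that the max absolute error stays below tolerance.
    if (!(result.absoluteTol.getMax() < tolerance))
      throw new AssertionError(result.toString());
  }
}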
use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.
From the class SingleDerivativeTester, method testLearning:
/**
 * Tests that the implemented learning (weight) gradient matches the numerically
 * measured gradient, within tolerance.
 *
 * @param prev            the previously accumulated tolerance statistics
 * @param component       the layer under test
 * @param inputPrototype  the input prototype tensors
 * @param outputPrototype the output prototype tensor
 * @return the combined tolerance statistics
 */
public ToleranceStatistics testLearning(@Nonnull ToleranceStatistics prev, @Nonnull Layer component, Tensor[] inputPrototype, @Nonnull Tensor outputPrototype) {
  return IntStream.range(0, component.state().size()).mapToObj(i -> {
    @Nullable final Tensor measuredGradient = !verify ? null : measureLearningGradient(component, i, outputPrototype, inputPrototype);
    @Nonnull final Tensor implementedGradient = getLearningGradient(component, i, outputPrototype, inputPrototype);
    // Guard against the verify=false case, where no gradient was measured.
    @Nullable final Tensor difference = null == measuredGradient ? null : measuredGradient.minus(implementedGradient);
    try {
      final ToleranceStatistics result = IntStream.range(0, null == measuredGradient ? 0 : measuredGradient.length()).mapToObj(i1 -> {
        return new ToleranceStatistics().accumulate(measuredGradient.getData()[i1], implementedGradient.getData()[i1]);
      }).reduce((a, b) -> a.combine(b)).orElse(new ToleranceStatistics());
      if (!(result.absoluteTol.getMax() < tolerance)) {
        throw new AssertionError(result.toString());
      } else {
        // log.info(String.format("Component: %s", component));
        if (verbose) {
          log.info(String.format("Learning Gradient for weight set %s", i));
          log.info(String.format("Weights: %s", Tensor.prettyPrint(component.state().get(i))));
          log.info(String.format("Implemented Gradient: %s", implementedGradient.prettyPrint()));
          log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
          if (null != measuredGradient) {
            log.info(String.format("Measured Gradient: %s", measuredGradient.prettyPrint()));
            log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
            log.info(String.format("Gradient Error: %s", difference.prettyPrint()));
            log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
          }
        }
        return result;
      }
    } catch (@Nonnull final Throwable e) {
      // log.info(String.format("Component: %s", component));
      log.info(String.format("Learning Gradient for weight set %s", i));
      log.info(String.format("Implemented Gradient: %s", implementedGradient.prettyPrint()));
      log.info(String.format("Implemented Statistics: %s", new ScalarStatistics().add(implementedGradient.getData())));
      if (null != measuredGradient) {
        log.info(String.format("Measured Gradient: %s", measuredGradient.prettyPrint()));
        log.info(String.format("Measured Statistics: %s", new ScalarStatistics().add(measuredGradient.getData())));
        log.info(String.format("Gradient Error: %s", difference.prettyPrint()));
        log.info(String.format("Error Statistics: %s", new ScalarStatistics().add(difference.getData())));
      }
      throw e;
    } finally {
      if (null != difference) difference.freeRef();
      if (null != measuredGradient) measuredGradient.freeRef();
      implementedGradient.freeRef();
    }
  }).reduce((a, b) -> a.combine(b)).map(x -> x.combine(prev)).orElseGet(() -> prev);
}
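Both testers compare an implemented gradient against a numerically measured one. As background, a minimal sketch of the central finite-difference estimate such a measurement is typically based on; this is generic Java, not the project's measureLearningGradient, and every name here is illustrative:

import java.util.function.DoubleUnaryOperator;

public class FiniteDifferenceSketch {
  // Central-difference estimate of df/dx at x; epsilon is an illustrative probe size.
  static double measure(DoubleUnaryOperator f, double x, double epsilon) {
    return (f.applyAsDouble(x + epsilon) - f.applyAsDouble(x - epsilon)) / (2 * epsilon);
  }

  public static void main(String[] args) {
    DoubleUnaryOperator f = x -> x * x; // analytic derivative: f'(x) = 2x
    double measured = measure(f, 3.0, 1e-6);
    double implemented = 6.0; // the gradient an implementation would report at x = 3
    System.out.println("measured=" + measured + " implemented=" + implemented);
  }
}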
use of com.simiacryptus.util.data.ScalarStatistics in project MindsEye by SimiaCryptus.
From the class StyleTransfer, method measureStyle:
/**
 * Measures content and style statistics for each layer type and builds the neural setup.
 *
 * @param style the style setup
 * @return the populated neural setup
 */
public NeuralSetup measureStyle(final StyleSetup<T> style) {
  NeuralSetup<T> self = new NeuralSetup<>(style);
  List<CharSequence> keyList = style.styleImages.keySet().stream().collect(Collectors.toList());
  Tensor contentInput = Tensor.fromRGB(style.contentImage);
  List<Tensor> styleInputs = keyList.stream().map(x -> style.styleImages.get(x)).map(img -> Tensor.fromRGB(img)).collect(Collectors.toList());
  IntStream.range(0, keyList.size()).forEach(i -> {
    self.styleTargets.put(keyList.get(i), new StyleTarget<>());
  });
  self.contentTarget = new ContentTarget<>();
  for (final T layerType : getLayerTypes()) {
    System.gc();
    final PipelineNetwork network = layerType.texture();
    ArtistryUtil.setPrecision(network, style.precision);
    Tensor content = network.eval(contentInput).getDataAndFree().getAndFree(0);
    self.contentTarget.content.put(layerType, content);
    logger.info(String.format("%s : target content = %s", layerType.name(), content.prettyPrint()));
    logger.info(String.format("%s : content statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(content.getData()).getMetrics())));
    for (int i = 0; i < styleInputs.size(); i++) {
      Tensor styleInput = styleInputs.get(i);
      CharSequence key = keyList.get(i);
      StyleTarget<T> styleTarget = self.styleTargets.get(key);
      // Skip layers for which this style contributes neither mean nor covariance terms.
      if (0 == self.style.styles.entrySet().stream()
        .filter(e1 -> e1.getKey().contains(key))
        .map(x -> (LayerStyleParams) x.getValue().params.get(layerType))
        .filter(x -> null != x)
        .filter(x -> x.mean != 0 || x.cov != 0).count())
        continue;
      System.gc();
      Tensor mean = ArtistryUtil.wrapTilesAvg(ArtistryUtil.avg(network.copy())).eval(styleInput).getDataAndFree().getAndFree(0);
      styleTarget.mean.put(layerType, mean);
      logger.info(String.format("%s : style mean = %s", layerType.name(), mean.prettyPrint()));
      logger.info(String.format("%s : mean statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(mean.getData()).getMetrics())));
      // Skip the covariance measurements when no style uses covariance for this layer.
      if (0 == self.style.styles.entrySet().stream()
        .filter(e1 -> e1.getKey().contains(key))
        .map(x -> (LayerStyleParams) x.getValue().params.get(layerType))
        .filter(x -> null != x)
        .filter(x -> x.cov != 0).count())
        continue;
      System.gc();
      Tensor cov0 = ArtistryUtil.wrapTilesAvg(ArtistryUtil.gram(network.copy())).eval(styleInput).getDataAndFree().getAndFree(0);
      Tensor cov1 = ArtistryUtil.wrapTilesAvg(ArtistryUtil.gram(network.copy(), mean)).eval(styleInput).getDataAndFree().getAndFree(0);
      styleTarget.cov0.put(layerType, cov0);
      styleTarget.cov1.put(layerType, cov1);
      int featureBands = mean.getDimensions()[2];
      int covarianceElements = cov1.getDimensions()[2];
      int selectedBands = covarianceElements / featureBands;
      logger.info(String.format("%s : target cov0 = %s", layerType.name(), cov0.reshapeCast(featureBands, selectedBands, 1).prettyPrint()));
      logger.info(String.format("%s : cov0 statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(cov0.getData()).getMetrics())));
      logger.info(String.format("%s : target cov1 = %s", layerType.name(), cov1.reshapeCast(featureBands, selectedBands, 1).prettyPrint()));
      logger.info(String.format("%s : cov1 statistics = %s", layerType.name(), JsonUtil.toJson(new ScalarStatistics().add(cov1.getData()).getMetrics())));
    }
  }
  return self;
}
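The logging idiom above serializes the metric map to JSON. A minimal sketch of it in isolation; the activations array and the class name StyleStatsSketch are illustrative, standing in for content.getData(), and the JsonUtil import path is assumed (it is the same JsonUtil used above):

import com.simiacryptus.util.data.ScalarStatistics;
import com.simiacryptus.util.io.JsonUtil; // import path assumed

public class StyleStatsSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative layer activations; stands in for content.getData() above.
    final double[] activations = { 0.0, 0.5, 1.0, 0.25 };
    System.out.println("content statistics = " + JsonUtil.toJson(new ScalarStatistics().add(activations).getMetrics()));
  }
}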