Use of com.simiacryptus.mindseye.lang.TensorList in the MindsEye project by SimiaCryptus.
Example: the eval method of the CrossDifferenceLayer class.
@Nonnull
@Override
public Result eval(@Nonnull final Result... inObj) {
  assert 1 == inObj.length;
  Arrays.stream(inObj).forEach(nnResult -> nnResult.addRef());
  // Forward pass: for each input tensor of length n, emit the (n^2 - n) / 2
  // pairwise differences in[a] - in[b] for every pair a < b.
  final TensorArray forward = TensorArray.wrap(inObj[0].getData().stream().parallel().map(in -> {
    final int n = in.length();
    final int pairs = (n * n - n) / 2;
    @Nonnull final Tensor out = new Tensor(pairs);
    @Nullable final double[] src = in.getData();
    @Nullable final double[] dst = out.getData();
    for (int a = 0; a < n; a++) {
      for (int b = a + 1; b < n; b++) {
        dst[CrossDifferenceLayer.index(a, b, n)] = src[a] - src[b];
      }
    }
    in.freeRef();
    return out;
  }).toArray(size -> new Tensor[size]));
  return new Result(forward, (@Nonnull final DeltaSet<Layer> buffer, @Nonnull final TensorList data) -> {
    final Result input = inObj[0];
    if (input.isAlive()) {
      // Backward pass: recover n from the pair count, then scatter each pair
      // gradient back to its two operands with opposite signs.
      @Nonnull final TensorArray feedback = TensorArray.wrap(data.stream().parallel().map(grad -> {
        final int pairs = grad.length();
        final int n = (1 + (int) Math.sqrt(1 + 8 * pairs)) / 2;
        @Nonnull final Tensor passback = new Tensor(n);
        @Nullable final double[] dst = passback.getData();
        @Nullable final double[] src = grad.getData();
        for (int a = 0; a < n; a++) {
          for (int b = a + 1; b < n; b++) {
            final double g = src[CrossDifferenceLayer.index(a, b, n)];
            dst[a] += g;
            dst[b] -= g;
          }
        }
        grad.freeRef();
        return passback;
      }).toArray(size -> new Tensor[size]));
      input.accumulate(buffer, feedback);
    }
  }) {
    @Override
    protected void _free() {
      // Release the references taken at the top of eval.
      Arrays.stream(inObj).forEach(nnResult -> nnResult.freeRef());
    }

    @Override
    public boolean isAlive() {
      for (@Nonnull final Result element : inObj) {
        if (element.isAlive()) {
          return true;
        }
      }
      return false;
    }
  };
}
Use of com.simiacryptus.mindseye.lang.TensorList in the MindsEye project by SimiaCryptus.
Example: the eval method of the CrossProductLayer class.
@Nonnull
@Override
public Result eval(@Nonnull final Result... inObj) {
  assert 1 == inObj.length;
  final Result in = inObj[0];
  TensorList indata = in.getData();
  Arrays.stream(inObj).forEach(nnResult -> nnResult.addRef());
  indata.addRef();
  // Forward pass: one output element per unordered pair (x, y) with x < y,
  // holding the product in[x] * in[y].
  return new Result(TensorArray.wrap(indata.stream().parallel().map(tensor -> {
    final int inputDim = tensor.length();
    final int outputDim = (inputDim * inputDim - inputDim) / 2;
    @Nonnull final Tensor result = new Tensor(outputDim);
    @Nullable final double[] inputData = tensor.getData();
    @Nullable final double[] resultData = result.getData();
    IntStream.range(0, inputDim).forEach(x -> {
      IntStream.range(x + 1, inputDim).forEach(y -> {
        resultData[CrossProductLayer.index(x, y, inputDim)] = inputData[x] * inputData[y];
      });
    });
    tensor.freeRef();
    return result;
  }).toArray(i -> new Tensor[i])), (@Nonnull final DeltaSet<Layer> buffer, @Nonnull final TensorList delta) -> {
    if (in.isAlive()) {
      // BUGFIX: the original asserted delta.length() == delta.length(), which is
      // vacuously true. The intended invariant is that the gradient batch matches
      // the input batch.
      assert indata.length() == delta.length();
      @Nonnull TensorArray tensorArray = TensorArray.wrap(IntStream.range(0, delta.length()).parallel().mapToObj(batchIndex -> {
        @Nullable final Tensor deltaTensor = delta.get(batchIndex);
        final int outputDim = deltaTensor.length();
        // Invert outputDim = (n^2 - n) / 2 to recover the input size n.
        final int inputDim = (1 + (int) Math.sqrt(1 + 8 * outputDim)) / 2;
        @Nonnull final Tensor passback = new Tensor(inputDim);
        @Nullable final double[] passbackData = passback.getData();
        @Nullable final double[] tensorData = deltaTensor.getData();
        Tensor inputTensor = indata.get(batchIndex);
        @Nullable final double[] inputData = inputTensor.getData();
        // d(in[x] * in[y]) / d in[x] = in[y], and vice versa.
        IntStream.range(0, inputDim).forEach(x -> {
          IntStream.range(x + 1, inputDim).forEach(y -> {
            passbackData[x] += tensorData[CrossProductLayer.index(x, y, inputDim)] * inputData[y];
            passbackData[y] += tensorData[CrossProductLayer.index(x, y, inputDim)] * inputData[x];
          });
        });
        deltaTensor.freeRef();
        inputTensor.freeRef();
        return passback;
      }).toArray(i -> new Tensor[i]));
      in.accumulate(buffer, tensorArray);
    }
  }) {
    @Override
    protected void _free() {
      // Release the references taken at the top of eval.
      indata.freeRef();
      Arrays.stream(inObj).forEach(nnResult -> nnResult.freeRef());
    }

    @Override
    public boolean isAlive() {
      for (@Nonnull final Result element : inObj) {
        if (element.isAlive()) {
          return true;
        }
      }
      return false;
    }
  };
}
Use of com.simiacryptus.mindseye.lang.TensorList in the MindsEye project by SimiaCryptus.
Example: the eval method of the EntropyLossLayer class.
@Nonnull
@Override
public Result eval(@Nonnull final Result... inObj) {
  // inObj[0]: predicted distribution; inObj[1]: reference (target) distribution.
  assert 2 == inObj.length;
  Arrays.stream(inObj).forEach(nnResult -> nnResult.addRef());
  // Clamp predictions into [zero_tol, max_prob] so Math.log stays finite.
  final double zero_tol = 1e-12;
  final Result in0 = inObj[0];
  TensorList indata = in0.getData();
  indata.addRef();
  // Per-item gradient of the loss w.r.t. the prediction, cached during the
  // forward pass for reuse in the backward pass.
  @Nonnull final Tensor[] gradient = new Tensor[indata.length()];
  final double max_prob = 1.;
  return new Result(TensorArray.wrap(IntStream.range(0, indata.length()).mapToObj(dataIndex -> {
    @Nullable final Tensor l = indata.get(dataIndex);
    @Nullable final Tensor r = inObj[1].getData().get(dataIndex);
    assert l.length() == r.length() : l.length() + " != " + r.length();
    @Nonnull final Tensor gradientTensor = new Tensor(l.getDimensions());
    @Nullable final double[] gradientData = gradientTensor.getData();
    double total = 0;
    @Nullable final double[] ld = l.getData();
    @Nullable final double[] rd = r.getData();
    for (int i = 0; i < l.length(); i++) {
      final double lv = Math.max(Math.min(ld[i], max_prob), zero_tol);
      final double rv = rd[i];
      if (rv > 0) {
        // Cross-entropy term -r*log(l); its derivative w.r.t. l is -r/l.
        gradientData[i] = -rv / lv;
        total += -rv * Math.log(lv);
      } else {
        gradientData[i] = 0;
      }
    }
    l.freeRef();
    r.freeRef();
    assert total >= 0;
    gradient[dataIndex] = gradientTensor;
    // One scalar loss value per batch item.
    @Nonnull final Tensor outValue = new Tensor(new double[] { total }, 1);
    return outValue;
  }).toArray(i -> new Tensor[i])), (@Nonnull final DeltaSet<Layer> buffer, @Nonnull final TensorList delta) -> {
    if (inObj[1].isAlive()) {
      // Gradient w.r.t. the target distribution: d(-r*log(l))/dr = -log(l).
      @Nonnull TensorArray tensorArray = TensorArray.wrap(IntStream.range(0, delta.length()).mapToObj(dataIndex -> {
        Tensor deltaTensor = delta.get(dataIndex);
        @Nullable final Tensor inputTensor = indata.get(dataIndex);
        @Nonnull final Tensor passback = new Tensor(gradient[dataIndex].getDimensions());
        for (int i = 0; i < passback.length(); i++) {
          final double lv = Math.max(Math.min(inputTensor.get(i), max_prob), zero_tol);
          passback.set(i, -deltaTensor.get(0) * Math.log(lv));
        }
        inputTensor.freeRef();
        deltaTensor.freeRef();
        return passback;
      }).toArray(i -> new Tensor[i]));
      inObj[1].accumulate(buffer, tensorArray);
    }
    if (in0.isAlive()) {
      // Gradient w.r.t. the prediction: scale the cached forward gradient by the
      // incoming scalar delta.
      @Nonnull TensorArray tensorArray = TensorArray.wrap(IntStream.range(0, delta.length()).mapToObj(dataIndex -> {
        Tensor tensor = delta.get(dataIndex);
        @Nonnull final Tensor passback = new Tensor(gradient[dataIndex].getDimensions());
        for (int i = 0; i < passback.length(); i++) {
          passback.set(i, tensor.get(0) * gradient[dataIndex].get(i));
        }
        tensor.freeRef();
        return passback;
      }).toArray(i -> new Tensor[i]));
      in0.accumulate(buffer, tensorArray);
    }
  }) {
    @Override
    protected void _free() {
      indata.freeRef();
      Arrays.stream(gradient).forEach(ReferenceCounting::freeRef);
      Arrays.stream(inObj).forEach(ReferenceCounting::freeRef);
    }

    @Override
    public boolean isAlive() {
      // BUGFIX: the original returned in0.isAlive() || in0.isAlive(), testing the
      // same input twice and ignoring the target input that the backward pass
      // accumulates into.
      return in0.isAlive() || inObj[1].isAlive();
    }
  };
}
Use of com.simiacryptus.mindseye.lang.TensorList in the MindsEye project by SimiaCryptus.
Example: the getNNContext method of the TensorListTrainable class.
/**
 * Wraps each column of batch data in a Result so it can be fed to a network.
 * Columns flagged in {@code mask} become differentiable inputs whose feedback is
 * written into the DeltaSet under a PlaceholderLayer keyed by the input's backing
 * array; all other columns become non-differentiable ConstantResults.
 *
 * @param data one TensorList per network input (column)
 * @param mask per-column differentiability flags; may be null or shorter than {@code data}
 * @return one Result per input column
 * @throws IllegalArgumentException if {@code data} is null
 */
public static Result[] getNNContext(@Nullable final TensorList[] data, @Nullable final boolean[] mask) {
if (null == data)
throw new IllegalArgumentException();
int inputs = data.length;
assert 0 < inputs;
// Item count is taken from the first column; the rest are assumed to agree.
int items = data[0].length();
assert 0 < items;
return IntStream.range(0, inputs).mapToObj(col -> {
// Materialize the column as a Tensor[] so the backward closure below can
// look up each item's backing array by row index.
final Tensor[] tensors = IntStream.range(0, items).mapToObj(row -> data[col].get(row)).toArray(i -> new Tensor[i]);
@Nonnull TensorArray tensorArray = TensorArray.create(tensors);
if (null == mask || col >= mask.length || !mask[col]) {
// Unmasked column: constant input, no gradient flows back.
return new ConstantResult(tensorArray);
} else {
return new Result(tensorArray, (@Nonnull final DeltaSet<Layer> buffer, @Nonnull final TensorList delta) -> {
for (int index = 0; index < delta.length(); index++) {
final Tensor dt = delta.get(index);
@Nullable final double[] d = dt.getData();
final Tensor t = tensors[index];
@Nullable final double[] p = t.getData();
// The PlaceholderLayer keys the delta bucket by the raw input array, so
// callers can later retrieve per-input gradients from the buffer.
@Nonnull PlaceholderLayer<double[]> layer = new PlaceholderLayer<>(p);
buffer.get(layer, p).addInPlace(d).freeRef();
dt.freeRef();
layer.freeRef();
}
}) {
@Override
public boolean isAlive() {
// Masked inputs always accept gradient.
return true;
}
};
}
}).toArray(x1 -> new Result[x1]);
}
Use of com.simiacryptus.mindseye.lang.TensorList in the MindsEye project by SimiaCryptus.
Example: the compareLayerDerivatives method of the CudaLayerTester class.
/**
 * Compares the per-layer derivative maps of two evaluations and checks agreement
 * within {@code tolerance}. On disagreement, logs both input-derivative sets and
 * throws an AssertionError.
 *
 * @param expected the reference evaluation
 * @param actual   the evaluation under test
 * @return the combined tolerance statistics, or null if there was nothing to compare
 * @throws AssertionError if the maximum absolute difference is not below {@code tolerance}
 */
@Nullable
public ToleranceStatistics compareLayerDerivatives(final SimpleResult expected, final SimpleResult actual) {
// NOTE(review): `batch` is never used inside the lambda, so every iteration runs
// the identical whole-map comparison and the final reduce combines getBatchSize()
// copies of the same statistics — confirm whether a per-batch slice was intended.
@Nonnull final ToleranceStatistics derivativeAgreement = IntStream.range(0, getBatchSize()).mapToObj(batch -> {
@Nonnull Function<Layer, ToleranceStatistics> compareInputDerivative = input -> {
// NOTE(review): if a key is present in only one of the two maps, get(input)
// returns null here and getDelta() throws NPE — verify the key sets always match.
double[] b = actual.getLayerDerivative().getMap().get(input).getDelta();
double[] a = expected.getLayerDerivative().getMap().get(input).getDelta();
ToleranceStatistics statistics = new ToleranceStatistics().accumulate(a, b);
return statistics;
};
return Stream.concat(actual.getLayerDerivative().getMap().keySet().stream(), expected.getLayerDerivative().getMap().keySet().stream()).distinct().map(compareInputDerivative).reduce((a, b) -> a.combine(b));
}).filter(x -> x.isPresent()).map(x -> x.get()).reduce((a, b) -> a.combine(b)).orElse(null);
if (null != derivativeAgreement && !(derivativeAgreement.absoluteTol.getMax() < tolerance)) {
// Dump both derivative sets for diagnosis before failing.
logger.info("Expected Derivative: " + Arrays.stream(expected.getInputDerivative()).flatMap(TensorList::stream).map(x -> {
String str = x.prettyPrint();
x.freeRef();
return str;
}).collect(Collectors.toList()));
logger.info("Actual Derivative: " + Arrays.stream(actual.getInputDerivative()).flatMap(TensorList::stream).map(x -> {
String str = x.prettyPrint();
x.freeRef();
return str;
}).collect(Collectors.toList()));
throw new AssertionError("Layer Derivatives Corrupt: " + derivativeAgreement);
}
return derivativeAgreement;
}
Aggregations