Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus: class RescaledSubnetLayer, method eval.
@Nullable
@Override
public Result eval(@Nonnull final Result... inObj) {
  assert 1 == inObj.length;
  final TensorList batch = inObj[0].getData();
  @Nonnull final int[] inputDims = batch.getDimensions();
  assert 3 == inputDims.length;
  // At scale 1 the rescaling is a no-op, so delegate directly to the subnetwork.
  if (1 == scale) return subnetwork.eval(inObj);
  @Nonnull final PipelineNetwork network = new PipelineNetwork();
  // Space-to-depth: fold each scale x scale pixel block into the band dimension.
  @Nullable final DAGNode condensed = network.wrap(new ImgReshapeLayer(scale, scale, false));
  // Apply the subnetwork to each subband group, then concatenate the outputs.
  network.wrap(new ImgConcatLayer(), IntStream.range(0, scale * scale).mapToObj(subband -> {
    @Nonnull final int[] select = new int[inputDims[2]];
    for (int i = 0; i < inputDims[2]; i++) {
      select[i] = subband * inputDims[2] + i;
    }
    return network.add(subnetwork, network.wrap(new ImgBandSelectLayer(select), condensed));
  }).toArray(i -> new DAGNode[i]));
  // Depth-to-space: expand the processed bands back into spatial dimensions.
  network.wrap(new ImgReshapeLayer(scale, scale, true));
  Result eval = network.eval(inObj);
  network.freeRef();
  return eval;
}
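For orientation, the band-selection arithmetic above can be checked in isolation. The standalone sketch below (plain Java, no MindsEye types, with hypothetical values for scale and band count) reproduces the select-array computation: the space-to-depth reshape yields scale * scale * bands = 12 bands, and each subband picks out one contiguous group of 3.

public static void main(String[] args) {
  int scale = 2;  // hypothetical example values, not taken from the snippet
  int bands = 3;  // corresponds to inputDims[2]
  for (int subband = 0; subband < scale * scale; subband++) {
    int[] select = new int[bands];
    for (int i = 0; i < bands; i++) {
      select[i] = subband * bands + i;
    }
    // prints: subband 0 -> [0, 1, 2], subband 1 -> [3, 4, 5], ...
    System.out.println("subband " + subband + " -> " + java.util.Arrays.toString(select));
  }
}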
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus: class StochasticSamplingSubnetLayer, method average.
/**
 * Averages a set of sampled results by summing them element-wise and
 * scaling by 1 / samples.length.
 *
 * @param samples the sampled results to average
 * @return the averaged result
 */
public static Result average(final Result[] samples) {
  // Gate network: multiply its input by the broadcast constant 1/n.
  PipelineNetwork gateNetwork = new PipelineNetwork(1);
  gateNetwork.wrap(new ProductLayer(), gateNetwork.getInput(0),
      gateNetwork.wrap(new ValueLayer(new Tensor(1, 1, 1).mapAndFree(v -> 1.0 / samples.length)), new DAGNode[]{}));
  SumInputsLayer sumInputsLayer = new SumInputsLayer();
  try {
    // Sum all samples, then scale; evalAndFree releases the intermediates.
    return gateNetwork.evalAndFree(sumInputsLayer.evalAndFree(samples));
  } finally {
    sumInputsLayer.freeRef();
    gateNetwork.freeRef();
  }
}
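The gate network here computes an element-wise mean: SumInputsLayer adds the sample tensors and ProductLayer multiplies the sum by the constant 1/n. As a minimal sketch of that arithmetic using raw arrays (illustrative only, not the MindsEye Result/Tensor API):

static double[] averageOf(double[][] samples) {
  double[] sum = new double[samples[0].length];
  for (double[] sample : samples) {       // SumInputsLayer: element-wise sum
    for (int i = 0; i < sum.length; i++) {
      sum[i] += sample[i];
    }
  }
  for (int i = 0; i < sum.length; i++) {  // ProductLayer with constant 1/n
    sum[i] *= 1.0 / samples.length;
  }
  return sum;
}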
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus: class FullyConnectedLayer, method explode.
/**
 * Rewrites this fully-connected layer as an equivalent pipeline network
 * built around a 1x1 convolution, so it can reuse the exploded-convolution
 * execution path.
 *
 * @return the pipeline network
 */
@Nonnull
public Layer explode() {
  int inputVol = Tensor.length(inputDims);
  int outVol = Tensor.length(outputDims);
  @Nonnull PipelineNetwork network = new PipelineNetwork(1);
  // Flatten the input into a 1x1 "image" with one band per input element.
  network.wrap(new ReshapeLayer(1, 1, inputVol));
  @Nullable Tensor tensor = this.weights.reshapeCast(1, 1, inputVol * outVol);
  // A 1x1 convolution over inputVol bands computes the same product as the dense weight matrix.
  @Nonnull ConvolutionLayer convolutionLayer = new ConvolutionLayer(1, 1, inputVol, outVol)
      .set(tensor).setBatchBands(getBatchBands());
  @Nonnull ExplodedConvolutionGrid grid = convolutionLayer.getExplodedNetwork();
  convolutionLayer.freeRef();
  tensor.freeRef();
  grid.add(network.getHead());
  grid.freeRef();
  // Restore the declared output dimensions.
  network.wrap(new ReshapeLayer(outputDims));
  network.setName(getName());
  return network;
}
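The identity explode() relies on is that a dense matrix multiply over a flattened input equals a 1x1 convolution over a 1x1 image with one band per element. A minimal sketch of that identity with plain arrays (hypothetical values, not MindsEye API):

int inputVol = 3, outVol = 2;
double[] x = {1.0, 2.0, 3.0};                       // flattened input, one band per element
double[][] w = {{0.1, 0.2, 0.3}, {0.4, 0.5, 0.6}};  // outVol x inputVol weight matrix
double[] y = new double[outVol];
for (int o = 0; o < outVol; o++) {
  for (int i = 0; i < inputVol; i++) {
    y[o] += w[o][i] * x[i];  // identical to the per-output-band sum of a 1x1 convolution
  }
}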
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus: class StochasticSamplingSubnetLayer, method average (with precision).
/**
 * Averages a set of sampled results at the given numeric precision by
 * summing them element-wise and scaling by 1 / samples.length.
 *
 * @param samples   the sampled results to average
 * @param precision the numeric precision for the sum and product layers
 * @return the averaged result
 */
public static Result average(final Result[] samples, final Precision precision) {
  // Same sum-then-scale structure as the single-argument overload, with
  // both layers pinned to the requested precision.
  PipelineNetwork gateNetwork = new PipelineNetwork(1);
  gateNetwork.wrap(new ProductLayer().setPrecision(precision), gateNetwork.getInput(0),
      gateNetwork.wrap(new ValueLayer(new Tensor(1, 1, 1).mapAndFree(v -> 1.0 / samples.length)), new DAGNode[]{}));
  SumInputsLayer sumInputsLayer = new SumInputsLayer().setPrecision(precision);
  try {
    return gateNetwork.evalAndFree(sumInputsLayer.evalAndFree(samples));
  } finally {
    sumInputsLayer.freeRef();
    gateNetwork.freeRef();
  }
}
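Pinning both layers to one Precision keeps every kernel in the average running at a consistent numeric width. A hypothetical call site might look like the line below; Precision.Float is assumed here as an example constant, since the snippet itself only shows the parameter type.

Result averaged = StochasticSamplingSubnetLayer.average(samples, Precision.Float);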
Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus: class ExplodedConvolutionGrid, method add.
/**
 * Adds this exploded convolution grid to the network containing the given
 * input node, inserting zero-padding layers before and after as needed to
 * honor any custom padding.
 *
 * @param input the input node
 * @return the dag node producing the grid's output
 */
public DAGNode add(@Nonnull DAGNode input) {
  assertAlive();
  DAGNetwork network = input.getNetwork();
  int defaultPaddingX = 0;
  int defaultPaddingY = 0;
  boolean customPaddingX = this.convolutionParams.paddingX != null && convolutionParams.paddingX != defaultPaddingX;
  boolean customPaddingY = this.convolutionParams.paddingY != null && convolutionParams.paddingY != defaultPaddingY;
  final DAGNode paddedInput;
  if (customPaddingX || customPaddingY) {
    // Pre-pad by the amount the custom padding exceeds the default.
    int x;
    if (this.convolutionParams.paddingX < -defaultPaddingX) {
      x = this.convolutionParams.paddingX + defaultPaddingX;
    } else if (this.convolutionParams.paddingX > defaultPaddingX) {
      x = this.convolutionParams.paddingX - defaultPaddingX;
    } else {
      x = 0;
    }
    int y;
    if (this.convolutionParams.paddingY < -defaultPaddingY) {
      y = this.convolutionParams.paddingY + defaultPaddingY;
    } else if (this.convolutionParams.paddingY > defaultPaddingY) {
      y = this.convolutionParams.paddingY - defaultPaddingY;
    } else {
      y = 0;
    }
    if (x != 0 || y != 0) {
      paddedInput = network.wrap(new ImgZeroPaddingLayer(x, y).setPrecision(convolutionParams.precision), input);
    } else {
      paddedInput = input;
    }
  } else {
    paddedInput = input;
  }
  InnerNode output;
  if (subLayers.size() == 1) {
    // Single leg: attach it directly.
    output = (InnerNode) subLayers.get(0).add(paddedInput);
  } else {
    // Multiple legs: route each band range through its own subnetwork.
    ImgLinearSubnetLayer linearSubnetLayer = new ImgLinearSubnetLayer();
    subLayers.forEach(leg -> {
      PipelineNetwork subnet = new PipelineNetwork();
      leg.add(subnet.getHead());
      linearSubnetLayer.add(leg.fromBand, leg.toBand, subnet);
    });
    boolean isParallel = CudaSettings.INSTANCE.isConv_para_1();
    linearSubnetLayer.setPrecision(convolutionParams.precision).setParallel(isParallel);
    output = network.wrap(linearSubnetLayer, paddedInput).setParallel(isParallel);
  }
  if (customPaddingX || customPaddingY) {
    // Post-correct: only negative (cropping) adjustments are applied here.
    int x = !customPaddingX ? 0 : (this.convolutionParams.paddingX - defaultPaddingX);
    int y = !customPaddingY ? 0 : (this.convolutionParams.paddingY - defaultPaddingY);
    if (x > 0) x = 0;
    if (y > 0) y = 0;
    if (x != 0 || y != 0) {
      return network.wrap(new ImgZeroPaddingLayer(x, y).setPrecision(convolutionParams.precision), output);
    }
  }
  return output;
}
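With defaultPaddingX and defaultPaddingY fixed at 0 in this method, the padding bookkeeping reduces to two rules: pre-pad by the full custom padding, and post-correct only with negative (cropping) values. A standalone restatement of those rules (plain Java, illustrative only):

static int prePad(int padding, int defaultPadding) {
  // Pre-padding: the amount by which the custom padding exceeds the default.
  if (padding < -defaultPadding) return padding + defaultPadding;
  if (padding > defaultPadding) return padding - defaultPadding;
  return 0;
}

static int postPad(int padding, int defaultPadding) {
  // Post-correction: positive adjustments are suppressed, so only crops apply.
  return Math.min(padding - defaultPadding, 0);
}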