
Example 21 with PipelineNetwork

Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

From the class RescaledSubnetLayer, method eval:

@Nullable
@Override
public Result eval(@Nonnull final Result... inObj) {
    assert 1 == inObj.length;
    final TensorList batch = inObj[0].getData();
    @Nonnull final int[] inputDims = batch.getDimensions();
    assert 3 == inputDims.length;
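    // At scale 1 there is nothing to rearrange; delegate directly to the wrapped subnetwork.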
    if (1 == scale)
        return subnetwork.eval(inObj);
    @Nonnull final PipelineNetwork network = new PipelineNetwork();
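    // Space-to-depth: fold each scale x scale pixel block into the band dimension.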
    @Nullable final DAGNode condensed = network.wrap(new ImgReshapeLayer(scale, scale, false));
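    // Apply one copy of the subnetwork to each group of bands, then concatenate the outputs.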
    network.wrap(new ImgConcatLayer(), IntStream.range(0, scale * scale).mapToObj(subband -> {
        @Nonnull final int[] select = new int[inputDims[2]];
        for (int i = 0; i < inputDims[2]; i++) {
            select[i] = subband * inputDims[2] + i;
        }
        return network.add(subnetwork, network.wrap(new ImgBandSelectLayer(select), condensed));
    }).toArray(i -> new DAGNode[i]));
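    // Depth-to-space: expand the concatenated bands back into the spatial dimensions.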
    network.wrap(new ImgReshapeLayer(scale, scale, true));
    Result eval = network.eval(inObj);
    network.freeRef();
    return eval;
}
Also used : PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) IntStream(java.util.stream.IntStream) JsonObject(com.google.gson.JsonObject) Result(com.simiacryptus.mindseye.lang.Result) DAGNode(com.simiacryptus.mindseye.network.DAGNode) DataSerializer(com.simiacryptus.mindseye.lang.DataSerializer) ArrayList(java.util.ArrayList) List(java.util.List) LayerBase(com.simiacryptus.mindseye.lang.LayerBase) TensorList(com.simiacryptus.mindseye.lang.TensorList) Map(java.util.Map) ImgConcatLayer(com.simiacryptus.mindseye.layers.cudnn.ImgConcatLayer) Layer(com.simiacryptus.mindseye.lang.Layer) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
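
To make the band bookkeeping above concrete, here is a minimal plain-Java sketch (illustration only, no MindsEye types; the class name BandSelectSketch and the sample values scale = 2 with 3 input bands are assumptions): after the space-to-depth reshape there are scale * scale groups of inputDims[2] bands, and each copy of the subnetwork selects one contiguous group.

public final class BandSelectSketch {
    public static void main(String[] args) {
        int scale = 2;
        int bands = 3; // stands in for inputDims[2]
        for (int subband = 0; subband < scale * scale; subband++) {
            int[] select = new int[bands];
            for (int i = 0; i < bands; i++) {
                select[i] = subband * bands + i;
            }
            // e.g. subband 0 -> [0, 1, 2], subband 1 -> [3, 4, 5], ...
            System.out.println("subband " + subband + " -> " + java.util.Arrays.toString(select));
        }
    }
}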

Example 22 with PipelineNetwork

Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

From the class StochasticSamplingSubnetLayer, method average:

/**
 * Averages the given sample results: the samples are summed element-wise and the
 * sum is scaled by 1 / samples.length through a constant-gated product network.
 *
 * @param samples the sample results to average
 * @return the averaged result
 */
public static Result average(final Result[] samples) {
    PipelineNetwork gateNetwork = new PipelineNetwork(1);
    gateNetwork.wrap(new ProductLayer(), gateNetwork.getInput(0), gateNetwork.wrap(new ValueLayer(new Tensor(1, 1, 1).mapAndFree(v -> 1.0 / samples.length)), new DAGNode[] {}));
    SumInputsLayer sumInputsLayer = new SumInputsLayer();
    try {
        return gateNetwork.evalAndFree(sumInputsLayer.evalAndFree(samples));
    } finally {
        sumInputsLayer.freeRef();
        gateNetwork.freeRef();
    }
}
Also used : PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) IntStream(java.util.stream.IntStream) JsonObject(com.google.gson.JsonObject) Arrays(java.util.Arrays) CountingResult(com.simiacryptus.mindseye.network.CountingResult) SumInputsLayer(com.simiacryptus.mindseye.layers.cudnn.SumInputsLayer) Tensor(com.simiacryptus.mindseye.lang.Tensor) Random(java.util.Random) Result(com.simiacryptus.mindseye.lang.Result) ValueLayer(com.simiacryptus.mindseye.layers.java.ValueLayer) DAGNode(com.simiacryptus.mindseye.network.DAGNode) DataSerializer(com.simiacryptus.mindseye.lang.DataSerializer) ArrayList(java.util.ArrayList) List(java.util.List) LayerBase(com.simiacryptus.mindseye.lang.LayerBase) ProductLayer(com.simiacryptus.mindseye.layers.cudnn.ProductLayer) Map(java.util.Map) Layer(com.simiacryptus.mindseye.lang.Layer) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
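
The graph built above encodes a plain arithmetic average: SumInputsLayer adds the sample outputs element-wise and the ProductLayer scales the sum by the constant 1/N held in the ValueLayer. A minimal plain-Java sketch of that arithmetic (illustration only, not MindsEye API; the sample values are made up):

public final class AverageSketch {
    public static void main(String[] args) {
        double[][] samples = { { 1.0, 2.0 }, { 3.0, 4.0 }, { 5.0, 6.0 } };
        double[] sum = new double[samples[0].length];
        for (double[] sample : samples) {        // SumInputsLayer: element-wise sum
            for (int i = 0; i < sum.length; i++) {
                sum[i] += sample[i];
            }
        }
        double gate = 1.0 / samples.length;      // the ValueLayer constant
        for (int i = 0; i < sum.length; i++) {   // ProductLayer: scale by 1/N
            sum[i] *= gate;
        }
        System.out.println(java.util.Arrays.toString(sum)); // approximately [3.0, 4.0]
    }
}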

Example 23 with PipelineNetwork

Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

From the class FullyConnectedLayer, method explode:

/**
 * Explodes this fully connected layer into an equivalent pipeline network that
 * reshapes the input to 1x1xN and applies the weights as a 1x1 convolution.
 *
 * @return the pipeline network
 */
@Nonnull
public Layer explode() {
    int inputVol = Tensor.length(inputDims);
    int outVol = Tensor.length(outputDims);
    @Nonnull PipelineNetwork network = new PipelineNetwork(1);
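    // Flatten the input to a 1x1xinputVol image so the weights can act as a 1x1 convolution.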
    network.wrap(new ReshapeLayer(1, 1, inputVol));
    @Nullable Tensor tensor = this.weights.reshapeCast(1, 1, inputVol * outVol);
    @Nonnull ConvolutionLayer convolutionLayer = new ConvolutionLayer(1, 1, inputVol, outVol).set(tensor).setBatchBands(getBatchBands());
    @Nonnull ExplodedConvolutionGrid grid = convolutionLayer.getExplodedNetwork();
    convolutionLayer.freeRef();
    tensor.freeRef();
    grid.add(network.getHead());
    grid.freeRef();
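    // Reshape the 1x1xoutVol result back to the declared output dimensions.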
    network.wrap(new ReshapeLayer(outputDims));
    network.setName(getName());
    return network;
}
Also used : Tensor(com.simiacryptus.mindseye.lang.Tensor) Nonnull(javax.annotation.Nonnull) PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) ReshapeLayer(com.simiacryptus.mindseye.layers.java.ReshapeLayer) Nullable(javax.annotation.Nullable)
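
The rewrite above rests on a simple identity: a fully connected layer over inputVol inputs and outVol outputs computes exactly what a 1x1 convolution computes on a 1x1xinputVol image with inputVol * outVol kernel weights. A minimal plain-Java sketch of that identity (illustration only; the weight and input values are made up):

public final class FullyConnectedAsConvSketch {
    public static void main(String[] args) {
        double[] input = { 1.0, 2.0, 3.0 };     // inputVol = 3, i.e. a 1x1x3 image
        double[][] weights = {                  // inputVol x outVol, with outVol = 2
            { 0.1, 0.2 },
            { 0.3, 0.4 },
            { 0.5, 0.6 }
        };
        double[] output = new double[2];
        for (int o = 0; o < output.length; o++) {
            for (int i = 0; i < input.length; i++) {
                // identical to a 1x1 convolution evaluated at the single spatial position
                output[o] += input[i] * weights[i][o];
            }
        }
        System.out.println(java.util.Arrays.toString(output)); // approximately [2.2, 2.8]
    }
}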

Example 24 with PipelineNetwork

Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

From the class StochasticSamplingSubnetLayer, method average:

/**
 * Averages the given sample results at the requested compute precision: the samples
 * are summed element-wise and the sum is scaled by 1 / samples.length.
 *
 * @param samples   the sample results to average
 * @param precision the precision used by the sum and product layers
 * @return the averaged result
 */
public static Result average(final Result[] samples, final Precision precision) {
    PipelineNetwork gateNetwork = new PipelineNetwork(1);
    gateNetwork.wrap(new ProductLayer().setPrecision(precision), gateNetwork.getInput(0), gateNetwork.wrap(new ValueLayer(new Tensor(1, 1, 1).mapAndFree(v -> 1.0 / samples.length)), new DAGNode[] {}));
    SumInputsLayer sumInputsLayer = new SumInputsLayer().setPrecision(precision);
    try {
        return gateNetwork.evalAndFree(sumInputsLayer.evalAndFree(samples));
    } finally {
        sumInputsLayer.freeRef();
        gateNetwork.freeRef();
    }
}
Also used : PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) IntStream(java.util.stream.IntStream) JsonObject(com.google.gson.JsonObject) Arrays(java.util.Arrays) StochasticComponent(com.simiacryptus.mindseye.layers.java.StochasticComponent) CountingResult(com.simiacryptus.mindseye.network.CountingResult) SumInputsLayer(com.simiacryptus.mindseye.layers.cudnn.SumInputsLayer) ProductLayer(com.simiacryptus.mindseye.layers.cudnn.ProductLayer) Tensor(com.simiacryptus.mindseye.lang.Tensor) Random(java.util.Random) WrapperLayer(com.simiacryptus.mindseye.layers.java.WrapperLayer) Result(com.simiacryptus.mindseye.lang.Result) ValueLayer(com.simiacryptus.mindseye.layers.java.ValueLayer) DAGNode(com.simiacryptus.mindseye.network.DAGNode) DataSerializer(com.simiacryptus.mindseye.lang.DataSerializer) Precision(com.simiacryptus.mindseye.lang.cudnn.Precision) Map(java.util.Map) Layer(com.simiacryptus.mindseye.lang.Layer) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) Nonnull(javax.annotation.Nonnull) Nullable(javax.annotation.Nullable)
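
This overload differs from Example 22 only in the setPrecision calls, which select the compute precision used by the product and sum layers. As a rough plain-Java illustration of why that choice can matter for a sum-then-scale operation (illustration only, unrelated to the MindsEye API; the constants are made up), single-precision accumulation drifts where double-precision accumulation stays close:

public final class PrecisionDriftSketch {
    public static void main(String[] args) {
        int n = 1_000_000;
        float floatSum = 0f;
        double doubleSum = 0.0;
        for (int i = 0; i < n; i++) {
            floatSum += 0.1f;    // single-precision accumulation loses low-order bits
            doubleSum += 0.1;    // double-precision accumulation keeps them
        }
        System.out.println("float  average: " + floatSum / n);   // noticeably off from 0.1
        System.out.println("double average: " + doubleSum / n);  // very close to 0.1
    }
}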

Example 25 with PipelineNetwork

Use of com.simiacryptus.mindseye.network.PipelineNetwork in project MindsEye by SimiaCryptus.

From the class ExplodedConvolutionGrid, method add:

/**
 * Adds this exploded convolution to the network containing the given node, inserting
 * zero-padding layers before and after the sub-layers as the configured padding requires.
 *
 * @param input the upstream node to attach to
 * @return the output node of the exploded convolution
 */
public DAGNode add(@Nonnull DAGNode input) {
    assertAlive();
    DAGNetwork network = input.getNetwork();
    int defaultPaddingX = 0;
    int defaultPaddingY = 0;
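    // The default padding here is zero, so any non-null, non-zero configured padding is custom.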
    boolean customPaddingX = this.convolutionParams.paddingX != null && convolutionParams.paddingX != defaultPaddingX;
    boolean customPaddingY = this.convolutionParams.paddingY != null && convolutionParams.paddingY != defaultPaddingY;
    final DAGNode paddedInput;
    if (customPaddingX || customPaddingY) {
        int x;
        if (this.convolutionParams.paddingX < -defaultPaddingX) {
            x = this.convolutionParams.paddingX + defaultPaddingX;
        } else if (this.convolutionParams.paddingX > defaultPaddingX) {
            x = this.convolutionParams.paddingX - defaultPaddingX;
        } else {
            x = 0;
        }
        int y;
        if (this.convolutionParams.paddingY < -defaultPaddingY) {
            y = this.convolutionParams.paddingY + defaultPaddingY;
        } else if (this.convolutionParams.paddingY > defaultPaddingY) {
            y = this.convolutionParams.paddingY - defaultPaddingY;
        } else {
            y = 0;
        }
        if (x != 0 || y != 0) {
            paddedInput = network.wrap(new ImgZeroPaddingLayer(x, y).setPrecision(convolutionParams.precision), input);
        } else {
            paddedInput = input;
        }
    } else {
        paddedInput = input;
    }
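    // Route the (possibly padded) input through the exploded sub-layers: a single leg directly,
    // or several band ranges combined through an ImgLinearSubnetLayer.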
    InnerNode output;
    if (subLayers.size() == 1) {
        output = (InnerNode) subLayers.get(0).add(paddedInput);
    } else {
        ImgLinearSubnetLayer linearSubnetLayer = new ImgLinearSubnetLayer();
        subLayers.forEach(leg -> {
            PipelineNetwork subnet = new PipelineNetwork();
            leg.add(subnet.getHead());
            linearSubnetLayer.add(leg.fromBand, leg.toBand, subnet);
        });
        boolean isParallel = CudaSettings.INSTANCE.isConv_para_1();
        linearSubnetLayer.setPrecision(convolutionParams.precision).setParallel(isParallel);
        output = network.wrap(linearSubnetLayer, paddedInput).setParallel(isParallel);
    }
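    // Only negative (cropping) residual offsets need a trailing zero-padding layer; positive offsets are clamped to zero.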
    if (customPaddingX || customPaddingY) {
        int x = !customPaddingX ? 0 : (this.convolutionParams.paddingX - defaultPaddingX);
        int y = !customPaddingY ? 0 : (this.convolutionParams.paddingY - defaultPaddingY);
        if (x > 0)
            x = 0;
        if (y > 0)
            y = 0;
        if (x != 0 || y != 0) {
            return network.wrap(new ImgZeroPaddingLayer(x, y).setPrecision(convolutionParams.precision), output);
        }
    }
    return output;
}
Also used : PipelineNetwork(com.simiacryptus.mindseye.network.PipelineNetwork) DAGNetwork(com.simiacryptus.mindseye.network.DAGNetwork) DAGNode(com.simiacryptus.mindseye.network.DAGNode) InnerNode(com.simiacryptus.mindseye.network.InnerNode)
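
A minimal plain-Java sketch of the padding bookkeeping above (illustration only; the helper names and sample values are made up): with the grid's default padding of zero, any non-null, non-zero configured padding yields a leading ImgZeroPaddingLayer, while only negative (cropping) offsets survive the clamp and yield a trailing one.

public final class PaddingOffsetSketch {
    // Offset for the ImgZeroPaddingLayer inserted before the sub-layers.
    static int preOffset(Integer padding, int defaultPadding) {
        if (padding == null) return 0;
        if (padding < -defaultPadding) return padding + defaultPadding;
        if (padding > defaultPadding) return padding - defaultPadding;
        return 0;
    }

    // Offset for the ImgZeroPaddingLayer inserted after the sub-layers (positives clamp to 0).
    static int postOffset(Integer padding, int defaultPadding) {
        if (padding == null || padding == defaultPadding) return 0;
        int offset = padding - defaultPadding;
        return offset > 0 ? 0 : offset;
    }

    public static void main(String[] args) {
        int def = 0; // defaultPaddingX / defaultPaddingY in the method above
        System.out.println(preOffset(2, def) + " / " + postOffset(2, def));       // 2 / 0
        System.out.println(preOffset(-1, def) + " / " + postOffset(-1, def));     // -1 / -1
        System.out.println(preOffset(null, def) + " / " + postOffset(null, def)); // 0 / 0
    }
}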

Aggregations

PipelineNetwork (com.simiacryptus.mindseye.network.PipelineNetwork): 33
Nonnull (javax.annotation.Nonnull): 29
Tensor (com.simiacryptus.mindseye.lang.Tensor): 16
DAGNode (com.simiacryptus.mindseye.network.DAGNode): 13
Nullable (javax.annotation.Nullable): 12
ArrayList (java.util.ArrayList): 11
StepRecord (com.simiacryptus.mindseye.test.StepRecord): 10
Layer (com.simiacryptus.mindseye.lang.Layer): 9
Arrays (java.util.Arrays): 9
List (java.util.List): 9
ArrayTrainable (com.simiacryptus.mindseye.eval.ArrayTrainable): 8
DAGNetwork (com.simiacryptus.mindseye.network.DAGNetwork): 8
IntStream (java.util.stream.IntStream): 8
MeanSqLossLayer (com.simiacryptus.mindseye.layers.java.MeanSqLossLayer): 7
TestUtil (com.simiacryptus.mindseye.test.TestUtil): 7
NotebookOutput (com.simiacryptus.util.io.NotebookOutput): 7
Map (java.util.Map): 7
Trainable (com.simiacryptus.mindseye.eval.Trainable): 6
IterativeTrainer (com.simiacryptus.mindseye.opt.IterativeTrainer): 6
BufferedImage (java.awt.image.BufferedImage): 6