Example 6 with InputType

Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.

The class DropoutLayerTest, method testInputTypes.

@Test
public void testInputTypes() {
    DropoutLayer config = new DropoutLayer.Builder(0.5).build();
    InputType in1 = InputType.feedForward(20);
    InputType in2 = InputType.convolutional(28, 28, 1);
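    // Dropout does not change the shape of its activations, so the input type
    // should pass through unchanged and no input preprocessor should be needed.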
    assertEquals(in1, config.getOutputType(0, in1));
    assertEquals(in2, config.getOutputType(0, in2));
    assertNull(config.getPreProcessorForInputType(in1));
    assertNull(config.getPreProcessorForInputType(in2));
}
Also used: InputType (org.deeplearning4j.nn.conf.inputs.InputType), DropoutLayer (org.deeplearning4j.nn.conf.layers.DropoutLayer), Test (org.junit.Test)
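
The test above relies on each layer reporting its output type and required preprocessor for a given InputType. The sketch below (hypothetical class name and layer sizes, not taken from the project) shows the more common route for supplying an InputType: passing it to the ListBuilder via setInputType, which lets DL4J infer nIn values and insert any needed preprocessors automatically, as also done at the end of Example 7.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.DropoutLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class InputTypeSketch {
    public static void main(String[] args) {
        NeuralNetConfiguration.ListBuilder listBuilder = new NeuralNetConfiguration.Builder().list();
        // Hypothetical layer stack: convolution -> dropout -> output.
        listBuilder.layer(0, new ConvolutionLayer.Builder(3, 3).nOut(8).build());
        listBuilder.layer(1, new DropoutLayer.Builder(0.5).build());
        listBuilder.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nOut(10).build());
        // Declaring the input type lets the builder infer nIn values and add
        // the CNN-to-dense preprocessor instead of wiring it up by hand.
        listBuilder.setInputType(InputType.convolutional(28, 28, 1));
        MultiLayerConfiguration conf = listBuilder.build();
        System.out.println(conf.toJson());
    }
}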

Example 7 with InputType

Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.

The class KerasSequentialModel, method getMultiLayerConfiguration.

/**
 * Configure a MultiLayerConfiguration from this Keras Sequential model configuration.
 *
 * @return MultiLayerConfiguration
 */
public MultiLayerConfiguration getMultiLayerConfiguration() throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    if (!this.className.equals(MODEL_CLASS_NAME_SEQUENTIAL))
        throw new InvalidKerasConfigurationException("Keras model class name " + this.className + " incompatible with MultiLayerNetwork");
    if (this.inputLayerNames.size() != 1)
        throw new InvalidKerasConfigurationException("MultiLayerNetwork expects only 1 input (found " + this.inputLayerNames.size() + ")");
    if (this.outputLayerNames.size() != 1)
        throw new InvalidKerasConfigurationException("MultiLayerNetwork expects only 1 output (found " + this.outputLayerNames.size() + ")");
    NeuralNetConfiguration.Builder modelBuilder = new NeuralNetConfiguration.Builder();
    NeuralNetConfiguration.ListBuilder listBuilder = modelBuilder.list();
    /* Add layers one at a time. */
    KerasLayer prevLayer = null;
    int layerIndex = 0;
    for (KerasLayer layer : this.layersOrdered) {
        if (layer.usesRegularization())
            modelBuilder.setUseRegularization(true);
        if (layer.isLayer()) {
            int nbInbound = layer.getInboundLayerNames().size();
            if (nbInbound != 1)
                throw new InvalidKerasConfigurationException("Layers in MultiLayerConfiguration must have exactly one inbound layer (found " + nbInbound + " for layer " + layer.getLayerName() + ")");
            if (prevLayer != null) {
                InputType[] inputTypes = new InputType[1];
                InputPreProcessor preprocessor = null;
                if (prevLayer.isInputPreProcessor()) {
                    inputTypes[0] = this.outputTypes.get(prevLayer.getInboundLayerNames().get(0));
                    preprocessor = prevLayer.getInputPreprocessor(inputTypes);
                } else {
                    inputTypes[0] = this.outputTypes.get(prevLayer.getLayerName());
                    preprocessor = layer.getInputPreprocessor(inputTypes);
                }
                if (preprocessor != null)
                    listBuilder.inputPreProcessor(layerIndex, preprocessor);
            }
            listBuilder.layer(layerIndex++, layer.getLayer());
            if (this.outputLayerNames.contains(layer.getLayerName()) && !(layer.getLayer() instanceof IOutputLayer))
                log.warn("Model cannot be trained: output layer " + layer.getLayerName() + " is not an IOutputLayer (no loss function specified)");
        } else if (layer.getVertex() != null)
            throw new InvalidKerasConfigurationException("Cannot add vertex to MultiLayerConfiguration (class name " + layer.getClassName() + ", layer name " + layer.getLayerName() + ")");
        prevLayer = layer;
    }
    InputType inputType = this.layersOrdered.get(0).getOutputType();
    if (inputType != null)
        listBuilder.setInputType(inputType);
    /* Whether to use standard backprop (or BPTT) or truncated BPTT. */
    if (this.useTruncatedBPTT && this.truncatedBPTT > 0)
        listBuilder.backpropType(BackpropType.TruncatedBPTT).tBPTTForwardLength(truncatedBPTT).tBPTTBackwardLength(truncatedBPTT);
    else
        listBuilder.backpropType(BackpropType.Standard);
    return listBuilder.build();
}
Also used: InputType (org.deeplearning4j.nn.conf.inputs.InputType), InputPreProcessor (org.deeplearning4j.nn.conf.InputPreProcessor), NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration), IOutputLayer (org.deeplearning4j.nn.api.layers.IOutputLayer)
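
The per-layer preprocessor lookup in the loop above has a direct counterpart in the plain DL4J layer API used in Example 6. A minimal sketch (hypothetical class name and layer sizes) of asking a receiving layer which preprocessor it needs for the previous layer's output type:

import org.deeplearning4j.nn.conf.InputPreProcessor;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class PreprocessorLookupSketch {
    public static void main(String[] args) {
        // Hypothetical output type of the previous (convolutional) layer.
        InputType prevOutput = InputType.convolutional(28, 28, 1);
        // Ask the receiving layer what preprocessor it needs for that input type,
        // analogous to the getInputPreprocessor(inputTypes) lookup in the loop above.
        DenseLayer dense = new DenseLayer.Builder().nIn(28 * 28).nOut(100).build();
        InputPreProcessor preprocessor = dense.getPreProcessorForInputType(prevOutput);
        // For a convolutional input feeding a dense layer this is a CNN-to-feed-forward
        // preprocessor; it would be registered via listBuilder.inputPreProcessor(layerIndex, preprocessor).
        System.out.println(preprocessor == null ? "no preprocessor needed" : preprocessor.getClass().getSimpleName());
    }
}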

Example 8 with InputType

Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.

The class KerasModel, method helperInferOutputTypes.

/**
 * Helper method called from the constructor. Infers and records the output type
 * for every layer.
 */
protected void helperInferOutputTypes() throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    this.outputTypes = new HashMap<String, InputType>();
    for (KerasLayer layer : this.layersOrdered) {
        InputType outputType = null;
        if (layer instanceof KerasInput) {
            outputType = layer.getOutputType();
            /*
             * TODO: figure out how to infer truncated BPTT value for non-sequence inputs
             *
             * In Keras, truncated BPTT is specified implicitly by specifying a fixed
             * size input and by passing in the "unroll" argument to recurrent layers.
             * Currently, the only setting in which we can confidently determine the
             * value of truncated BPTT is if the original input has two dimensions,
             * the first of which is sequence length. Hypothetically, we should be
             * able to do this for other types of inputs, but that's less straightforward.
             */
            this.truncatedBPTT = ((KerasInput) layer).getTruncatedBptt();
        } else {
            InputType[] inputTypes = new InputType[layer.getInboundLayerNames().size()];
            int i = 0;
            for (String inboundLayerName : layer.getInboundLayerNames())
                inputTypes[i++] = this.outputTypes.get(inboundLayerName);
            outputType = layer.getOutputType(inputTypes);
        }
        this.outputTypes.put(layer.getLayerName(), outputType);
    }
}
Also used: InputType (org.deeplearning4j.nn.conf.inputs.InputType), KerasInput (org.deeplearning4j.nn.modelimport.keras.layers.KerasInput)
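
The inference loop above amounts to repeatedly asking each layer what output it produces for the input type(s) it receives. A minimal sketch (hypothetical class name and layer sizes; the exact convolutional output dimensions depend on stride, padding, and convolution-mode defaults) of the same idea using the DL4J layer API directly:

import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.DenseLayer;

public class OutputTypeInferenceSketch {
    public static void main(String[] args) {
        // Start from the declared input type, as KerasInput does for the first layer.
        InputType current = InputType.convolutional(28, 28, 1);

        // Each layer reports the output type it produces for a given input type;
        // feeding the result forward mirrors the outputTypes map built above.
        ConvolutionLayer conv = new ConvolutionLayer.Builder(3, 3).nIn(1).nOut(8).build();
        current = conv.getOutputType(0, current);
        System.out.println(current);   // convolutional output, e.g. 26 x 26 x 8 with default stride and padding

        DenseLayer dense = new DenseLayer.Builder().nIn(26 * 26 * 8).nOut(100).build();
        current = dense.getOutputType(1, current);
        System.out.println(current);   // feed-forward output with 100 units
    }
}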

Aggregations

InputType (org.deeplearning4j.nn.conf.inputs.InputType): 8 usages
Test (org.junit.Test): 3 usages
IOutputLayer (org.deeplearning4j.nn.api.layers.IOutputLayer): 2 usages
InputPreProcessor (org.deeplearning4j.nn.conf.InputPreProcessor): 2 usages
NeuralNetConfiguration (org.deeplearning4j.nn.conf.NeuralNetConfiguration): 2 usages
DL4JException (org.deeplearning4j.exception.DL4JException): 1 usage
ComputationGraphConfiguration (org.deeplearning4j.nn.conf.ComputationGraphConfiguration): 1 usage
GraphVertex (org.deeplearning4j.nn.conf.graph.GraphVertex): 1 usage
LayerVertex (org.deeplearning4j.nn.conf.graph.LayerVertex): 1 usage
PreprocessorVertex (org.deeplearning4j.nn.conf.graph.PreprocessorVertex): 1 usage
InvalidInputTypeException (org.deeplearning4j.nn.conf.inputs.InvalidInputTypeException): 1 usage
ConvolutionLayer (org.deeplearning4j.nn.conf.layers.ConvolutionLayer): 1 usage
DropoutLayer (org.deeplearning4j.nn.conf.layers.DropoutLayer): 1 usage
Layer (org.deeplearning4j.nn.conf.layers.Layer): 1 usage
OutputLayer (org.deeplearning4j.nn.conf.layers.OutputLayer): 1 usage
KerasInput (org.deeplearning4j.nn.modelimport.keras.layers.KerasInput): 1 usage
INDArray (org.nd4j.linalg.api.ndarray.INDArray): 1 usage