Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.
Class DropoutLayerTest, method testInputTypes:
@Test
public void testInputTypes() {
    DropoutLayer config = new DropoutLayer.Builder(0.5).build();

    InputType in1 = InputType.feedForward(20);
    InputType in2 = InputType.convolutional(28, 28, 1);

    // Dropout does not change the shape of its input: the output type equals the
    // input type, and no input preprocessor is required for either input type.
    assertEquals(in1, config.getOutputType(0, in1));
    assertEquals(in2, config.getOutputType(0, in2));
    assertNull(config.getPreProcessorForInputType(in1));
    assertNull(config.getPreProcessorForInputType(in2));
}
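For context, the following is a minimal sketch (not part of the test above) showing the same InputType factory methods used to declare a network's input shape; setInputType lets DL4J infer layer input sizes and insert any required preprocessors automatically. The layer sizes and loss function are illustrative assumptions:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.DropoutLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class InputTypeSketch {
    public static void main(String[] args) {
        // Declare the input as 28x28 single-channel images; DL4J uses this InputType
        // to infer each layer's input size and to add preprocessors where needed.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .list()
                .layer(0, new DenseLayer.Builder().nOut(100).build())
                .layer(1, new DropoutLayer.Builder(0.5).build())
                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nOut(10).build())
                .setInputType(InputType.convolutional(28, 28, 1))
                .build();
        System.out.println(conf.toJson());
    }
}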
Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.
Class KerasSequentialModel, method getMultiLayerConfiguration:
/**
* Configure a MultiLayerConfiguration from this Keras Sequential model configuration.
*
* @return MultiLayerConfiguration
*/
public MultiLayerConfiguration getMultiLayerConfiguration()
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    if (!this.className.equals(MODEL_CLASS_NAME_SEQUENTIAL))
        throw new InvalidKerasConfigurationException(
                "Keras model class name " + this.className + " incompatible with MultiLayerNetwork");
    if (this.inputLayerNames.size() != 1)
        throw new InvalidKerasConfigurationException(
                "MultiLayerNetwork expects only 1 input (found " + this.inputLayerNames.size() + ")");
    if (this.outputLayerNames.size() != 1)
        throw new InvalidKerasConfigurationException(
                "MultiLayerNetwork expects only 1 output (found " + this.outputLayerNames.size() + ")");

    NeuralNetConfiguration.Builder modelBuilder = new NeuralNetConfiguration.Builder();
    NeuralNetConfiguration.ListBuilder listBuilder = modelBuilder.list();

    /* Add layers one at a time. */
    KerasLayer prevLayer = null;
    int layerIndex = 0;
    for (KerasLayer layer : this.layersOrdered) {
        if (layer.usesRegularization())
            modelBuilder.setUseRegularization(true);
        if (layer.isLayer()) {
            int nbInbound = layer.getInboundLayerNames().size();
            if (nbInbound != 1)
                throw new InvalidKerasConfigurationException(
                        "Layers in MultiLayerConfiguration must have exactly one inbound layer (found "
                                + nbInbound + " for layer " + layer.getLayerName() + ")");
            if (prevLayer != null) {
                InputType[] inputTypes = new InputType[1];
                InputPreProcessor preprocessor = null;
                if (prevLayer.isInputPreProcessor()) {
                    inputTypes[0] = this.outputTypes.get(prevLayer.getInboundLayerNames().get(0));
                    preprocessor = prevLayer.getInputPreprocessor(inputTypes);
                } else {
                    inputTypes[0] = this.outputTypes.get(prevLayer.getLayerName());
                    preprocessor = layer.getInputPreprocessor(inputTypes);
                }
                if (preprocessor != null)
                    listBuilder.inputPreProcessor(layerIndex, preprocessor);
            }
            listBuilder.layer(layerIndex++, layer.getLayer());
            if (this.outputLayerNames.contains(layer.getLayerName())
                    && !(layer.getLayer() instanceof IOutputLayer))
                log.warn("Model cannot be trained: output layer " + layer.getLayerName()
                        + " is not an IOutputLayer (no loss function specified)");
        } else if (layer.getVertex() != null)
            throw new InvalidKerasConfigurationException("Cannot add vertex to MultiLayerConfiguration (class name "
                    + layer.getClassName() + ", layer name " + layer.getLayerName() + ")");
        prevLayer = layer;
    }

    /* Set the network's input type from the first (input) layer, if available. */
    InputType inputType = this.layersOrdered.get(0).getOutputType();
    if (inputType != null)
        listBuilder.setInputType(inputType);

    /* Use truncated BPTT if requested, otherwise standard backprop. */
    if (this.useTruncatedBPTT && this.truncatedBPTT > 0)
        listBuilder.backpropType(BackpropType.TruncatedBPTT)
                .tBPTTForwardLength(truncatedBPTT)
                .tBPTTBackwardLength(truncatedBPTT);
    else
        listBuilder.backpropType(BackpropType.Standard);

    return listBuilder.build();
}
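In typical use this conversion is reached through the Keras import entry points rather than by constructing a KerasSequentialModel directly. The sketch below is a hedged usage example: the file path is a placeholder, and the exact KerasModelImport method names may differ between deeplearning4j versions:

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.modelimport.keras.KerasModelImport;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;

public class KerasSequentialImportSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder path to a Keras Sequential model configuration exported as JSON.
        String modelJson = "path/to/keras_sequential_model.json";

        // Import the architecture only; internally this builds a KerasSequentialModel
        // and calls getMultiLayerConfiguration(), as shown above.
        MultiLayerConfiguration conf =
                KerasModelImport.importKerasSequentialConfiguration(modelJson);

        MultiLayerNetwork network = new MultiLayerNetwork(conf);
        network.init();
        System.out.println(network.summary());
    }
}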
Use of org.deeplearning4j.nn.conf.inputs.InputType in project deeplearning4j by deeplearning4j.
Class KerasModel, method helperInferOutputTypes:
/**
* Helper method called from constructor. Infers and records output type
* for every layer.
*/
protected void helperInferOutputTypes()
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    this.outputTypes = new HashMap<String, InputType>();
    for (KerasLayer layer : this.layersOrdered) {
        InputType outputType = null;
        if (layer instanceof KerasInput) {
            outputType = layer.getOutputType();
            /*
             * TODO: figure out how to infer truncated BPTT value for non-sequence inputs
             *
             * In Keras, truncated BPTT is specified implicitly by specifying a fixed
             * size input and by passing the "unroll" argument to recurrent layers.
             * Currently, the only setting in which we can confidently determine the
             * value of truncated BPTT is if the original input has two dimensions,
             * the first of which is sequence length. Hypothetically, we should be
             * able to do this for other types of inputs, but that's less straightforward.
             */
            this.truncatedBPTT = ((KerasInput) layer).getTruncatedBptt();
        } else {
            /* Look up the output types of all inbound layers and use them to infer this layer's output type. */
            InputType[] inputTypes = new InputType[layer.getInboundLayerNames().size()];
            int i = 0;
            for (String inboundLayerName : layer.getInboundLayerNames())
                inputTypes[i++] = this.outputTypes.get(inboundLayerName);
            outputType = layer.getOutputType(inputTypes);
        }
        this.outputTypes.put(layer.getLayerName(), outputType);
    }
}
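The sketch below mirrors the name-to-InputType bookkeeping performed by this helper, using DL4J layer configurations directly; the layer names and sizes are illustrative assumptions. Note how the dropout layer passes its input type through unchanged, matching the DropoutLayerTest assertions above:

import java.util.LinkedHashMap;
import java.util.Map;

import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.DropoutLayer;

public class OutputTypePropagationSketch {
    public static void main(String[] args) {
        // Map of layer name -> inferred output type, analogous to this.outputTypes above.
        Map<String, InputType> outputTypes = new LinkedHashMap<>();

        // Start from a flat feed-forward input of size 784.
        InputType current = InputType.feedForward(784);
        outputTypes.put("input", current);

        // A dense layer produces a feed-forward output of size nOut.
        DenseLayer dense1 = new DenseLayer.Builder().nIn(784).nOut(100).build();
        current = dense1.getOutputType(0, current);
        outputTypes.put("dense_1", current);

        // Dropout leaves the output type unchanged.
        DropoutLayer drop = new DropoutLayer.Builder(0.5).build();
        current = drop.getOutputType(1, current);
        outputTypes.put("dropout_1", current);

        DenseLayer dense2 = new DenseLayer.Builder().nIn(100).nOut(10).build();
        current = dense2.getOutputType(2, current);
        outputTypes.put("dense_2", current);

        outputTypes.forEach((name, type) -> System.out.println(name + " -> " + type));
    }
}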