Use of org.deeplearning4j.nn.modelimport.keras.UnsupportedKerasConfigurationException in project deeplearning4j by deeplearning4j.
From the class KerasLstm, the method getRecurrentWeightInitFromConfig:
/**
 * Get LSTM recurrent weight initialization from Keras layer configuration.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param train       whether an unknown initializer should fail the import (true) or fall back to XAVIER (false)
 * @return recurrent weight initialization
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public static WeightInit getRecurrentWeightInitFromConfig(Map<String, Object> layerConfig, boolean train)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    Map<String, Object> innerConfig = getInnerLayerConfigFromConfig(layerConfig);
    if (!innerConfig.containsKey(LAYER_FIELD_INNER_INIT))
        throw new InvalidKerasConfigurationException(
                "Keras LSTM layer config missing " + LAYER_FIELD_INNER_INIT + " field");
    String kerasInit = (String) innerConfig.get(LAYER_FIELD_INNER_INIT);
    WeightInit init;
    try {
        init = mapWeightInitialization(kerasInit);
    } catch (UnsupportedKerasConfigurationException e) {
        if (train)
            throw e;
        else {
            init = WeightInit.XAVIER;
            log.warn("Unknown weight initializer " + kerasInit + " (Using XAVIER instead).");
        }
    }
    return init;
}
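A minimal usage sketch for the method above. Assumptions: the Keras 1.x keys "config" and "inner_init" that this importer reads, "glorot_uniform" as a recognized initializer string, and the KerasLstm package name, which may differ between DL4J versions.

import java.util.HashMap;
import java.util.Map;

import org.deeplearning4j.nn.modelimport.keras.layers.KerasLstm;   // package assumed; may differ by DL4J version
import org.deeplearning4j.nn.weights.WeightInit;

public class RecurrentInitSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical Keras 1.x LSTM layer config: {"class_name": "LSTM", "config": {...}}
        Map<String, Object> innerConfig = new HashMap<>();
        innerConfig.put("inner_init", "glorot_uniform");   // assumed value of LAYER_FIELD_INNER_INIT

        Map<String, Object> layerConfig = new HashMap<>();
        layerConfig.put("class_name", "LSTM");
        layerConfig.put("config", innerConfig);

        // train = false: an unknown initializer falls back to XAVIER with a warning instead of failing
        WeightInit init = KerasLstm.getRecurrentWeightInitFromConfig(layerConfig, false);
        System.out.println(init);   // e.g. XAVIER for glorot_uniform
    }
}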
Use of org.deeplearning4j.nn.modelimport.keras.UnsupportedKerasConfigurationException in project deeplearning4j by deeplearning4j.
From the class KerasLstm, the method getForgetBiasInitFromConfig:
/**
 * Get LSTM forget gate bias initialization from Keras layer configuration.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param train       whether an unsupported initialization should fail the import (true) or fall back to 1.0 (false)
 * @return forget gate bias initialization value
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public static double getForgetBiasInitFromConfig(Map<String, Object> layerConfig, boolean train)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    Map<String, Object> innerConfig = getInnerLayerConfigFromConfig(layerConfig);
    if (!innerConfig.containsKey(LAYER_FIELD_FORGET_BIAS_INIT))
        throw new InvalidKerasConfigurationException(
                "Keras LSTM layer config missing " + LAYER_FIELD_FORGET_BIAS_INIT + " field");
    String kerasForgetBiasInit = (String) innerConfig.get(LAYER_FIELD_FORGET_BIAS_INIT);
    double init = 0;
    switch (kerasForgetBiasInit) {
        case LSTM_FORGET_BIAS_INIT_ZERO:
            init = 0.0;
            break;
        case LSTM_FORGET_BIAS_INIT_ONE:
            init = 1.0;
            break;
        default:
            if (train)
                throw new UnsupportedKerasConfigurationException(
                        "Unsupported LSTM forget gate bias initialization: " + kerasForgetBiasInit);
            else {
                init = 1.0;
                log.warn("Unsupported LSTM forget gate bias initialization: " + kerasForgetBiasInit
                        + " (using 1 instead)");
            }
            break;
    }
    return init;
}
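A similar hedged sketch for the forget gate bias. Assumptions: the Keras 1.x key "forget_bias_init", the value "one" as LSTM_FORGET_BIAS_INIT_ONE, and the same KerasLstm package as above.

import java.util.HashMap;
import java.util.Map;

import org.deeplearning4j.nn.modelimport.keras.layers.KerasLstm;   // package assumed; may differ by DL4J version

public class ForgetBiasSketch {
    public static void main(String[] args) throws Exception {
        Map<String, Object> innerConfig = new HashMap<>();
        innerConfig.put("forget_bias_init", "one");   // assumed value of LAYER_FIELD_FORGET_BIAS_INIT

        Map<String, Object> layerConfig = new HashMap<>();
        layerConfig.put("class_name", "LSTM");
        layerConfig.put("config", innerConfig);

        // train = true: anything other than the zero/one constants would throw instead of falling back
        double forgetBias = KerasLstm.getForgetBiasInitFromConfig(layerConfig, true);
        System.out.println(forgetBias);   // 1.0
    }
}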
Use of org.deeplearning4j.nn.modelimport.keras.UnsupportedKerasConfigurationException in project deeplearning4j by deeplearning4j.
From the class KerasBatchNormalization, the method getBatchNormMode:
/**
 * Get BatchNormalization "mode" from Keras layer configuration. Most modes are currently unsupported.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @param enforceTrainingConfig whether to enforce strict import of training configuration (not used by this check)
 * @return batch normalization mode
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
protected int getBatchNormMode(Map<String, Object> layerConfig, boolean enforceTrainingConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    Map<String, Object> innerConfig = getInnerLayerConfigFromConfig(layerConfig);
    if (!innerConfig.containsKey(LAYER_FIELD_MODE))
        throw new InvalidKerasConfigurationException(
                "Keras BatchNorm layer config missing " + LAYER_FIELD_MODE + " field");
    int batchNormMode = (int) innerConfig.get(LAYER_FIELD_MODE);
    switch (batchNormMode) {
        case LAYER_BATCHNORM_MODE_1:
            throw new UnsupportedKerasConfigurationException(
                    "Keras BatchNormalization mode " + LAYER_BATCHNORM_MODE_1 + " (sample-wise) not supported");
        case LAYER_BATCHNORM_MODE_2:
            throw new UnsupportedKerasConfigurationException(
                    "Keras BatchNormalization mode " + LAYER_BATCHNORM_MODE_2
                            + " (per-batch statistics during testing) not supported");
    }
    return batchNormMode;
}
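Since getBatchNormMode is a protected instance method, here is a standalone sketch of the same check rather than a direct call. Assumptions: the Keras 1.x field name "mode", and mode constants 0/1/2 with only mode 0 (feature-wise normalization) being importable, as the switch above implies.

import java.util.Map;

public class BatchNormModeSketch {
    // Standalone sketch of the mode check above; not the DL4J implementation.
    static int checkBatchNormMode(Map<String, Object> innerConfig) {
        Object mode = innerConfig.get("mode");   // assumed Keras 1.x field name
        if (mode == null)
            throw new IllegalArgumentException("BatchNormalization config missing \"mode\" field");
        int batchNormMode = (int) mode;
        if (batchNormMode == 1 || batchNormMode == 2)
            throw new UnsupportedOperationException(
                    "Only BatchNormalization mode 0 (feature-wise) is supported, got mode " + batchNormMode);
        return batchNormMode;
    }
}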
Use of org.deeplearning4j.nn.modelimport.keras.UnsupportedKerasConfigurationException in project deeplearning4j by deeplearning4j.
From the class KerasMerge, the method getMergeMode:
/**
 * Get merge mode from Keras Merge layer configuration.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @return element-wise operation for the merge, or null for concatenation
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public ElementWiseVertex.Op getMergeMode(Map<String, Object> layerConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    Map<String, Object> innerConfig = getInnerLayerConfigFromConfig(layerConfig);
    if (!innerConfig.containsKey(LAYER_FIELD_MODE))
        throw new InvalidKerasConfigurationException(
                "Keras Merge layer config missing " + LAYER_FIELD_MODE + " field");
    ElementWiseVertex.Op op = null;
    String mergeMode = (String) innerConfig.get(LAYER_FIELD_MODE);
    switch (mergeMode) {
        case LAYER_MERGE_MODE_SUM:
            op = ElementWiseVertex.Op.Add;
            break;
        case LAYER_MERGE_MODE_MUL:
            op = ElementWiseVertex.Op.Product;
            break;
        case LAYER_MERGE_MODE_CONCAT:
            // leave op null (concatenation handled separately)
            break;
        case LAYER_MERGE_MODE_AVE:
        case LAYER_MERGE_MODE_COS:
        case LAYER_MERGE_MODE_DOT:
        case LAYER_MERGE_MODE_MAX:
        default:
            throw new UnsupportedKerasConfigurationException(
                    "Keras Merge layer mode " + mergeMode + " not supported");
    }
    return op;
}
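The mode-to-operation mapping above can be sketched standalone. Assumptions: the Keras 1.x mode strings "sum", "mul", and "concat"; a null return stands for concatenation, which is handled without an element-wise operation.

import org.deeplearning4j.nn.conf.graph.ElementWiseVertex;

public class MergeModeSketch {
    // Standalone sketch of the merge-mode mapping; not the DL4J implementation.
    static ElementWiseVertex.Op mapMergeMode(String mergeMode) {
        switch (mergeMode) {
            case "sum":
                return ElementWiseVertex.Op.Add;
            case "mul":
                return ElementWiseVertex.Op.Product;
            case "concat":
                return null;   // concatenation: no element-wise op needed
            default:           // ave, cos, dot, max, ...
                throw new UnsupportedOperationException("Merge mode " + mergeMode + " not supported");
        }
    }
}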
Use of org.deeplearning4j.nn.modelimport.keras.UnsupportedKerasConfigurationException in project deeplearning4j by deeplearning4j.
From the class KerasZeroPadding, the method getPaddingFromConfig:
/**
 * Get zero padding from Keras layer configuration.
 *
 * @param layerConfig dictionary containing Keras layer configuration
 * @return padding as an int array, one entry per padded border
 * @throws InvalidKerasConfigurationException
 * @throws UnsupportedKerasConfigurationException
 */
public int[] getPaddingFromConfig(Map<String, Object> layerConfig)
        throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
    Map<String, Object> innerConfig = getInnerLayerConfigFromConfig(layerConfig);
    if (!innerConfig.containsKey(LAYER_FIELD_PADDING))
        throw new InvalidKerasConfigurationException(
                "Field " + LAYER_FIELD_PADDING + " not found in Keras ZeroPadding layer");
    List<Integer> paddingList = (List<Integer>) innerConfig.get(LAYER_FIELD_PADDING);
    switch (this.className) {
        case LAYER_CLASS_NAME_ZERO_PADDING_2D:
            // Expand a two-element padding [a, b] to the four-element form [a, a, b, b]
            if (paddingList.size() == 2) {
                paddingList.add(paddingList.get(1));
                paddingList.add(1, paddingList.get(0));
            }
            if (paddingList.size() != 4)
                throw new InvalidKerasConfigurationException(
                        "Found Keras ZeroPadding2D layer with invalid " + paddingList.size() + "D padding.");
            break;
        case LAYER_CLASS_NAME_ZERO_PADDING_1D:
            throw new UnsupportedKerasConfigurationException("Keras ZeroPadding1D layer not supported");
        default:
            throw new UnsupportedKerasConfigurationException(
                    "Keras " + this.className + " padding layer not supported");
    }
    int[] padding = new int[paddingList.size()];
    for (int i = 0; i < paddingList.size(); i++)
        padding[i] = paddingList.get(i);
    return padding;
}
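A small self-contained sketch of the ZeroPadding2D expansion above: a two-element Keras padding list [a, b] is widened to the four-element form [a, a, b, b] (presumably one pair per spatial dimension) before conversion to an int array. Only java.util is used; the class name is hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ZeroPadding2DSketch {
    public static void main(String[] args) {
        List<Integer> paddingList = new ArrayList<>(Arrays.asList(1, 2));
        if (paddingList.size() == 2) {
            paddingList.add(paddingList.get(1));      // [1, 2, 2]
            paddingList.add(1, paddingList.get(0));   // [1, 1, 2, 2]
        }
        int[] padding = new int[paddingList.size()];
        for (int i = 0; i < paddingList.size(); i++)
            padding[i] = paddingList.get(i);
        System.out.println(Arrays.toString(padding)); // [1, 1, 2, 2]
    }
}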