Example 1 with ActivationReLU

Use of ml.shifu.shifu.core.dtrain.nn.ActivationReLU in the shifu project by ShifuML.

From the class PersistBasicFloatNetwork, method readNetwork:

public BasicFloatNetwork readNetwork(final DataInput in) throws IOException {
    final BasicFloatNetwork result = new BasicFloatNetwork();
    final FlatNetwork flat = new FlatNetwork();
    // read properties
    Map<String, String> properties = new HashMap<String, String>();
    int size = in.readInt();
    for (int i = 0; i < size; i++) {
        properties.put(ml.shifu.shifu.core.dtrain.StringUtils.readString(in), ml.shifu.shifu.core.dtrain.StringUtils.readString(in));
    }
    result.getProperties().putAll(properties);
    // read fields
    flat.setBeginTraining(in.readInt());
    flat.setConnectionLimit(in.readDouble());
    flat.setContextTargetOffset(readIntArray(in));
    flat.setContextTargetSize(readIntArray(in));
    flat.setEndTraining(in.readInt());
    flat.setHasContext(in.readBoolean());
    flat.setInputCount(in.readInt());
    flat.setLayerCounts(readIntArray(in));
    flat.setLayerFeedCounts(readIntArray(in));
    flat.setLayerContextCount(readIntArray(in));
    flat.setLayerIndex(readIntArray(in));
    flat.setLayerOutput(readDoubleArray(in));
    flat.setOutputCount(in.readInt());
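    // layerSums is transient scratch space for the pre-activation weighted sums,
    // so it is rebuilt here rather than read; it must match layerOutput in length.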
    flat.setLayerSums(new double[flat.getLayerOutput().length]);
    flat.setWeightIndex(readIntArray(in));
    flat.setWeights(readDoubleArray(in));
    flat.setBiasActivation(readDoubleArray(in));
    // read activations
    flat.setActivationFunctions(new ActivationFunction[flat.getLayerCounts().length]);
    int acSize = in.readInt();
    for (int i = 0; i < acSize; i++) {
        String name = ml.shifu.shifu.core.dtrain.StringUtils.readString(in);
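        // Shifu's custom activations are persisted by simple class name; map them to
        // their fully qualified names. Anything else is resolved as a standard Encog activation.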
        if (name.equals("ActivationReLU")) {
            name = ActivationReLU.class.getName();
        } else if (name.equals("ActivationLeakyReLU")) {
            name = ActivationLeakyReLU.class.getName();
        } else if (name.equals("ActivationSwish")) {
            name = ActivationSwish.class.getName();
        } else if (name.equals("ActivationPTANH")) {
            name = ActivationPTANH.class.getName();
        } else {
            name = "org.encog.engine.network.activation." + name;
        }
        ActivationFunction af = null;
        try {
            final Class<?> clazz = Class.forName(name);
            af = (ActivationFunction) clazz.newInstance();
        } catch (final ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new PersistError(e);
        }
        double[] params = readDoubleArray(in);
        for (int j = 0; j < params.length; j++) {
            af.setParam(j, params[j]);
        }
        flat.getActivationFunctions()[i] = af;
    }
    // read subset
    int subsetSize = in.readInt();
    Set<Integer> featureList = new HashSet<Integer>();
    for (int i = 0; i < subsetSize; i++) {
        featureList.add(in.readInt());
    }
    result.setFeatureSet(featureList);
    result.getStructure().setFlat(flat);
    return result;
}
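
The snippet calls two private helpers, readIntArray and readDoubleArray, that are not shown on this page. Below is a minimal sketch of how they would look inside PersistBasicFloatNetwork, assuming the length-prefixed layout that the rest of the format uses (the actual shifu implementation may differ):

private int[] readIntArray(final DataInput in) throws IOException {
    // element count first, then the elements themselves
    int length = in.readInt();
    int[] array = new int[length];
    for (int i = 0; i < length; i++) {
        array[i] = in.readInt();
    }
    return array;
}

private double[] readDoubleArray(final DataInput in) throws IOException {
    int length = in.readInt();
    double[] array = new double[length];
    for (int i = 0; i < length; i++) {
        array[i] = in.readDouble();
    }
    return array;
}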
Also used:

ActivationSwish (ml.shifu.shifu.core.dtrain.nn.ActivationSwish)
FlatNetwork (org.encog.neural.flat.FlatNetwork)
ActivationFunction (org.encog.engine.network.activation.ActivationFunction)
ActivationReLU (ml.shifu.shifu.core.dtrain.nn.ActivationReLU)

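A hedged usage sketch to close: restoring a network from a file. The file name is illustrative, the Shifu package paths are assumed from the source tree, and getFeatureSet is assumed to be the accessor paired with the setFeatureSet call in readNetwork above.

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

import ml.shifu.shifu.core.dtrain.dataset.BasicFloatNetwork;
import ml.shifu.shifu.core.dtrain.dataset.PersistBasicFloatNetwork;

public class ReadNetworkDemo {
    public static void main(String[] args) throws IOException {
        // "model.nn" is an illustrative name; the stream must contain data
        // written by the matching PersistBasicFloatNetwork serializer.
        try (DataInputStream in = new DataInputStream(new FileInputStream("model.nn"))) {
            BasicFloatNetwork network = new PersistBasicFloatNetwork().readNetwork(in);
            // getFeatureSet is assumed to mirror the setFeatureSet call made during deserialization.
            System.out.println("restored feature count: " + network.getFeatureSet().size());
        }
    }
}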