Use of org.dmg.pmml.neural_network.NeuralLayer in project shifu by ShifuML — class PMMLEncogNeuralNetworkModel, method adaptMLModelToPMML:
/**
 * The function which converts an Encog NeuralNetwork to a PMML
 * NeuralNetwork Model.
 * <p>
 * This function reads the weights from the Encog NeuralNetwork model and assigns them to the
 * corresponding connections of Neurons in the PMML model. Encog's flat network stores its layers
 * output-first, so layers are built in that order and the list is reversed at the end to match
 * the PMML convention.
 *
 * @param bNetwork
 *            Encog NeuralNetwork to convert
 * @param pmmlModel
 *            DataFieldUtility that provides supplementary data field for
 *            the model conversion
 * @return The generated PMML NeuralNetwork Model
 */
public org.dmg.pmml.neural_network.NeuralNetwork adaptMLModelToPMML(org.encog.neural.networks.BasicNetwork bNetwork,
        org.dmg.pmml.neural_network.NeuralNetwork pmmlModel) {
    // The flat representation exposes all weights as one array plus per-layer counts.
    network = bNetwork.getFlat();
    pmmlModel = new NeuralNetworkModelIntegrator().adaptPMML(pmmlModel);
    int[] layerCount = network.getLayerCounts(); // neurons per layer, including bias neurons
    int[] layerFeedCount = network.getLayerFeedCounts(); // neurons per layer, excluding bias neurons
    double[] weights = network.getWeights();
    ActivationFunction[] functionList = transformActivationFunction(network.getActivationFunctions());
    int numLayers = layerCount.length;
    int weightID = 0; // running cursor into the flat weight array
    List<NeuralLayer> layerList = new ArrayList<NeuralLayer>();
    // pmmlModel.withFunctionName(MiningFunctionType.REGRESSION);
    for (int i = 0; i < numLayers - 1; i++) {
        NeuralLayer layer = new NeuralLayer();
        layer.setNumberOfNeurons(layerFeedCount[i]);
        layer.setActivationFunction(functionList[i]);
        // Flat layer index i runs output-first; map it to a PMML layer id counted from the input side.
        int layerID = numLayers - i - 1;
        for (int j = 0; j < layerFeedCount[i]; j++) {
            Neuron neuron = new Neuron();
            neuron.setId(String.valueOf(layerID + "," + j));
            // One connection per non-bias neuron of the next (closer-to-input) flat layer.
            for (int k = 0; k < layerFeedCount[i + 1]; k++) {
                neuron.addConnections(new Connection(String.valueOf(layerID - 1 + "," + k), weights[weightID++]));
            }
            // weights
            // Slots beyond the feed count are bias weights (layerCount includes bias neurons,
            // layerFeedCount does not).
            int tmp = layerCount[i + 1] - layerFeedCount[i + 1];
            for (int k = 0; k < tmp; k++) {
                // NOTE(review): if tmp > 1 earlier bias values are overwritten — presumably Encog
                // has at most one bias neuron per layer; confirm against FlatNetwork layout.
                neuron.setBias(weights[weightID++]);
            }
            // bias neuron for each layer
            layer.addNeurons(neuron);
        }
        // finish build Neuron
        layerList.add(layer);
    }
    // finish build layer
    // reverse the layer list to fit the PMML format (input layer first)
    Collections.reverse(layerList);
    pmmlModel.addNeuralLayers(layerList.toArray(new NeuralLayer[layerList.size()]));
    // set neural output based on target id
    pmmlModel.setNeuralOutputs(PMMLAdapterCommonUtil.getOutputFields(pmmlModel.getMiningSchema(), numLayers - 1));
    return pmmlModel;
}
Use of org.dmg.pmml.neural_network.NeuralLayer in project jpmml-r by jpmml — class ElmNNConverter, method encodeModel:
@Override
public NeuralNetwork encodeModel(Schema schema) {
    // Fitted model object produced by the R "elmNN" package (extreme learning machine).
    RGenericVector elmNN = getObject();
    RDoubleVector inpweight = elmNN.getDoubleElement("inpweight"); // input-to-hidden weight matrix; first column is folded into the bias below
    RDoubleVector biashid = elmNN.getDoubleElement("biashid"); // per-hidden-neuron bias values
    RDoubleVector outweight = elmNN.getDoubleElement("outweight"); // hidden-to-output weights
    RStringVector actfun = elmNN.getStringElement("actfun"); // activation function name
    RDoubleVector nhid = elmNN.getDoubleElement("nhid"); // number of hidden neurons
    Label label = schema.getLabel();
    List<? extends Feature> features = schema.getFeatures();
    // Only the linear ("purelin") activation is supported by this converter.
    switch(actfun.asScalar()) {
        case "purelin":
            break;
        default:
            throw new IllegalArgumentException();
    }
    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);
    List<? extends NeuralEntity> entities = neuralInputs.getNeuralInputs();
    List<NeuralLayer> neuralLayers = new ArrayList<>(2);
    // Hidden layer: one neuron per row of the (nhid x (1 + #features)) input weight matrix.
    NeuralLayer hiddenNeuralLayer = new NeuralLayer();
    int rows = ValueUtil.asInt(nhid.asScalar());
    int columns = 1 + features.size();
    for (int row = 0; row < rows; row++) {
        List<Double> weights = FortranMatrixUtil.getRow(inpweight.getValues(), rows, columns, row);
        Double bias = biashid.getValue(row);
        // The leading matrix entry is added into the bias rather than used as a connection weight.
        bias += weights.remove(0);
        Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias).setId("hidden/" + String.valueOf(row + 1));
        hiddenNeuralLayer.addNeurons(neuron);
    }
    neuralLayers.add(hiddenNeuralLayer);
    entities = hiddenNeuralLayer.getNeurons();
    // Output layer: linear neurons with no bias term.
    NeuralLayer outputNeuralLayer = new NeuralLayer();
    // XXX: hard-coded to a single output column; multi-output models are not handled here.
    columns = 1;
    for (int column = 0; column < columns; column++) {
        List<Double> weights = FortranMatrixUtil.getColumn(outweight.getValues(), rows, columns, column);
        Double bias = null;
        Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias).setId("output/" + String.valueOf(column + 1));
        outputNeuralLayer.addNeurons(neuron);
    }
    neuralLayers.add(outputNeuralLayer);
    entities = outputNeuralLayer.getNeurons();
    NeuralOutputs neuralOutputs = NeuralNetworkUtil.createRegressionNeuralOutputs(entities, (ContinuousLabel) label);
    NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.REGRESSION, NeuralNetwork.ActivationFunction.IDENTITY, ModelUtil.createMiningSchema(label), neuralInputs, neuralLayers)
        .setNeuralOutputs(neuralOutputs);
    return neuralNetwork;
}
Use of org.dmg.pmml.neural_network.NeuralLayer in project jpmml-r by jpmml — class NNetConverter, method encodeNeuralLayer:
/**
 * Builds one PMML neural layer from a flat nnet weight vector.
 * <p>
 * For each of the {@code n} neurons, the vector holds one bias value followed by one connection
 * weight per incoming entity, so consecutive neurons are {@code entities.size() + 1} positions apart.
 *
 * @param prefix neuron id prefix; ids are {@code prefix + "/" + (i + 1)}
 * @param n number of neurons to create in this layer
 * @param entities incoming entities (previous layer's neurons or the neural inputs)
 * @param wts flat weight vector
 * @param offset starting position of this layer's weights within {@code wts}
 * @return the populated layer
 */
private static NeuralLayer encodeNeuralLayer(String prefix, int n, List<? extends NeuralEntity> entities, RDoubleVector wts, int offset) {
    NeuralLayer result = new NeuralLayer();
    // One bias value plus one weight per incoming entity per neuron.
    int stride = entities.size() + 1;
    int cursor = offset;
    for (int index = 0; index < n; index++) {
        Double bias = wts.getValue(cursor);
        List<Double> connectionWeights = wts.getValues().subList(cursor + 1, cursor + stride);
        String id = prefix + "/" + (index + 1);
        result.addNeurons(NeuralNetworkUtil.createNeuron(entities, connectionWeights, bias).setId(id));
        cursor += stride;
    }
    return result;
}
Use of org.dmg.pmml.neural_network.NeuralLayer in project jpmml-sparkml by jpmml — class MultilayerPerceptronClassificationModelConverter, method encodeModel:
@Override
public NeuralNetwork encodeModel(Schema schema) {
    MultilayerPerceptronClassificationModel model = getTransformer();
    // layers[i] = neuron count of layer i; layers[0] is the input layer, the last entry the output layer.
    int[] layers = model.layers();
    // All connection weights and biases concatenated into a single flat vector.
    Vector weights = model.weights();
    CategoricalLabel categoricalLabel = (CategoricalLabel) schema.getLabel();
    // The output layer must have exactly one neuron per target class.
    if (categoricalLabel.size() != layers[layers.length - 1]) {
        throw new IllegalArgumentException();
    }
    List<? extends Feature> features = schema.getFeatures();
    // The input layer must have exactly one neuron per feature.
    if (features.size() != layers[0]) {
        throw new IllegalArgumentException();
    }
    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);
    List<? extends Entity> entities = neuralInputs.getNeuralInputs();
    List<NeuralLayer> neuralLayers = new ArrayList<>();
    int weightPos = 0; // running cursor into the flat weight vector
    for (int layer = 1; layer < layers.length; layer++) {
        NeuralLayer neuralLayer = new NeuralLayer();
        int rows = entities.size(); // incoming entities (previous layer)
        int columns = layers[layer]; // neurons in this layer
        // Connection weights for this layer are laid out row-major: index = row * columns + column.
        List<List<Double>> weightMatrix = new ArrayList<>();
        for (int column = 0; column < columns; column++) {
            List<Double> weightVector = new ArrayList<>();
            for (int row = 0; row < rows; row++) {
                weightVector.add(weights.apply(weightPos + (row * columns) + column));
            }
            weightMatrix.add(weightVector);
        }
        weightPos += (rows * columns);
        // After the weight matrix come `columns` bias values, one per neuron of this layer.
        for (int column = 0; column < columns; column++) {
            List<Double> weightVector = weightMatrix.get(column);
            Double bias = weights.apply(weightPos);
            Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weightVector, bias).setId(String.valueOf(layer) + "/" + String.valueOf(column + 1));
            neuralLayer.addNeurons(neuron);
            weightPos++;
        }
        // Hidden layers inherit the network-wide logistic activation (set on the NeuralNetwork
        // below); the output layer uses identity activation plus softmax normalization.
        if (layer == (layers.length - 1)) {
            neuralLayer.setActivationFunction(NeuralNetwork.ActivationFunction.IDENTITY).setNormalizationMethod(NeuralNetwork.NormalizationMethod.SOFTMAX);
        }
        neuralLayers.add(neuralLayer);
        entities = neuralLayer.getNeurons();
    }
    // Sanity check: the flat weight vector must be consumed exactly.
    if (weightPos != weights.size()) {
        throw new IllegalArgumentException();
    }
    NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.CLASSIFICATION, NeuralNetwork.ActivationFunction.LOGISTIC, ModelUtil.createMiningSchema(categoricalLabel), neuralInputs, neuralLayers)
        .setNeuralOutputs(NeuralNetworkUtil.createClassificationNeuralOutputs(entities, categoricalLabel));
    return neuralNetwork;
}
Use of org.dmg.pmml.neural_network.NeuralLayer in project jpmml-r by jpmml — class NNConverter, method encodeModel:
@Override
public Model encodeModel(Schema schema) {
    // Fitted model object produced by the R "neuralnet" package.
    RGenericVector nn = getObject();
    RExp actFct = nn.getElement("act.fct");
    RBooleanVector linearOutput = nn.getBooleanElement("linear.output");
    RGenericVector weights = nn.getGenericElement("weights");
    RStringVector actFctType = actFct.getStringAttribute("type");
    // Select the first repetition (the weights element holds one entry per training repetition).
    weights = weights.getGenericValue(0);
    NeuralNetwork.ActivationFunction activationFunction = NeuralNetwork.ActivationFunction.LOGISTIC;
    // Only logistic and tanh activation functions are supported.
    switch(actFctType.asScalar()) {
        case "logistic":
            activationFunction = NeuralNetwork.ActivationFunction.LOGISTIC;
            break;
        case "tanh":
            activationFunction = NeuralNetwork.ActivationFunction.TANH;
            break;
        default:
            throw new IllegalArgumentException();
    }
    ContinuousLabel continuousLabel = (ContinuousLabel) schema.getLabel();
    List<? extends Feature> features = schema.getFeatures();
    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);
    List<NeuralLayer> neuralLayers = new ArrayList<>();
    List<? extends NeuralEntity> entities = neuralInputs.getNeuralInputs();
    for (int i = 0; i < weights.size(); i++) {
        // Every weight matrix except the last feeds a hidden layer.
        boolean hidden = (i < (weights.size() - 1));
        NeuralLayer neuralLayer = new NeuralLayer();
        // Hidden layers always get the activation function; the output layer only when
        // linear.output = FALSE (i.e. the activation is also applied to the output).
        if (hidden || (linearOutput != null && !linearOutput.asScalar())) {
            neuralLayer.setActivationFunction(activationFunction);
        }
        RDoubleVector layerWeights = weights.getDoubleValue(i);
        RIntegerVector layerDim = layerWeights.dim();
        int layerRows = layerDim.getValue(0);
        int layerColumns = layerDim.getValue(1);
        // Each column of the weight matrix describes one neuron of this layer.
        for (int j = 0; j < layerColumns; j++) {
            List<Double> neuronWeights = FortranMatrixUtil.getColumn(layerWeights.getValues(), layerRows, layerColumns, j);
            String id;
            if (hidden) {
                id = "hidden/" + String.valueOf(i) + "/" + String.valueOf(j);
            } else {
                id = "output/" + String.valueOf(j);
            }
            // The first element of the column is the intercept (bias); the rest are connection weights.
            Neuron neuron = NeuralNetworkUtil.createNeuron(entities, neuronWeights.subList(1, neuronWeights.size()), neuronWeights.get(0)).setId(id);
            neuralLayer.addNeurons(neuron);
        }
        neuralLayers.add(neuralLayer);
        entities = neuralLayer.getNeurons();
    }
    NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.REGRESSION, NeuralNetwork.ActivationFunction.IDENTITY, ModelUtil.createMiningSchema(continuousLabel), neuralInputs, neuralLayers)
        .setNeuralOutputs(NeuralNetworkUtil.createRegressionNeuralOutputs(entities, continuousLabel));
    return neuralNetwork;
}
Aggregations