Usage of org.dmg.pmml.neural_network.NeuralInputs in the jpmml-r project (by jpmml):
the ElmNNConverter class, encodeModel method.
// Converts a fitted elmNN (Extreme Learning Machine) R model into a PMML NeuralNetwork.
// Only the identity ("purelin") activation function is supported.
@Override
public NeuralNetwork encodeModel(Schema schema) {
RGenericVector elmNN = getObject();
// Raw model components as stored by the R fit object:
// inpweight - input-to-hidden weights (row-major per hidden neuron, with a leading bias-like column; see below)
// biashid   - per-hidden-neuron bias terms
// outweight - hidden-to-output weights
// actfun    - name of the activation function used at training time
// nhid      - number of hidden neurons
RDoubleVector inpweight = (RDoubleVector) elmNN.getValue("inpweight");
RDoubleVector biashid = (RDoubleVector) elmNN.getValue("biashid");
RDoubleVector outweight = (RDoubleVector) elmNN.getValue("outweight");
RStringVector actfun = (RStringVector) elmNN.getValue("actfun");
RDoubleVector nhid = (RDoubleVector) elmNN.getValue("nhid");
Label label = schema.getLabel();
List<? extends Feature> features = schema.getFeatures();
// Reject any activation other than linear ("purelin"); the PMML below hard-codes IDENTITY.
switch(actfun.asScalar()) {
case "purelin":
break;
default:
throw new IllegalArgumentException();
}
NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);
List<? extends Entity> entities = neuralInputs.getNeuralInputs();
// Exactly two layers are built: one hidden layer, one output layer.
List<NeuralLayer> neuralLayers = new ArrayList<>(2);
NeuralLayer hiddenNeuralLayer = new NeuralLayer();
int rows = ValueUtil.asInt(nhid.asScalar());
// Each row of inpweight has (1 + featureCount) entries; the extra leading entry
// is folded into the neuron's bias below.
int columns = 1 + features.size();
for (int row = 0; row < rows; row++) {
// Extract one hidden neuron's incoming weights from the column-major (Fortran-order) matrix.
List<Double> weights = FortranMatrixUtil.getRow(inpweight.getValues(), rows, columns, row);
Double bias = biashid.getValue(row);
// The first matrix column acts as an additional bias contribution, not a feature weight.
bias += weights.remove(0);
Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias).setId("hidden/" + String.valueOf(row + 1));
hiddenNeuralLayer.addNeurons(neuron);
}
neuralLayers.add(hiddenNeuralLayer);
entities = hiddenNeuralLayer.getNeurons();
NeuralLayer outputNeuralLayer = new NeuralLayer();
// XXX
// NOTE(review): output width is hard-coded to a single neuron (regression only);
// multi-output elmNN models are presumably not supported here - confirm.
columns = 1;
for (int column = 0; column < columns; column++) {
List<Double> weights = FortranMatrixUtil.getColumn(outweight.getValues(), rows, columns, column);
// NaN bias: the elmNN output layer carries no bias term, so none is emitted.
Double bias = Double.NaN;
Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias).setId("output/" + String.valueOf(column + 1));
outputNeuralLayer.addNeurons(neuron);
}
neuralLayers.add(outputNeuralLayer);
entities = outputNeuralLayer.getNeurons();
NeuralOutputs neuralOutputs = NeuralNetworkUtil.createRegressionNeuralOutputs(entities, (ContinuousLabel) label);
NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.REGRESSION, NeuralNetwork.ActivationFunction.IDENTITY, ModelUtil.createMiningSchema(label), neuralInputs, neuralLayers).setNeuralOutputs(neuralOutputs);
return neuralNetwork;
}
Usage of org.dmg.pmml.neural_network.NeuralInputs in the jpmml-sparkml project (by jpmml):
the MultilayerPerceptronClassificationModelConverter class, encodeModel method.
// Converts a Spark ML multilayer perceptron classifier into a PMML NeuralNetwork.
// Spark stores all parameters in one flat vector: for each layer, first the
// (fanIn x fanOut) connection weights, then fanOut bias terms.
@Override
public NeuralNetwork encodeModel(Schema schema) {
MultilayerPerceptronClassificationModel model = getTransformer();
int[] layers = model.layers();
Vector weights = model.weights();
CategoricalLabel categoricalLabel = (CategoricalLabel) schema.getLabel();
// The width of the final layer must equal the number of target categories.
if (categoricalLabel.size() != layers[layers.length - 1]) {
throw new IllegalArgumentException();
}
List<? extends Feature> features = schema.getFeatures();
// The width of the first layer must equal the number of input features.
if (features.size() != layers[0]) {
throw new IllegalArgumentException();
}
NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);
List<? extends Entity> entities = neuralInputs.getNeuralInputs();
List<NeuralLayer> neuralLayers = new ArrayList<>();
// Running read position into the flat parameter vector.
int offset = 0;
for (int i = 1; i < layers.length; i++) {
NeuralLayer neuralLayer = new NeuralLayer();
int fanIn = entities.size();
int fanOut = layers[i];
// Phase 1: gather each output neuron's incoming weight vector. The weight
// block is laid out row-major over (fanIn x fanOut), so neuron j's weights
// sit at stride fanOut.
List<List<Double>> incoming = new ArrayList<>();
for (int j = 0; j < fanOut; j++) {
List<Double> weightVector = new ArrayList<>();
for (int k = 0; k < fanIn; k++) {
weightVector.add(weights.apply(offset + (k * fanOut) + j));
}
incoming.add(weightVector);
}
offset += (fanIn * fanOut);
// Phase 2: the bias terms follow the weight block, one per output neuron.
for (int j = 0; j < fanOut; j++) {
Double bias = weights.apply(offset);
Neuron neuron = NeuralNetworkUtil.createNeuron(entities, incoming.get(j), bias).setId(String.valueOf(i) + "/" + String.valueOf(j + 1));
neuralLayer.addNeurons(neuron);
offset++;
}
// The final layer uses identity activations normalized by softmax.
if (i == (layers.length - 1)) {
neuralLayer.setActivationFunction(NeuralNetwork.ActivationFunction.IDENTITY).setNormalizationMethod(NeuralNetwork.NormalizationMethod.SOFTMAX);
}
neuralLayers.add(neuralLayer);
entities = neuralLayer.getNeurons();
}
// Sanity check: the whole parameter vector must have been consumed.
if (offset != weights.size()) {
throw new IllegalArgumentException();
}
NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.CLASSIFICATION, NeuralNetwork.ActivationFunction.LOGISTIC, ModelUtil.createMiningSchema(categoricalLabel), neuralInputs, neuralLayers).setNeuralOutputs(NeuralNetworkUtil.createClassificationNeuralOutputs(entities, categoricalLabel));
return neuralNetwork;
}
Usage of org.dmg.pmml.neural_network.NeuralInputs in the shifu project (by ShifuML):
the NeuralNetworkModelIntegrator class, getNeuralInputs method.
/**
 * Builds the NeuralInputs element for the given network by wiring each "leaf"
 * derived field (one that no other local transformation consumes, and whose
 * root source field appears in the MiningSchema) to a neural input, plus one
 * trailing constant bias input.
 *
 * @param model the neural network whose local transformations and mining schema are inspected
 * @return the populated NeuralInputs element
 */
private NeuralInputs getNeuralInputs(final NeuralNetwork model) {
NeuralInputs nnInputs = new NeuralInputs();
// derived field name -> the source field its expression reads
HashMap<FieldName, FieldName> reverseMiningTransformMap = new HashMap<FieldName, FieldName>();
// source field -> all derived fields produced from it
HashMap<FieldName, List<FieldName>> treeMapOfTransform = new HashMap<FieldName, List<FieldName>>();
for (DerivedField dField : model.getLocalTransformations().getDerivedFields()) {
FieldName parentField = null;
if (dField.getExpression() instanceof NormContinuous) {
// z-scale normalization on numerical variables
parentField = ((NormContinuous) dField.getExpression()).getField();
reverseMiningTransformMap.put(dField.getName(), parentField);
} else if (dField.getExpression() instanceof MapValues) {
// bin map on categorical variables
parentField = ((MapValues) dField.getExpression()).getFieldColumnPairs().get(0).getField();
reverseMiningTransformMap.put(dField.getName(), parentField);
} else if (dField.getExpression() instanceof Discretize) {
parentField = ((Discretize) dField.getExpression()).getField();
reverseMiningTransformMap.put(dField.getName(), parentField);
}
// Group the derived field under its source. parentField stays null for
// unhandled expression types; those group under the null key, preserving
// the original behavior.
treeMapOfTransform.computeIfAbsent(parentField, k -> new ArrayList<FieldName>()).add(dField.getName());
}
List<MiningField> miningList = model.getMiningSchema().getMiningFields();
int index = 0;
for (DerivedField dField : model.getLocalTransformations().getDerivedFields()) {
List<FieldName> children = treeMapOfTransform.get(dField.getName());
// A leaf derived field is consumed by no further transformation.
boolean isLeaf = (children == null || children.isEmpty());
FieldName root = getRoot(dField.getName(), reverseMiningTransformMap);
if (isLeaf && isRootInMiningList(root, miningList)) {
DerivedField field = new DerivedField(OpType.CONTINUOUS, DataType.DOUBLE).setName(dField.getName()).setExpression(new FieldRef(dField.getName()));
nnInputs.addNeuralInputs(new NeuralInput("0," + (index++), field));
}
}
// Append the constant bias input last.
DerivedField field = new DerivedField(OpType.CONTINUOUS, DataType.DOUBLE).setName(new FieldName(PluginConstants.biasValue)).setExpression(new FieldRef(new FieldName(PluginConstants.biasValue)));
nnInputs.addNeuralInputs(new NeuralInput(PluginConstants.biasValue, field));
return nnInputs;
}
Aggregations