Use of org.jpmml.converter.ContinuousLabel in project jpmml-r by jpmml.
The class GBMConverter, method encodeModel:
@Override
public MiningModel encodeModel(Schema schema) {
    RGenericVector gbm = getObject();

    // Extract the fitted GBM components from the R object
    RDoubleVector initF = (RDoubleVector) gbm.getValue("initF");
    RGenericVector trees = (RGenericVector) gbm.getValue("trees");
    RGenericVector c_splits = (RGenericVector) gbm.getValue("c.splits");
    RGenericVector distribution = (RGenericVector) gbm.getValue("distribution");

    RStringVector distributionName = (RStringVector) distribution.getValue("name");

    // Each member tree is a regression model with an anonymous continuous label
    Schema segmentSchema = new Schema(new ContinuousLabel(null, DataType.DOUBLE), schema.getFeatures());

    List<TreeModel> treeModels = new ArrayList<>();

    for (int i = 0; i < trees.size(); i++) {
        RGenericVector tree = (RGenericVector) trees.getValue(i);

        TreeModel treeModel = encodeTreeModel(MiningFunction.REGRESSION, tree, c_splits, segmentSchema);

        treeModels.add(treeModel);
    }

    MiningModel miningModel = encodeMiningModel(distributionName, treeModels, initF.asScalar(), schema);

    return miningModel;
}
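The body of encodeMiningModel is not shown above. Given the encodeBinaryClassification and encodeMultinomialClassification helpers further down, one plausible shape for it is a dispatch on the GBM distribution name. The following is only a sketch under that assumption; the distribution names, the coefficient value, and the encodeRegression helper are illustrative, not the project's actual code.

private MiningModel encodeMiningModel(RStringVector distributionName, List<TreeModel> treeModels, Double initF, Schema schema) {
    switch (distributionName.asScalar()) {
        case "gaussian":
            // hypothetical helper for plain regression
            return encodeRegression(treeModels, initF, schema);
        case "bernoulli":
            // coefficient 1d is an illustrative assumption
            return encodeBinaryClassification(treeModels, initF, 1d, schema);
        case "multinomial":
            return encodeMultinomialClassification(treeModels, initF, schema);
        default:
            throw new IllegalArgumentException(distributionName.asScalar());
    }
}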
Use of org.jpmml.converter.ContinuousLabel in project jpmml-r by jpmml.
The class ElmNNConverter, method encodeModel:
@Override
public NeuralNetwork encodeModel(Schema schema) {
    RGenericVector elmNN = getObject();

    // Extract the fitted elmNN components from the R object
    RDoubleVector inpweight = (RDoubleVector) elmNN.getValue("inpweight");
    RDoubleVector biashid = (RDoubleVector) elmNN.getValue("biashid");
    RDoubleVector outweight = (RDoubleVector) elmNN.getValue("outweight");
    RStringVector actfun = (RStringVector) elmNN.getValue("actfun");
    RDoubleVector nhid = (RDoubleVector) elmNN.getValue("nhid");

    Label label = schema.getLabel();
    List<? extends Feature> features = schema.getFeatures();

    // Only the linear ("purelin") activation function is supported
    switch (actfun.asScalar()) {
        case "purelin":
            break;
        default:
            throw new IllegalArgumentException();
    }

    NeuralInputs neuralInputs = NeuralNetworkUtil.createNeuralInputs(features, DataType.DOUBLE);

    List<? extends Entity> entities = neuralInputs.getNeuralInputs();

    List<NeuralLayer> neuralLayers = new ArrayList<>(2);

    // Hidden layer: one neuron per hidden unit, weights taken row-wise from inpweight
    NeuralLayer hiddenNeuralLayer = new NeuralLayer();

    int rows = ValueUtil.asInt(nhid.asScalar());
    int columns = 1 + features.size();

    for (int row = 0; row < rows; row++) {
        List<Double> weights = FortranMatrixUtil.getRow(inpweight.getValues(), rows, columns, row);

        // The first inpweight column is folded into the hidden bias
        Double bias = biashid.getValue(row);
        bias += weights.remove(0);

        Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias)
            .setId("hidden/" + String.valueOf(row + 1));

        hiddenNeuralLayer.addNeurons(neuron);
    }

    neuralLayers.add(hiddenNeuralLayer);

    entities = hiddenNeuralLayer.getNeurons();

    // Output layer: a single neuron, weights taken column-wise from outweight
    NeuralLayer outputNeuralLayer = new NeuralLayer();

    // XXX
    columns = 1;

    for (int column = 0; column < columns; column++) {
        List<Double> weights = FortranMatrixUtil.getColumn(outweight.getValues(), rows, columns, column);

        Double bias = Double.NaN;

        Neuron neuron = NeuralNetworkUtil.createNeuron(entities, weights, bias)
            .setId("output/" + String.valueOf(column + 1));

        outputNeuralLayer.addNeurons(neuron);
    }

    neuralLayers.add(outputNeuralLayer);

    entities = outputNeuralLayer.getNeurons();

    NeuralOutputs neuralOutputs = NeuralNetworkUtil.createRegressionNeuralOutputs(entities, (ContinuousLabel) label);

    NeuralNetwork neuralNetwork = new NeuralNetwork(MiningFunction.REGRESSION, NeuralNetwork.ActivationFunction.IDENTITY, ModelUtil.createMiningSchema(label), neuralInputs, neuralLayers)
        .setNeuralOutputs(neuralOutputs);

    return neuralNetwork;
}
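The inpweight and outweight matrices are stored in column-major (Fortran) order, which is why rows are read with FortranMatrixUtil.getRow and columns with FortranMatrixUtil.getColumn. As a rough illustration of what such a row lookup computes over a flat list of rows x columns values in column-major order (a sketch, not the library's source):

static <E> List<E> getRow(List<E> values, int rows, int columns, int row) {
    List<E> result = new ArrayList<>(columns);
    for (int column = 0; column < columns; column++) {
        // In column-major storage, element (row, column) sits at index column * rows + row
        result.add(values.get(column * rows + row));
    }
    return result;
}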
Use of org.jpmml.converter.ContinuousLabel in project jpmml-sparkml by jpmml.
The class ModelConverter, method encodeSchema:
public Schema encodeSchema(SparkMLEncoder encoder) {
    T model = getTransformer();

    Label label = null;

    if (model instanceof HasLabelCol) {
        HasLabelCol hasLabelCol = (HasLabelCol) model;

        String labelCol = hasLabelCol.getLabelCol();

        Feature feature = encoder.getOnlyFeature(labelCol);

        MiningFunction miningFunction = getMiningFunction();
        switch (miningFunction) {
            case CLASSIFICATION:
                {
                    if (feature instanceof CategoricalFeature) {
                        CategoricalFeature categoricalFeature = (CategoricalFeature) feature;

                        DataField dataField = encoder.getDataField(categoricalFeature.getName());

                        label = new CategoricalLabel(dataField);
                    } else if (feature instanceof ContinuousFeature) {
                        ContinuousFeature continuousFeature = (ContinuousFeature) feature;

                        // A numeric label column is re-declared as categorical,
                        // with one category per class index
                        int numClasses = 2;

                        if (model instanceof ClassificationModel) {
                            ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>) model;

                            numClasses = classificationModel.numClasses();
                        }

                        List<String> categories = new ArrayList<>();

                        for (int i = 0; i < numClasses; i++) {
                            categories.add(String.valueOf(i));
                        }

                        Field<?> field = encoder.toCategorical(continuousFeature.getName(), categories);

                        encoder.putOnlyFeature(labelCol, new CategoricalFeature(encoder, field, categories));

                        label = new CategoricalLabel(field.getName(), field.getDataType(), categories);
                    } else {
                        throw new IllegalArgumentException("Expected a categorical or categorical-like continuous feature, got " + feature);
                    }
                }
                break;
            case REGRESSION:
                {
                    // The label column is declared as a continuous double field
                    Field<?> field = encoder.toContinuous(feature.getName());

                    field.setDataType(DataType.DOUBLE);

                    label = new ContinuousLabel(field.getName(), field.getDataType());
                }
                break;
            default:
                throw new IllegalArgumentException("Mining function " + miningFunction + " is not supported");
        }
    }

    if (model instanceof ClassificationModel) {
        ClassificationModel<?, ?> classificationModel = (ClassificationModel<?, ?>) model;

        CategoricalLabel categoricalLabel = (CategoricalLabel) label;

        // Sanity check: the declared categories must match the model's class count
        int numClasses = classificationModel.numClasses();
        if (numClasses != categoricalLabel.size()) {
            throw new IllegalArgumentException("Expected " + numClasses + " target categories, got " + categoricalLabel.size() + " target categories");
        }
    }

    String featuresCol = model.getFeaturesCol();

    List<Feature> features = encoder.getFeatures(featuresCol);

    if (model instanceof PredictionModel) {
        PredictionModel<?, ?> predictionModel = (PredictionModel<?, ?>) model;

        // Sanity check: the encoded feature list must match the model's feature count
        int numFeatures = predictionModel.numFeatures();
        if (numFeatures != -1 && features.size() != numFeatures) {
            throw new IllegalArgumentException("Expected " + numFeatures + " features, got " + features.size() + " features");
        }
    }

    Schema result = new Schema(label, features);

    return result;
}
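The two branches above yield the two org.jpmml.converter label shapes that downstream model converters consume. For illustration only (the field name "y" and data types are hypothetical, mirroring the constructors used above):

// Regression: a continuous double label
Label regressionLabel = new ContinuousLabel(FieldName.create("y"), DataType.DOUBLE);

// Classification: a categorical label with one category per class index
Label classificationLabel = new CategoricalLabel(FieldName.create("y"), DataType.DOUBLE, Arrays.asList("0", "1"));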
Use of org.jpmml.converter.ContinuousLabel in project jpmml-r by jpmml.
The class GBMConverter, method encodeBinaryClassification:
private MiningModel encodeBinaryClassification(List<TreeModel> treeModels, Double initF, double coefficient, Schema schema) {
    Schema segmentSchema = new Schema(new ContinuousLabel(null, DataType.DOUBLE), schema.getFeatures());

    // The tree ensemble produces a raw "gbmValue" score, which is then mapped to class probabilities through a logistic link
    MiningModel miningModel = createMiningModel(treeModels, initF, segmentSchema)
        .setOutput(ModelUtil.createPredictedOutput(FieldName.create("gbmValue"), OpType.CONTINUOUS, DataType.DOUBLE));

    return MiningModelUtil.createBinaryLogisticClassification(miningModel, -coefficient, 0d, RegressionModel.NormalizationMethod.LOGIT, true, schema);
}
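The LOGIT normalization applied by MiningModelUtil.createBinaryLogisticClassification corresponds to a plain sigmoid over the scaled ensemble score. A minimal numeric sketch of that mapping, using the arguments passed above (an illustration, not the library implementation):

static double binaryLogistic(double gbmValue, double coefficient) {
    // Linear term as built above: slope = -coefficient, intercept = 0
    double z = -coefficient * gbmValue;
    // LOGIT normalization: p = 1 / (1 + exp(-z))
    return 1d / (1d + Math.exp(-z));
}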
Use of org.jpmml.converter.ContinuousLabel in project jpmml-r by jpmml.
The class GBMConverter, method encodeMultinomialClassification:
private MiningModel encodeMultinomialClassification(List<TreeModel> treeModels, Double initF, Schema schema) {
    CategoricalLabel categoricalLabel = (CategoricalLabel) schema.getLabel();

    Schema segmentSchema = new Schema(new ContinuousLabel(null, DataType.DOUBLE), schema.getFeatures());

    List<Model> miningModels = new ArrayList<>();

    // One regression ensemble per target category; the per-class "gbmValue(<category>)" scores are normalized with softmax
    for (int i = 0, columns = categoricalLabel.size(), rows = (treeModels.size() / columns); i < columns; i++) {
        MiningModel miningModel = createMiningModel(CMatrixUtil.getColumn(treeModels, rows, columns, i), initF, segmentSchema)
            .setOutput(ModelUtil.createPredictedOutput(FieldName.create("gbmValue(" + categoricalLabel.getValue(i) + ")"), OpType.CONTINUOUS, DataType.DOUBLE));

        miningModels.add(miningModel);
    }

    return MiningModelUtil.createClassification(miningModels, RegressionModel.NormalizationMethod.SOFTMAX, true, schema);
}
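In the multinomial case the flat treeModels list holds one tree per class for every boosting iteration, so CMatrixUtil.getColumn is used to pull out the trees belonging to class i. As a rough sketch of that lookup over a row-major ("C order") matrix (an assumption about the layout, not the library's source):

static <E> List<E> getColumn(List<E> values, int rows, int columns, int column) {
    List<E> result = new ArrayList<>(rows);
    for (int row = 0; row < rows; row++) {
        // In row-major storage, element (row, column) sits at index row * columns + column
        result.add(values.get(row * columns + column));
    }
    return result;
}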