Use of org.apache.ignite.ml.inference.parser.IgniteModelParser in project ignite by apache.
The class IgniteModelDistributedInferenceExample, method main.
/**
* Run example.
*/
public static void main(String... args) throws IOException, ExecutionException, InterruptedException {
    System.out.println();
    System.out.println(">>> Linear regression model over cache based dataset usage example started.");
    // Start ignite grid.
    try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
        System.out.println(">>> Ignite grid started.");
        IgniteCache<Integer, Vector> dataCache = null;
        try {
            dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.MORTALITY_DATA);
            System.out.println(">>> Create new linear regression trainer object.");
            LinearRegressionLSQRTrainer trainer = new LinearRegressionLSQRTrainer();
            System.out.println(">>> Perform the training to get the model.");
            LinearRegressionModel mdl = trainer.fit(ignite, dataCache,
                new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST));
            System.out.println(">>> Linear regression model: " + mdl);
            System.out.println(">>> Preparing model reader and model parser.");
            ModelReader reader = new InMemoryModelReader(mdl);
            ModelParser<Vector, Double, ?> parser = new IgniteModelParser<>();
            try (Model<Vector, Future<Double>> infMdl = new IgniteDistributedModelBuilder(ignite, 4, 4)
                .build(reader, parser)) {
                System.out.println(">>> Inference model is ready.");
                System.out.println(">>> ---------------------------------");
                System.out.println(">>> | Prediction\t| Ground Truth\t|");
                System.out.println(">>> ---------------------------------");
                try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
                    for (Cache.Entry<Integer, Vector> observation : observations) {
                        Vector val = observation.getValue();
                        Vector inputs = val.copyOfRange(1, val.size());
                        double groundTruth = val.get(0);
                        double prediction = infMdl.predict(inputs).get();
                        System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", prediction, groundTruth);
                    }
                }
            }
            System.out.println(">>> ---------------------------------");
            System.out.println(">>> Linear regression model over cache based dataset usage example completed.");
        } finally {
            if (dataCache != null)
                dataCache.destroy();
        }
    } finally {
        System.out.flush();
    }
}
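Since the distributed model returns Future<Double>, predictions do not have to be resolved one row at a time as in the loop above. The following is a minimal sketch, not part of the original example, that first submits all requests and only then waits for the results; it assumes the same infMdl and dataCache variables from the example, plus java.util.List/ArrayList and org.apache.ignite.lang.IgniteBiTuple to pair each ground truth with its pending prediction.

// A sketch, not part of the original example: enqueue all prediction requests first,
// then resolve the futures, so requests can overlap across the deployed model instances.
List<IgniteBiTuple<Double, Future<Double>>> pending = new ArrayList<>();

try (QueryCursor<Cache.Entry<Integer, Vector>> observations = dataCache.query(new ScanQuery<>())) {
    for (Cache.Entry<Integer, Vector> observation : observations) {
        Vector val = observation.getValue();

        // predict() returns a Future, so this loop only submits the requests.
        pending.add(new IgniteBiTuple<>(val.get(0), infMdl.predict(val.copyOfRange(1, val.size()))));
    }
}

// Resolve the futures once all requests are in flight.
for (IgniteBiTuple<Double, Future<Double>> p : pending)
    System.out.printf(">>> | %.4f\t\t| %.4f\t\t|\n", p.getValue().get(), p.getKey());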
Use of org.apache.ignite.ml.inference.parser.IgniteModelParser in project ignite by apache.
The class IgniteModelStorageUtil, method saveModelDescriptor.
/**
* Saves the model descriptor into the descriptor storage if a model with the given name has not been saved yet,
* otherwise throws an exception. To save a model under the same name, remove the old model first.
*
* @param ignite Ignite instance.
* @param name Model name.
* @param mdlId Model identifier used to find model in model storage (only with {@link ModelStorageModelReader}).
* @throws IllegalArgumentException If model with given name was already saved.
*/
private static void saveModelDescriptor(Ignite ignite, String name, UUID mdlId) {
    ModelDescriptorStorage descStorage = new ModelDescriptorStorageFactory().getModelDescriptorStorage(ignite);

    boolean saved = descStorage.putIfAbsent(name, new ModelDescriptor(
        mdlId.toString(),
        null,
        new ModelSignature(null, null, null),
        new ModelStorageModelReader(IGNITE_MDL_FOLDER + "/" + mdlId),
        new IgniteModelParser<>()
    ));

    if (!saved)
        throw new IllegalArgumentException("Model descriptor with given name already exists [name=" + name + "]");
}
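For the caller this means that re-saving a model under an existing name has to be preceded by an explicit remove. Below is a minimal sketch of that flow, assuming the public saveModel and removeModel helpers of IgniteModelStorageUtil and a trivial IgniteModel for illustration; verify the exact method signatures for your Ignite version.

// A minimal sketch, assuming IgniteModelStorageUtil.saveModel/removeModel are available
// as public helpers (check the exact signatures for your Ignite version).
IgniteModel<Vector, Double> mdl = v -> v.get(0); // placeholder model for illustration

try {
    IgniteModelStorageUtil.saveModel(ignite, mdl, "my-model");
}
catch (IllegalArgumentException e) {
    // A model with this name already exists: remove it and save again.
    IgniteModelStorageUtil.removeModel(ignite, "my-model");
    IgniteModelStorageUtil.saveModel(ignite, mdl, "my-model");
}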
Use of org.apache.ignite.ml.inference.parser.IgniteModelParser in project ignite by apache.
The class ModelStorageExample, method main.
/**
* Run example.
*/
public static void main(String... args) throws IOException, ClassNotFoundException {
    try (Ignite ignite = Ignition.start("examples/config/example-ignite-ml.xml")) {
        System.out.println(">>> Ignite grid started.");
        ModelStorage storage = new ModelStorageFactory().getModelStorage(ignite);
        ModelDescriptorStorage descStorage = new ModelDescriptorStorageFactory().getModelDescriptorStorage(ignite);
        System.out.println("Saving model into model storage...");
        byte[] mdl = serialize((IgniteModel<byte[], byte[]>) i -> i);
        storage.mkdirs("/");
        storage.putFile("/my_model", mdl);
        System.out.println("Saving model descriptor into model descriptor storage...");
        ModelDescriptor desc = new ModelDescriptor(
            "MyModel",
            "My Cool Model",
            new ModelSignature("", "", ""),
            new ModelStorageModelReader("/my_model"),
            new IgniteModelParser<>()
        );
        descStorage.put("my_model", desc);
        System.out.println("List saved models...");
        for (IgniteBiTuple<String, ModelDescriptor> model : descStorage)
            System.out.println("-> {'" + model.getKey() + "' : " + model.getValue() + "}");
        System.out.println("Load saved model descriptor...");
        desc = descStorage.get("my_model");
        System.out.println("Build inference model...");
        SingleModelBuilder mdlBuilder = new SingleModelBuilder();
        try (Model<byte[], byte[]> infMdl = mdlBuilder.build(desc.getReader(), desc.getParser())) {
            System.out.println("Make inference...");
            for (int i = 0; i < 10; i++) {
                Integer res = deserialize(infMdl.predict(serialize(i)));
                System.out.println(i + " -> " + res);
            }
        }
    } finally {
        System.out.flush();
    }
}
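The serialize and deserialize helpers called above are not part of this excerpt. A minimal sketch of what they could look like, assuming plain Java object serialization; the example's actual helpers may differ.

// Sketch of the helpers assumed by the example above: plain Java serialization,
// sufficient for the Integer inputs and the model lambda stored in model storage.
private static <T> byte[] serialize(T obj) throws IOException {
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
         ObjectOutputStream oos = new ObjectOutputStream(baos)) {
        oos.writeObject(obj);
        oos.flush();
        return baos.toByteArray();
    }
}

private static <T> T deserialize(byte[] bytes) throws IOException, ClassNotFoundException {
    try (ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
         ObjectInputStream ois = new ObjectInputStream(bais)) {
        return (T)ois.readObject();
    }
}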