Use of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai: the class DeepLearningTest, method elasticAveraging.
@Ignore
@Test
public void elasticAveraging() {
    DeepLearningParameters dl;
    Frame frTrain;
    int N = 2;
    DeepLearningModel[] models = new DeepLearningModel[N];
    dl = new DeepLearningParameters();
    Scope.enter();
    boolean covtype = true; //new Random().nextBoolean();
    if (covtype) {
        frTrain = parse_test_file("./smalldata/covtype/covtype.20k.data");
        Vec resp = frTrain.lastVec().toCategoricalVec();
        frTrain.remove(frTrain.vecs().length - 1).remove();
        frTrain.add("Response", resp);
    } else {
        frTrain = parse_test_file("./bigdata/server/HIGGS.csv");
        Vec resp = frTrain.vecs()[0].toCategoricalVec();
        frTrain.remove(0).remove();
        frTrain.prepend("Response", resp);
    }
    DKV.put(frTrain);
    try {
        for (int i = 0; i < N; ++i) {
            dl._train = frTrain._key;
            String[] n = ((Frame) DKV.getGet(dl._train)).names();
            if (covtype) {
                dl._response_column = n[n.length - 1];
                dl._ignored_columns = null;
            } else {
                dl._response_column = n[0];
                dl._ignored_columns = new String[] { n[22], n[23], n[24], n[25], n[26], n[27], n[28] };
            }
            dl._export_weights_and_biases = true;
            dl._hidden = new int[] { 64, 64 };
            dl._quiet_mode = false;
            dl._max_w2 = 10;
            dl._l1 = 1e-5;
            dl._reproducible = false;
            // every node only has a piece of the data
            dl._replicate_training_data = false;
            // use multi-node
            dl._force_load_balance = true;
            dl._epochs = 10;
            // 100 M/R steps per epoch
            dl._train_samples_per_iteration = frTrain.numRows() / 100;
            dl._elastic_averaging = i == 1;
            dl._elastic_averaging_moving_rate = 0.999;
            dl._elastic_averaging_regularization = 1e-4;
            // Invoke DL and block till the end
            DeepLearning job = new DeepLearning(dl);
            // Get the model
            models[i] = job.trainModel().get();
        }
        for (int i = 0; i < N; ++i) {
            if (models[i] != null)
                Log.info(models[i]._output._training_metrics.cm().table().toString());
        }
        if (models[0] != null)
            Log.info("Without elastic averaging: error=" + models[0]._output._training_metrics.cm().err());
        if (models[1] != null)
            Log.info("With elastic averaging: error=" + models[1]._output._training_metrics.cm().err());
        // if (models[0] != null && models[1] != null)
        //     Assert.assertTrue(models[1]._output._training_metrics.cm().err() < models[0]._output._training_metrics.cm().err());
    } finally {
        frTrain.remove();
        for (int i = 0; i < N; ++i)
            if (models[i] != null)
                models[i].delete();
        Scope.exit();
    }
}
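The part of this test that is specific to elastic averaging comes down to three parameters; everything else is generic multi-node setup. Below is a distilled sketch, not code from the repository: the frame, response column name, and hidden-layer layout are placeholders borrowed from the test above, and the comments describing the two elastic-averaging knobs follow the EASGD formulation and are an assumption rather than something stated in the test.

    // Hedged sketch: only the elastic-averaging knobs exercised by the test above.
    DeepLearningParameters p = new DeepLearningParameters();
    p._train = frTrain._key;                    // any training Frame already published via DKV.put
    p._response_column = "Response";            // placeholder response column
    p._hidden = new int[] { 64, 64 };
    p._replicate_training_data = false;         // each node trains on its own shard of the data
    p._elastic_averaging = true;                // enable elastic averaging between node models
    p._elastic_averaging_moving_rate = 0.999;   // moving rate of the shared (averaged) model (assumed EASGD semantics)
    p._elastic_averaging_regularization = 1e-4; // pull of local weights toward the shared model (assumed EASGD semantics)
    DeepLearningModel m = new DeepLearning(p).trainModel().get();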
Use of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai: the class DeepLearningTest, method testVarimp.
@Test
public void testVarimp() {
    Frame tfr = null;
    DeepLearningModel dl = null;
    try {
        tfr = parse_test_file("./smalldata/iris/iris.csv");
        DeepLearningParameters parms = new DeepLearningParameters();
        parms._train = tfr._key;
        parms._epochs = 100;
        parms._response_column = "C5";
        parms._reproducible = true;
        parms._classification_stop = 0.7;
        parms._score_duty_cycle = 1;
        parms._score_interval = 0;
        parms._hidden = new int[] { 100, 100 };
        parms._seed = 0xdecaf;
        parms._variable_importances = true;
        // Build a first model; all remaining models should be equal
        dl = new DeepLearning(parms).trainModel().get();
        Assert.assertTrue(dl.varImp()._varimp != null);
        // for code coverage only
        Log.info(dl.model_info().toStringAll());
        // all features matter
        Assert.assertTrue(ArrayUtils.minValue(dl.varImp()._varimp) > 0.5);
        // all features matter
        Assert.assertTrue(ArrayUtils.maxValue(dl.varImp()._varimp) <= 1);
    } finally {
        if (tfr != null)
            tfr.delete();
        if (dl != null)
            dl.delete();
    }
}
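Assertions aside, reading variable importances back only needs the accessors already used above. A minimal sketch, assuming a parms object configured as in the test:

    // Sketch: enable and read variable importances (accessors as used in the test above).
    parms._variable_importances = true;
    DeepLearningModel model = new DeepLearning(parms).trainModel().get();
    Log.info("min importance: " + ArrayUtils.minValue(model.varImp()._varimp));
    Log.info("max importance: " + ArrayUtils.maxValue(model.varImp()._varimp));
    model.delete();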
Use of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai: the class DeepLearningTest, method testCheckpointOverwriteWithBestModel.
// Check that the restarted model honors overwrite_with_best_model
@Test
public void testCheckpointOverwriteWithBestModel() {
    Frame tfr = null;
    DeepLearningModel dl = null;
    DeepLearningModel dl2 = null;
    Frame train = null, valid = null;
    try {
        tfr = parse_test_file("./smalldata/iris/iris.csv");
        FrameSplitter fs = new FrameSplitter(tfr, new double[] { 0.8 }, new Key[] { Key.make("train"), Key.make("valid") }, null);
        fs.compute2();
        train = fs.getResult()[0];
        valid = fs.getResult()[1];
        DeepLearningParameters parms = new DeepLearningParameters();
        parms._train = train._key;
        parms._valid = valid._key;
        parms._epochs = 1;
        parms._response_column = "C5";
        parms._reproducible = true;
        parms._hidden = new int[] { 50, 50 };
        parms._seed = 0xdecaf;
        parms._train_samples_per_iteration = 0;
        parms._score_duty_cycle = 1;
        parms._score_interval = 0;
        parms._stopping_rounds = 0;
        parms._overwrite_with_best_model = true;
        dl = new DeepLearning(parms).trainModel().get();
        double ll1 = ((ModelMetricsMultinomial) dl._output._validation_metrics).logloss();
        DeepLearningParameters parms2 = (DeepLearningParameters) parms.clone();
        parms2._epochs = 10;
        parms2._checkpoint = dl._key;
        dl2 = new DeepLearning(parms2).trainModel().get();
        double ll2 = ((ModelMetricsMultinomial) dl2._output._validation_metrics).logloss();
        Assert.assertTrue(ll2 <= ll1);
    } finally {
        if (tfr != null)
            tfr.delete();
        if (dl != null)
            dl.delete();
        if (dl2 != null)
            dl2.delete();
        if (train != null)
            train.delete();
        if (valid != null)
            valid.delete();
    }
}
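The checkpointing pattern reduces to cloning the original parameters, pointing _checkpoint at the finished model's key, and raising _epochs. A minimal sketch, assuming the first model dl and its parms from the test above:

    // Sketch: resume training from an existing model for additional epochs.
    // _overwrite_with_best_model = true is inherited via clone(), so the restarted run
    // also keeps the best-scoring model snapshot.
    DeepLearningParameters resumed = (DeepLearningParameters) parms.clone();
    resumed._checkpoint = dl._key;  // continue from the previous model's weights
    resumed._epochs = 10;           // the test raises epochs from 1 to 10 on restart
    DeepLearningModel continued = new DeepLearning(resumed).trainModel().get();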
Use of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai: the class DeepLearningTest, method testCategoricalEncodingEigen.
@Test
public void testCategoricalEncodingEigen() {
    Frame tfr = null;
    Frame vfr = null;
    DeepLearningModel dl = null;
    try {
        String response = "survived";
        tfr = parse_test_file("./smalldata/junit/titanic_alt.csv");
        vfr = parse_test_file("./smalldata/junit/titanic_alt.csv");
        if (tfr.vec(response).isBinary()) {
            Vec v = tfr.remove(response);
            tfr.add(response, v.toCategoricalVec());
            v.remove();
        }
        if (vfr.vec(response).isBinary()) {
            Vec v = vfr.remove(response);
            vfr.add(response, v.toCategoricalVec());
            v.remove();
        }
        DKV.put(tfr);
        DKV.put(vfr);
        DeepLearningParameters parms = new DeepLearningParameters();
        parms._train = tfr._key;
        parms._valid = vfr._key;
        parms._response_column = response;
        parms._reproducible = true;
        parms._hidden = new int[] { 20, 20 };
        parms._seed = 0xdecaf;
        parms._categorical_encoding = Model.Parameters.CategoricalEncodingScheme.Eigen;
        parms._score_training_samples = 0;
        dl = new DeepLearning(parms).trainModel().get();
        Assert.assertEquals(((ModelMetricsBinomial) dl._output._training_metrics)._logloss, ((ModelMetricsBinomial) dl._output._validation_metrics)._logloss, 1e-8);
    } finally {
        if (tfr != null)
            tfr.remove();
        if (vfr != null)
            vfr.remove();
        if (dl != null)
            dl.delete();
    }
}
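Only the encoding parameter distinguishes this run from a default one, and the binary-to-categorical conversion of the response is a reusable idiom. A sketch of the two pieces, assuming a frame tfr, a response column name, and a parms object set up as above:

    // Sketch: convert a binary numeric response to categorical, then request Eigen
    // categorical encoding. (Any description of what Eigen encoding does internally is
    // an assumption here; only the enum value itself comes from the test above.)
    if (tfr.vec(response).isBinary()) {
        Vec v = tfr.remove(response);
        tfr.add(response, v.toCategoricalVec());
        v.remove();
    }
    DKV.put(tfr);
    parms._categorical_encoding = Model.Parameters.CategoricalEncodingScheme.Eigen;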
Use of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai: the class DeepLearningTest, method testLossFunctions.
// just a simple sanity check - not a golden test
@Test
public void testLossFunctions() {
    Frame tfr = null, vfr = null, fr2 = null;
    DeepLearningModel dl = null;
    for (DeepLearningParameters.Loss loss : new DeepLearningParameters.Loss[] { DeepLearningParameters.Loss.Automatic, DeepLearningParameters.Loss.Quadratic, DeepLearningParameters.Loss.Huber, DeepLearningParameters.Loss.Absolute, DeepLearningParameters.Loss.Quantile }) {
        Scope.enter();
        try {
            tfr = parse_test_file("smalldata/glm_test/cancar_logIn.csv");
            for (String s : new String[] { "Merit", "Class" }) {
                Scope.track(tfr.replace(tfr.find(s), tfr.vec(s).toCategoricalVec()));
            }
            DKV.put(tfr);
            DeepLearningParameters parms = new DeepLearningParameters();
            parms._train = tfr._key;
            parms._epochs = 1;
            parms._reproducible = true;
            parms._hidden = new int[] { 50, 50 };
            parms._response_column = "Cost";
            parms._seed = 0xdecaf;
            parms._loss = loss;
            // Build a first model; all remaining models should be equal
            DeepLearning job = new DeepLearning(parms);
            dl = job.trainModel().get();
            ModelMetricsRegression mm = (ModelMetricsRegression) dl._output._training_metrics;
            if (loss == DeepLearningParameters.Loss.Automatic || loss == DeepLearningParameters.Loss.Quadratic)
                Assert.assertEquals(mm._mean_residual_deviance, mm._MSE, 1e-6);
            else
                assertTrue(mm._mean_residual_deviance != mm._MSE);
            assertTrue(dl.testJavaScoring(tfr, fr2 = dl.score(tfr), 1e-5));
        } finally {
            if (tfr != null)
                tfr.remove();
            if (dl != null)
                dl.delete();
            if (fr2 != null)
                fr2.remove();
            Scope.exit();
        }
    }
}
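For a single regression run with one specific loss, the loop above collapses to a single assignment. A sketch, assuming parms is configured for the numeric "Cost" response as in the test:

    // Sketch: train with one robust loss instead of looping over all of them.
    parms._loss = DeepLearningParameters.Loss.Huber;
    DeepLearningModel m = new DeepLearning(parms).trainModel().get();
    ModelMetricsRegression mm = (ModelMetricsRegression) m._output._training_metrics;
    // For Automatic/Quadratic the mean residual deviance equals the MSE;
    // for Huber/Absolute/Quantile it generally differs (as the test asserts).
    Log.info("mean residual deviance=" + mm._mean_residual_deviance + ", MSE=" + mm._MSE);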