Usage of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai.
From class DeepLearningTest, method testAutoEncoder.
@Test
public void testAutoEncoder() {
  // Trains a small Tanh autoencoder on the cancar dataset and checks that the
  // reproducible-mode reconstruction MSE matches the known reference value,
  // then verifies POJO/Java scoring agrees with in-H2O scoring.
  Frame tfr = null, fr2 = null; // removed unused local `vfr`
  DeepLearningModel dl = null;
  Scope.enter();
  try {
    tfr = parse_test_file("smalldata/glm_test/cancar_logIn.csv");
    // Convert the categorical predictors in place; track the new vecs so Scope cleans them up.
    for (String s : new String[] { "Merit", "Class" }) {
      Scope.track(tfr.replace(tfr.find(s), tfr.vec(s).toCategoricalVec()));
    }
    DKV.put(tfr);
    DeepLearningParameters parms = new DeepLearningParameters();
    parms._train = tfr._key;
    parms._epochs = 100;
    parms._reproducible = true; // single-threaded, deterministic training so the MSE is stable
    parms._hidden = new int[] { 5, 5, 5 };
    parms._response_column = "Cost";
    parms._seed = 0xdecaf;
    parms._autoencoder = true;
    parms._input_dropout_ratio = 0.1;
    parms._activation = DeepLearningParameters.Activation.Tanh;
    // Build a first model; all remaining models should be equal
    dl = new DeepLearning(parms).trainModel().get();
    ModelMetricsAutoEncoder mm = (ModelMetricsAutoEncoder) dl._output._training_metrics;
    Assert.assertEquals(0.0712931422088762, mm._MSE, 1e-2);
    // Java scoring must reproduce in-H2O predictions within 1e-5.
    assertTrue(dl.testJavaScoring(tfr, fr2 = dl.score(tfr), 1e-5));
  } finally {
    // Clean up all keys even when an assertion fails.
    if (tfr != null)
      tfr.remove();
    if (dl != null)
      dl.delete();
    if (fr2 != null)
      fr2.delete();
    Scope.exit();
  }
}
Usage of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai.
From class DeepLearningTest, method testRowWeightsOne.
@Test
public void testRowWeightsOne() {
Frame tfr = null, vfr = null, pred = null, fr2 = null;
Scope.enter();
try {
tfr = parse_test_file("smalldata/junit/weights_all_ones.csv");
DKV.put(tfr);
DeepLearningParameters parms = new DeepLearningParameters();
parms._train = tfr._key;
parms._response_column = "response";
parms._weights_column = "weight";
parms._reproducible = true;
parms._seed = 0xdecaf;
parms._classification_stop = -1;
parms._l1 = 0.1;
parms._hidden = new int[] { 1 };
parms._epochs = 1;
// Build a first model; all remaining models should be equal
DeepLearningModel dl = new DeepLearning(parms).trainModel().get();
pred = dl.score(parms.train());
hex.ModelMetricsBinomial mm = hex.ModelMetricsBinomial.getFromDKV(dl, parms.train());
assertEquals(0.7592592592592592, mm.auc_obj()._auc, 1e-8);
double mse = dl._output._training_metrics.mse();
//Note: better results than non-shuffled
assertEquals(0.3148133418670781, mse, 1e-8);
// assertTrue(dl.testJavaScoring(tfr, fr2=dl.score(tfr, 1e-5)); //PUBDEV-1900
dl.delete();
} finally {
if (tfr != null)
tfr.remove();
if (vfr != null)
vfr.remove();
if (pred != null)
pred.remove();
if (fr2 != null)
fr2.remove();
}
Scope.exit();
}
Usage of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai.
From class DeepLearningTest, method testConvergenceMisclassification.
@Test
public void testConvergenceMisclassification() {
  // Verifies that metric-based early stopping (misclassification) halts training
  // long before the deliberately huge epoch budget is exhausted.
  Frame tfr = null;
  DeepLearningModel dl = null; // removed unused local `dl2`
  try {
    tfr = parse_test_file("./smalldata/iris/iris.csv");
    DeepLearningParameters parms = new DeepLearningParameters();
    parms._train = tfr._key;
    parms._epochs = 1000000; // effectively unbounded; early stopping must cut this short
    parms._response_column = "C5";
    parms._reproducible = true;
    parms._hidden = new int[] { 2, 2 };
    parms._seed = 0xdecaf;
    parms._variable_importances = true;
    parms._score_duty_cycle = 1.0; // score as often as possible so stopping can trigger early
    parms._score_interval = 0;
    // don't stop based on absolute classification error
    parms._classification_stop = -1;
    // instead, stop when misclassification hasn't improved over 2 scoring rounds
    parms._stopping_rounds = 2;
    parms._stopping_metric = ScoreKeeper.StoppingMetric.misclassification;
    parms._stopping_tolerance = 0.0;
    dl = new DeepLearning(parms).trainModel().get();
    // Early stopping must have kicked in before the epoch budget ran out.
    Assert.assertTrue(dl.epoch_counter < parms._epochs);
  } finally {
    if (tfr != null)
      tfr.delete();
    if (dl != null)
      dl.delete();
  }
}
Usage of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai.
From class DeepLearningTest, method testConvergenceAUC.
@Test
public void testConvergenceAUC() {
  // Verifies that metric-based early stopping (AUC) halts training long before
  // the deliberately huge epoch budget is exhausted.
  Frame tfr = null;
  DeepLearningModel dl = null; // removed unused local `dl2`
  try {
    tfr = parse_test_file("./smalldata/logreg/prostate.csv");
    // Make the response categorical so this is a binomial problem (AUC applies).
    for (String s : new String[] { "CAPSULE" }) {
      Vec resp = tfr.vec(s).toCategoricalVec();
      tfr.remove(s).remove();
      tfr.add(s, resp);
      DKV.put(tfr);
    }
    DeepLearningParameters parms = new DeepLearningParameters();
    parms._train = tfr._key;
    parms._epochs = 1000000; // effectively unbounded; early stopping must cut this short
    parms._response_column = "CAPSULE";
    parms._reproducible = true;
    parms._hidden = new int[] { 2, 2 };
    parms._seed = 0xdecaf;
    parms._variable_importances = true;
    parms._score_duty_cycle = 1.0; // score as often as possible so stopping can trigger early
    parms._score_interval = 0;
    // don't stop based on absolute classification error
    parms._classification_stop = -1;
    // instead, stop when AUC hasn't improved over 2 scoring rounds
    parms._stopping_rounds = 2;
    parms._stopping_metric = ScoreKeeper.StoppingMetric.AUC;
    parms._stopping_tolerance = 0.0;
    dl = new DeepLearning(parms).trainModel().get();
    // Early stopping must have kicked in before the epoch budget ran out.
    Assert.assertTrue(dl.epoch_counter < parms._epochs);
  } finally {
    if (tfr != null)
      tfr.delete();
    if (dl != null)
      dl.delete();
  }
}
Usage of hex.deeplearning.DeepLearningModel.DeepLearningParameters in project h2o-3 by h2oai.
From class DeepLearningTask, method makeNeurons.
// Helper
// Builds the per-replica Neurons[] layer stack (input + hidden layers + output)
// from the model's parameters, wiring each layer's weights/state from minfo.
// `training` is forwarded to Neurons.init (e.g. to enable dropout machinery).
private static Neurons[] makeNeurons(final DeepLearningModelInfo minfo, boolean training) {
DataInfo dinfo = minfo.data_info();
final DeepLearningParameters params = minfo.get_params();
final int[] h = params._hidden;
// input + hidden + output
Neurons[] neurons = new Neurons[h.length + 2];
// input
neurons[0] = new Neurons.Input(params, minfo.units[0], dinfo);
// hidden
// For autoencoders the loop runs one extra iteration (i == h.length) and builds
// the reconstruction (output) layer here instead of in the block below.
for (int i = 0; i < h.length + (params._autoencoder ? 1 : 0); i++) {
// Layer width: hidden sizes come from params._hidden; the autoencoder's
// reconstruction layer mirrors the input width (minfo.units[0]).
int n = params._autoencoder && i == h.length ? minfo.units[0] : h[i];
switch(params._activation) {
case Tanh:
neurons[i + 1] = new Neurons.Tanh(n);
break;
case TanhWithDropout:
// The autoencoder's reconstruction layer never uses dropout — fall back to the
// plain activation for i == h.length. Same pattern for the other *WithDropout cases.
neurons[i + 1] = params._autoencoder && i == h.length ? new Neurons.Tanh(n) : new Neurons.TanhDropout(n);
break;
case Rectifier:
neurons[i + 1] = new Neurons.Rectifier(n);
break;
case RectifierWithDropout:
neurons[i + 1] = params._autoencoder && i == h.length ? new Neurons.Rectifier(n) : new Neurons.RectifierDropout(n);
break;
case Maxout:
neurons[i + 1] = new Neurons.Maxout(params, (short) 2, n);
break;
case MaxoutWithDropout:
neurons[i + 1] = params._autoencoder && i == h.length ? new Neurons.Maxout(params, (short) 2, n) : new Neurons.MaxoutDropout(params, (short) 2, n);
break;
case ExpRectifier:
neurons[i + 1] = new Neurons.ExpRectifier(n);
break;
case ExpRectifierWithDropout:
neurons[i + 1] = params._autoencoder && i == h.length ? new Neurons.ExpRectifier(n) : new Neurons.ExpRectifierDropout(n);
break;
}
}
// Non-autoencoder output layer: Softmax for classification (except modified_huber,
// which is trained with a linear output), Linear otherwise.
if (!params._autoencoder) {
if (minfo._classification && minfo.get_params()._distribution != DistributionFamily.modified_huber)
neurons[neurons.length - 1] = new Neurons.Softmax(minfo.units[minfo.units.length - 1]);
else
neurons[neurons.length - 1] = new Neurons.Linear();
}
//copy parameters from NN, and set previous/input layer links
for (int i = 0; i < neurons.length; i++) {
neurons[i].init(neurons, i, params, minfo, training);
neurons[i]._input = neurons[0];
}
// for (Neurons n : neurons) Log.info(n.toString());
return neurons;
}
Aggregations