Use of hex.Layer in project h2o-2 by h2oai.
Class NeuralNetMnistDrednet, method build().
@Override
protected Layer[] build(Vec[] data, Vec labels, VecsInput inputStats, VecSoftmax outputStats) {
  NeuralNet p = new NeuralNet();
  Layer[] ls = new Layer[5];
  p.hidden = new int[] { 1024, 1024, 2048 };
  // p.hidden = new int[] { 128, 128, 256 };

  // input layer, three rectifier layers with dropout, softmax output
  ls[0] = new VecsInput(data, inputStats);
  for (int i = 1; i < ls.length - 1; i++)
    ls[i] = new Layer.RectifierDropout(p.hidden[i - 1]);
  ls[4] = new VecSoftmax(labels, outputStats);

  // training hyperparameters
  p.rate = 0.01f;
  p.rate_annealing = 1e-6f;
  p.epochs = 1000;
  p.activation = NeuralNet.Activation.RectifierWithDropout;
  p.input_dropout_ratio = 0.2;
  p.loss = NeuralNet.Loss.CrossEntropy;
  p.max_w2 = 15;
  p.momentum_start = 0.5f;
  p.momentum_ramp = 1800000;
  p.momentum_stable = 0.99f;
  p.score_training = 1000;
  p.score_validation = 10000;
  p.l1 = .00001f;
  p.l2 = .00f;
  p.initial_weight_distribution = NeuralNet.InitialWeightDistribution.UniformAdaptive;
  p.score_interval = 30;

  // wire each layer to its predecessor and apply the parameters
  for (int i = 0; i < ls.length; i++) {
    ls[i].init(ls, i, p);
  }
  return ls;
}
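The schedule parameters above (rate_annealing, momentum_start, momentum_ramp, momentum_stable) control how the learning rate decays and how momentum ramps up as training samples are processed. The following standalone sketch shows the usual interpretation of these names; the formulas and the ScheduleSketch class are illustrative assumptions, not code from hex.NeuralNet.

// Standalone sketch (assumption): how the schedule parameters set in build()
// are commonly interpreted. The exact formulas used by hex.NeuralNet may differ.
public class ScheduleSketch {
  static float annealedRate(float rate, float rateAnnealing, long samplesSeen) {
    // learning rate decays with the number of training samples processed
    return rate / (1 + rateAnnealing * samplesSeen);
  }

  static float rampedMomentum(float start, float stable, long ramp, long samplesSeen) {
    // momentum increases linearly from `start` to `stable` over `ramp` samples
    if (samplesSeen >= ramp) return stable;
    return start + (stable - start) * samplesSeen / (float) ramp;
  }

  public static void main(String[] args) {
    // values taken from the build() method above
    System.out.println(annealedRate(0.01f, 1e-6f, 1_000_000));              // ~0.005
    System.out.println(rampedMomentum(0.5f, 0.99f, 1_800_000, 900_000));    // ~0.745
  }
}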
Use of hex.Layer in project h2o-2 by h2oai.
Class NeuralNet, method correct().
// classification scoring
static boolean correct(Layer[] ls, Errors e, long[][] confusion) {
  Softmax output = (Softmax) ls[ls.length - 1];
  if (output.target() == -1)
    return false;

  // forward-propagate the current row through all layers in scoring mode
  for (Layer l : ls)
    l.fprop(-1, false);
  float[] out = ls[ls.length - 1]._a;
  int target = output.target();

  // accumulate squared error and cross-entropy against the one-hot target
  for (int o = 0; o < out.length; o++) {
    final boolean hitpos = (o == target);
    final double t = hitpos ? 1 : 0;
    final double d = t - out[o];
    e.mean_square += d * d;
    e.cross_entropy += hitpos ? -Math.log(out[o]) : 0;
  }

  // preds[0] holds the predicted class index, preds[1..] the class probabilities
  float[] preds = new float[out.length + 1];
  for (int i = 0; i < out.length; ++i)
    preds[i + 1] = out[i];
  double[] data = new double[ls[0]._a.length];
  preds[0] = ModelUtils.getPrediction(preds, data);

  // update the confusion matrix (actual class as row, predicted class as column)
  if (confusion != null) {
    if (output.target() != Layer.missing_int_value)
      confusion[output.target()][(int) preds[0]]++;
  }
  return preds[0] == output.target();
}
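correct() fills one cell of the long[][] confusion matrix per scored row, with the actual class as the row index and the predicted class as the column index. A matrix built this way can be summarized into per-class error rates and overall accuracy as in the standalone sketch below; the ConfusionSummary class is hypothetical and uses only standard Java.

// Standalone sketch (assumption, not part of hex.NeuralNet): summarizing a
// confusion matrix like the one filled in correct().
public class ConfusionSummary {
  public static void main(String[] args) {
    long[][] confusion = new long[10][10]; // e.g. 10 MNIST classes; rows = actual, cols = predicted
    // ... filled by repeatedly calling correct(ls, e, confusion) over a scoring set ...
    long total = 0, hits = 0;
    for (int actual = 0; actual < confusion.length; actual++) {
      long rowTotal = 0;
      for (int predicted = 0; predicted < confusion[actual].length; predicted++) {
        rowTotal += confusion[actual][predicted];
      }
      total += rowTotal;
      hits += confusion[actual][actual]; // diagonal entries are correct predictions
      double classError = rowTotal == 0 ? 0 : 1.0 - (double) confusion[actual][actual] / rowTotal;
      System.out.printf("class %d error: %.4f%n", actual, classError);
    }
    System.out.printf("overall accuracy: %.4f%n", total == 0 ? 0 : (double) hits / total);
  }
}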
Use of hex.Layer in project h2o-2 by h2oai.
Class NeuralNet, method error().
// regression scoring
static void error(Layer[] ls, Errors e) {
  Linear linear = (Linear) ls[ls.length - 1];

  // forward-propagate the current row through all layers in scoring mode
  for (Layer l : ls)
    l.fprop(-1, false);
  float[] output = ls[ls.length - 1]._a;
  float[] target = linear.target();

  // squared error over the output units for this row
  e.mean_square = 0;
  for (int o = 0; o < output.length; o++) {
    final double d = target[o] - output[o];
    e.mean_square += d * d;
  }
}
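error() stores only the squared error of a single row in Errors.mean_square (the field is reset before accumulating), so averaging across a scoring set has to happen in the caller. The standalone sketch below shows one way to turn per-row squared errors into MSE and RMSE; the RegressionScoreSketch class and its members are hypothetical, mirroring only the accumulation pattern of error().

// Standalone sketch (assumption): aggregating per-row squared errors into
// MSE and RMSE. Names other than the mean-square idea are hypothetical.
public class RegressionScoreSketch {
  static double meanSquare;   // stands in for the accumulated Errors.mean_square
  static long rows;

  static void addRow(double target, double prediction) {
    double d = target - prediction;
    meanSquare += d * d;      // same accumulation as in error()
    rows++;
  }

  public static void main(String[] args) {
    addRow(1.0, 0.8);
    addRow(2.0, 2.5);
    double mse = meanSquare / rows;   // (0.04 + 0.25) / 2 = 0.145
    double rmse = Math.sqrt(mse);     // ~0.381
    System.out.printf("MSE=%.3f RMSE=%.3f%n", mse, rmse);
  }
}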