Use of water.exceptions.H2OIllegalArgumentException in project h2o-3 by h2oai.
From class DeepLearningTest, method testCheckpointBackwards.
@Test
public void testCheckpointBackwards() {
  Frame tfr = null;
  DeepLearningModel dl = null;
  DeepLearningModel dl2 = null;
  try {
    tfr = parse_test_file("./smalldata/iris/iris.csv");
    DeepLearningParameters parms = new DeepLearningParameters();
    parms._train = tfr._key;
    parms._epochs = 10;
    parms._response_column = "C5";
    parms._reproducible = true;
    parms._hidden = new int[] { 2, 2 };
    parms._seed = 0xdecaf;
    parms._variable_importances = true;
    dl = new DeepLearning(parms).trainModel().get();
    DeepLearningParameters parms2 = (DeepLearningParameters) parms.clone();
    parms2._epochs = 9;
    parms2._checkpoint = dl._key;
    try {
      dl2 = new DeepLearning(parms2).trainModel().get();
      Assert.fail("Should toss exception instead of reaching here");
    } catch (H2OIllegalArgumentException ex) {
    }
  } finally {
    if (tfr != null)
      tfr.delete();
    if (dl != null)
      dl.delete();
    if (dl2 != null)
      dl2.delete();
  }
}
Use of water.exceptions.H2OIllegalArgumentException in project h2o-3 by h2oai.
From class DeepWaterAbstractIntegrationTest, method testCheckpointBackwards.
@Test
public void testCheckpointBackwards() {
  Frame tfr = null;
  DeepWaterModel dl = null;
  DeepWaterModel dl2 = null;
  try {
    tfr = parse_test_file("./smalldata/iris/iris.csv");
    DeepWaterParameters p = new DeepWaterParameters();
    p._backend = getBackend();
    p._train = tfr._key;
    p._epochs = 10;
    p._response_column = "C5";
    p._hidden = new int[] { 2, 2 };
    p._seed = 0xdecaf;
    dl = new DeepWater(p).trainModel().get();
    DeepWaterParameters parms2 = (DeepWaterParameters) p.clone();
    parms2._epochs = 9;
    parms2._checkpoint = dl._key;
    try {
      dl2 = new DeepWater(parms2).trainModel().get();
      Assert.fail("Should toss exception instead of reaching here");
    } catch (H2OIllegalArgumentException ex) {
    }
  } finally {
    if (tfr != null)
      tfr.delete();
    if (dl != null)
      dl.delete();
    if (dl2 != null)
      dl2.delete();
  }
}
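Both checkpoint tests above exercise the same expectation: restarting training from a checkpoint while requesting fewer epochs than the original model must fail with H2OIllegalArgumentException, and the try/fail/catch pattern enforces it. Below is a minimal sketch of that pattern factored into a reusable helper; the class ExpectIllegalArgument and the optional message-fragment check are hypothetical and not part of h2o-3.

import org.junit.Assert;
import water.exceptions.H2OIllegalArgumentException;

// Hypothetical test helper (not in h2o-3): run an action and require that it
// fails with H2OIllegalArgumentException, optionally checking the message text.
final class ExpectIllegalArgument {

  static void expect(Runnable action, String expectedMessageFragment) {
    try {
      action.run();
      Assert.fail("Should toss exception instead of reaching here");
    } catch (H2OIllegalArgumentException ex) {
      if (expectedMessageFragment != null)
        Assert.assertTrue("Unexpected message: " + ex.getMessage(),
            String.valueOf(ex.getMessage()).contains(expectedMessageFragment));
    }
  }
}

With such a helper, the inner try block of either test could become a single call, for example ExpectIllegalArgument.expect(() -> new DeepLearning(parms2).trainModel().get(), null), passing null because the original tests do not assert on the message text.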
Use of water.exceptions.H2OIllegalArgumentException in project h2o-3 by h2oai.
From class GridSearch, method start.
Job<Grid> start() {
  final long gridSize = _hyperSpaceWalker.getMaxHyperSpaceSize();
  Log.info("Starting gridsearch: estimated size of search space = " + gridSize);
  // Create grid object and lock it
  // Creation is done here, since we would like to make sure that after leaving
  // this function the grid object is in DKV and accessible.
  final Grid<MP> grid;
  Keyed keyed = DKV.getGet(_result);
  if (keyed != null) {
    if (!(keyed instanceof Grid))
      throw new H2OIllegalArgumentException("Name conflict: tried to create a Grid using the ID of a non-Grid object that's already in H2O: " + _job._result + "; it is a: " + keyed.getClass());
    grid = (Grid) keyed;
    Frame specTrainFrame = _hyperSpaceWalker.getParams().train();
    Frame oldTrainFrame = grid.getTrainingFrame();
    if (oldTrainFrame != null && !specTrainFrame._key.equals(oldTrainFrame._key) || specTrainFrame.checksum() != oldTrainFrame.checksum())
      throw new H2OIllegalArgumentException("training_frame", "grid", "Cannot append new models to a grid with different training input");
    grid.write_lock(_job);
  } else {
    grid = new Grid<>(_result, _hyperSpaceWalker.getParams(), _hyperSpaceWalker.getHyperParamNames(), _hyperSpaceWalker.getParametersBuilderFactory().getFieldNamingStrategy());
    grid.delete_and_lock(_job);
  }
  Model model = null;
  HyperSpaceWalker.HyperSpaceIterator<MP> it = _hyperSpaceWalker.iterator();
  long gridWork = 0;
  if (gridSize > 0) {
    // if total grid space is known, walk it all and count up models to be built
    // (not subject to time-based or convergence-based early stopping)
    int count = 0;
    while (it.hasNext(model) && (it.max_models() > 0 && count++ < it.max_models())) {
      // only walk the first max_models models, if specified
      try {
        Model.Parameters parms = it.nextModelParameters(model);
        gridWork += (parms._nfolds > 0 ? (parms._nfolds + 1) : 1) * parms.progressUnits();
      } catch (Throwable ex) {
        // swallow invalid combinations
      }
    }
  } else {
    // TODO: Future totally unbounded search: need a time-based progress bar
    gridWork = Long.MAX_VALUE;
  }
  it.reset();
  // Install this as job functions
  return _job.start(new H2O.H2OCountedCompleter() {

    @Override
    public void compute2() {
      gridSearch(grid);
      tryComplete();
    }
  }, gridWork, it.max_runtime_secs());
}
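The only two H2OIllegalArgumentException sites in start() are the key-collision check and the training-frame consistency check. The following is a condensed sketch of just that validation in isolation, using the same constructor shapes; the helper name validateExistingGrid is hypothetical, and the explicit parenthesization of the frame check is an assumption about the intended precedence of the original condition.

import hex.grid.Grid;
import water.DKV;
import water.Key;
import water.Keyed;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;

// Hypothetical helper (not in h2o-3): mirrors the two throw sites in
// GridSearch.start() shown above.
final class GridChecks {

  @SuppressWarnings("rawtypes")
  static Grid validateExistingGrid(Key resultKey, Frame specTrainFrame) {
    Keyed keyed = DKV.getGet(resultKey);
    if (keyed == null)
      return null; // nothing under this key yet; the caller builds a fresh Grid
    if (!(keyed instanceof Grid))
      throw new H2OIllegalArgumentException("Name conflict: tried to create a Grid using the ID of a non-Grid object that's already in H2O: " + resultKey + "; it is a: " + keyed.getClass());
    Grid grid = (Grid) keyed;
    Frame oldTrainFrame = grid.getTrainingFrame();
    if (oldTrainFrame != null
        && (!specTrainFrame._key.equals(oldTrainFrame._key)
            || specTrainFrame.checksum() != oldTrainFrame.checksum()))
      throw new H2OIllegalArgumentException("training_frame", "grid", "Cannot append new models to a grid with different training input");
    return grid;
  }
}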
Use of water.exceptions.H2OIllegalArgumentException in project h2o-3 by h2oai.
From class HyperSpaceSearchCriteriaV99, method fillWithDefaults.
/**
 * Fill with the default values from the corresponding Iced object.
 */
public S fillWithDefaults() {
  HyperSpaceSearchCriteria defaults = null;
  if (HyperSpaceSearchCriteria.Strategy.Cartesian == strategy) {
    defaults = new HyperSpaceSearchCriteria.CartesianSearchCriteria();
  } else if (HyperSpaceSearchCriteria.Strategy.RandomDiscrete == strategy) {
    defaults = new HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria();
  } else {
    throw new H2OIllegalArgumentException("search_criteria.strategy", strategy.toString());
  }
  fillFromImpl((I) defaults);
  return (S) this;
}
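The schema rejects any strategy it does not recognize with the two-argument (message, dev_message) form of the exception. A minimal sketch of the same dispatch outside the schema class follows; the hex.grid package location of HyperSpaceSearchCriteria and the class name CriteriaDefaults are assumptions made for illustration.

import hex.grid.HyperSpaceSearchCriteria;
import water.exceptions.H2OIllegalArgumentException;

// Hypothetical standalone version (not in h2o-3) of the strategy dispatch in
// HyperSpaceSearchCriteriaV99.fillWithDefaults() above.
final class CriteriaDefaults {

  static HyperSpaceSearchCriteria defaultsFor(HyperSpaceSearchCriteria.Strategy strategy) {
    if (strategy == HyperSpaceSearchCriteria.Strategy.Cartesian)
      return new HyperSpaceSearchCriteria.CartesianSearchCriteria();
    if (strategy == HyperSpaceSearchCriteria.Strategy.RandomDiscrete)
      return new HyperSpaceSearchCriteria.RandomDiscreteValueSearchCriteria();
    // same (message, dev_message) constructor shape as the schema uses
    throw new H2OIllegalArgumentException("search_criteria.strategy", String.valueOf(strategy));
  }
}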
Use of water.exceptions.H2OIllegalArgumentException in project h2o-3 by h2oai.
From class FindHandler, method find.
// called through reflection by RequestServer
@SuppressWarnings("unused")
public FindV3 find(int version, FindV3 find) {
  Frame frame = find.key._fr;
  // Peel out an optional column; restrict to this column
  if (find.column != null) {
    Vec vec = frame.vec(find.column);
    if (vec == null)
      throw new H2OColumnNotFoundArgumentException("column", frame, find.column);
    find.key = new FrameV3(new Frame(new String[] { find.column }, new Vec[] { vec }));
  }
  // Convert the search string into a column-specific flavor
  Vec[] vecs = frame.vecs();
  double[] ds = new double[vecs.length];
  for (int i = 0; i < vecs.length; i++) {
    if (vecs[i].isCategorical()) {
      int idx = ArrayUtils.find(vecs[i].domain(), find.match);
      if (idx == -1 && vecs.length == 1)
        throw new H2OCategoricalLevelNotFoundArgumentException("match", find.match, frame._key.toString(), frame.name(i));
      ds[i] = idx;
    } else if (vecs[i].isUUID()) {
      throw H2O.unimpl();
    } else if (vecs[i].isString()) {
      throw H2O.unimpl();
    } else if (vecs[i].isTime()) {
      throw H2O.unimpl();
    } else {
      try {
        ds[i] = find.match == null ? Double.NaN : Double.parseDouble(find.match);
      } catch (NumberFormatException e) {
        if (vecs.length == 1) {
          // There's only one Vec, it's a numeric Vec, and our search string isn't a number
          IcedHashMapGeneric.IcedHashMapStringObject values = new IcedHashMapGeneric.IcedHashMapStringObject();
          String msg = "Frame: " + frame._key.toString() + " has only one column, it is numeric, and the find pattern is not numeric: " + find.match;
          values.put("frame_name", frame._key.toString());
          values.put("column_name", frame.name(i));
          values.put("pattern", find.match);
          throw new H2OIllegalArgumentException(msg, msg, values);
        }
        // Do not match
        ds[i] = Double.longBitsToDouble(0xcafebabe);
      }
    }
  }
  Find f = new Find(find.row, ds).doAll(frame);
  find.prev = f._prev;
  find.next = f._next == Long.MAX_VALUE ? -1 : f._next;
  return find;
}
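The single-numeric-column branch above uses the structured (message, dev_message, values) form of the exception, packing the frame name, column name, and search pattern into an IcedHashMapStringObject so the error response can carry them alongside the message. A minimal sketch of that construction on its own follows; the wrapper class FindErrors and its method name are hypothetical.

import water.exceptions.H2OIllegalArgumentException;
import water.util.IcedHashMapGeneric;

// Hypothetical example (not in h2o-3): builds the structured exception the way
// FindHandler.find() does when a non-numeric pattern is searched in a frame
// with a single numeric column.
final class FindErrors {

  static H2OIllegalArgumentException nonNumericPattern(String frameName, String columnName, String pattern) {
    IcedHashMapGeneric.IcedHashMapStringObject values = new IcedHashMapGeneric.IcedHashMapStringObject();
    values.put("frame_name", frameName);
    values.put("column_name", columnName);
    values.put("pattern", pattern);
    String msg = "Frame: " + frameName + " has only one column, it is numeric, and the find pattern is not numeric: " + pattern;
    // message and dev_message are identical here, mirroring the handler above
    return new H2OIllegalArgumentException(msg, msg, values);
  }
}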