Use of weka.core.WekaException in project labkit-ui by juglab.
The class TrainableSegmentationSegmenter, method train.
@Override
public void train(List<Pair<ImgPlus<?>, Labeling>> trainingData) {
    try {
        initFeatureSettings(trainingData);
        List<String> classes = collectLabels(trainingData.stream().map(Pair::getB).collect(Collectors.toList()));
        sc.fiji.labkit.pixel_classification.classification.Segmenter segmenter = new sc.fiji.labkit.pixel_classification.classification.Segmenter(context, classes, featureSettings, new FastRandomForest());
        segmenter.setUseGpu(useGpu);
        Training training = segmenter.training();
        for (Pair<ImgPlus<?>, Labeling> pair : trainingData)
            trainStack(training, classes, pair.getB(), pair.getA(), segmenter.features());
        training.train();
        this.segmenter = segmenter;
    } catch (RuntimeException e) {
        Throwable cause = e.getCause();
        if (cause instanceof WekaException && cause.getMessage().contains("Not enough training instances"))
            throw new CancellationException("The training requires some labeled regions.");
        throw e;
    }
}
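The interesting idiom here is unwrapping the checked weka.core.WekaException from the RuntimeException that wraps it and translating a known failure mode into a user-facing message. A minimal, self-contained sketch of that unwrap-and-translate step, outside the Labkit classes (runTraining and its message are hypothetical stand-ins for the actual training call):

import java.util.concurrent.CancellationException;

import weka.core.WekaException;

public class TrainingErrorTranslation {

    // Hypothetical stand-in for the actual training call.
    static void runTraining() {
        throw new RuntimeException(new WekaException("Not enough training instances"));
    }

    public static void main(String[] args) {
        try {
            runTraining();
        } catch (RuntimeException e) {
            Throwable cause = e.getCause();
            // Translate the known Weka failure into a message the UI can show directly.
            if (cause instanceof WekaException && cause.getMessage() != null
                && cause.getMessage().contains("Not enough training instances"))
                throw new CancellationException("The training requires some labeled regions.");
            throw e; // anything else is unexpected and propagates unchanged
        }
    }
}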
Use of weka.core.WekaException in project umple by umple.
The class ExplicitTestsetResultProducer, method doRun.
/**
* Gets the results for a specified run number. Different run numbers
* correspond to different randomizations of the data. Results produced should
* be sent to the current ResultListener
*
* @param run the run number to get results for.
* @throws Exception if a problem occurs while getting the results
*/
@Override
public void doRun(int run) throws Exception {
    if (getRawOutput()) {
        if (m_ZipDest == null) {
            m_ZipDest = new OutputZipper(m_OutputFile);
        }
    }
    if (m_Instances == null) {
        throw new Exception("No Instances set");
    }
    // Add in some fields to the key like run number, dataset name
    Object[] seKey = m_SplitEvaluator.getKey();
    Object[] key = new Object[seKey.length + 2];
    key[0] = Utils.backQuoteChars(m_Instances.relationName());
    key[1] = "" + run;
    System.arraycopy(seKey, 0, key, 2, seKey.length);
    if (m_ResultListener.isResultRequired(this, key)) {
        // training set
        Instances train = new Instances(m_Instances);
        if (m_randomize) {
            Random rand = new Random(run);
            train.randomize(rand);
        }
        if (m_env == null) {
            m_env = new Environment();
        }
        m_env.addVariable("RUN_NUMBER", "" + run);
        // test set
        String filename = createFilename(train);
        File file = new File(filename);
        if (!file.exists()) {
            throw new WekaException("Test set '" + filename + "' not found!");
        }
        Instances test = DataSource.read(filename);
        // can we set the class attribute safely?
        if (train.numAttributes() == test.numAttributes()) {
            test.setClassIndex(train.classIndex());
        } else {
            throw new WekaException("Train and test set (= " + filename + ") " + "differ in number of attributes: " + train.numAttributes() + " != " + test.numAttributes());
        }
        // test headers
        if (!train.equalHeaders(test)) {
            throw new WekaException("Train and test set (= " + filename + ") " + "are not compatible:\n" + train.equalHeadersMsg(test));
        }
        try {
            Object[] seResults = m_SplitEvaluator.getResult(train, test);
            Object[] results = new Object[seResults.length + 1];
            results[0] = getTimestamp();
            System.arraycopy(seResults, 0, results, 1, seResults.length);
            if (m_debugOutput) {
                String resultName = ("" + run + "." + Utils.backQuoteChars(train.relationName()) + "." + m_SplitEvaluator.toString()).replace(' ', '_');
                resultName = Utils.removeSubstring(resultName, "weka.classifiers.");
                resultName = Utils.removeSubstring(resultName, "weka.filters.");
                resultName = Utils.removeSubstring(resultName, "weka.attributeSelection.");
                m_ZipDest.zipit(m_SplitEvaluator.getRawResultOutput(), resultName);
            }
            m_ResultListener.acceptResult(this, key, results);
        } catch (Exception e) {
            // Save the train and test datasets for debugging purposes?
            throw e;
        }
    }
}
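The WekaException throws above guard the compatibility of an externally supplied test set. A minimal sketch of the same checks in isolation (the train.arff/test.arff paths and the class-index choice are placeholders, not part of the original code):

import weka.core.Instances;
import weka.core.WekaException;
import weka.core.converters.ConverterUtils.DataSource;

public class TestsetCompatibilityCheck {

    public static void main(String[] args) throws Exception {
        // Placeholder ARFF files; any pair with matching headers will do.
        Instances train = DataSource.read("train.arff");
        Instances test = DataSource.read("test.arff");
        train.setClassIndex(train.numAttributes() - 1);

        // Same checks the result producer performs before evaluating a split.
        if (train.numAttributes() != test.numAttributes()) {
            throw new WekaException("Train and test set differ in number of attributes: " + train.numAttributes() + " != " + test.numAttributes());
        }
        test.setClassIndex(train.classIndex());
        if (!train.equalHeaders(test)) {
            throw new WekaException("Train and test set are not compatible:\n" + train.equalHeadersMsg(test));
        }
        System.out.println("Compatible: " + train.numInstances() + " training and " + test.numInstances() + " test instances");
    }
}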
Use of weka.core.WekaException in project umple by umple.
The class Script, method runScript.
/**
 * Runs the specified script. All options that weren't "consumed" (like "-s"
 * for the script filename) will be used as command-line arguments for the
 * actual script.
 *
 * @param script the script object to use
 * @param args the command-line arguments
 * @throws Exception if execution fails
 */
public static void runScript(Script script, String[] args) throws Exception {
    String tmpStr;
    File scriptFile;
    Vector<String> options;
    int i;
    if (Utils.getFlag('h', args) || Utils.getFlag("help", args)) {
        System.out.println(makeOptionString(script));
    } else {
        // process options
        tmpStr = Utils.getOption('s', args);
        if (tmpStr.length() == 0) {
            throw new WekaException("No script supplied!");
        } else {
            scriptFile = new File(tmpStr);
        }
        script.setOptions(args);
        // remove empty elements from array
        options = new Vector<String>();
        for (i = 0; i < args.length; i++) {
            if (args[i].length() > 0) {
                options.add(args[i]);
            }
        }
        // run script
        script.run(scriptFile, options.toArray(new String[options.size()]));
    }
}
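Weka's Utils.getOption consumes an option and its value by blanking them out in the args array, which is why the loop above keeps only the non-empty elements. A minimal sketch of that behavior, independent of the Script class (the default argument list is hypothetical):

import java.util.ArrayList;
import java.util.List;

import weka.core.Utils;
import weka.core.WekaException;

public class ScriptOptionParsing {

    public static void main(String[] rawArgs) throws Exception {
        // Hypothetical command line if none is given.
        String[] args = rawArgs.length > 0 ? rawArgs : new String[] { "-s", "my_script.py", "arg1", "arg2" };

        // getOption blanks out ("") the elements it consumes ...
        String scriptName = Utils.getOption('s', args);
        if (scriptName.length() == 0) {
            throw new WekaException("No script supplied!");
        }

        // ... so the remaining non-empty entries are the script's own arguments.
        List<String> scriptArgs = new ArrayList<String>();
        for (String arg : args) {
            if (arg.length() > 0) {
                scriptArgs.add(arg);
            }
        }
        System.out.println("script = " + scriptName + ", script args = " + scriptArgs);
    }
}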
Use of weka.core.WekaException in project umple by umple.
The class AbstractOutput, method printFooter.
/**
* Prints the footer to the buffer. This will also store the generated output
* in a file if an output file was specified.
*
* @throws Exception if check fails
*/
public void printFooter() throws Exception {
    String error;
    BufferedWriter writer;
    if ((error = checkBasic()) != null) {
        throw new WekaException(error);
    }
    doPrintFooter();
    // write output to file
    if (!m_OutputFile.isDirectory()) {
        try {
            writer = new BufferedWriter(new FileWriter(m_OutputFile));
            writer.write(m_FileBuffer.toString());
            writer.newLine();
            writer.flush();
            writer.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
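The write block above closes the writer only on the success path; if write() throws, the stream stays open. A try-with-resources variant of the same buffer-to-file step, sketched with local stand-ins for m_OutputFile and m_FileBuffer, would close it in every case:

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;

public class FooterWriter {

    // Stand-ins for the member fields used by printFooter().
    public static void writeBuffer(File outputFile, StringBuffer fileBuffer) {
        if (outputFile.isDirectory()) {
            return;
        }
        // try-with-resources closes the writer even if write() fails.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile))) {
            writer.write(fileBuffer.toString());
            writer.newLine();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}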
Use of weka.core.WekaException in project ambit-mirror by ideaconsult.
The class FilteredWekaModelBuilder, method process.
public ModelQueryResults process(Algorithm algorithm) throws AmbitException {
    List<Filter> filters = new ArrayList<Filter>();
    Instances instances = trainingData;
    if ((instances == null) || (instances.numInstances() == 0) || (instances.numAttributes() == 0))
        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Empty dataset!");
    Object weka = null;
    try {
        Class clazz = this.getClass().getClassLoader().loadClass(algorithm.getContent().toString());
        weka = clazz.newInstance();
    } catch (Exception x) {
        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, x.getMessage(), x);
    }
    if (targetURI != null)
        for (String t : targetURI)
            for (int i = 0; i < instances.numAttributes(); i++)
                if (instances.attribute(i).name().equals(t)) {
                    instances.setClassIndex(i);
                    break;
                }
    fclusterer = null;
    fclassifier = null;
    pca = null;
    if (weka instanceof Clusterer) {
        fclusterer = new FilteredClusterer();
        fclusterer.setClusterer((Clusterer) weka);
    } else if (weka instanceof Classifier) {
        fclassifier = new FilteredClassifier();
        fclassifier.setClassifier((Classifier) weka);
        if (targetURI == null)
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "No target variable! " + OpenTox.params.target);
        if (instances.classIndex() < 0)
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "No target variable! " + OpenTox.params.target);
        if (weka instanceof IBk) {
            String[] options = new String[3];
            options[0] = "-K";
            options[1] = "-20";
            options[2] = "-X";
            try {
                ((IBk) weka).setOptions(options);
            } catch (Exception x) {
            }
        }
    } else if (weka instanceof PrincipalComponents) {
        pca = (PrincipalComponents) weka;
    } else
        throw new AmbitException(String.format("Unknown algorithm %s", algorithm.toString()));
    String[] prm = algorithm.getParametersAsArray();
    if (prm != null)
        try {
            if (fclassifier != null)
                fclassifier.getClassifier().setOptions(prm);
            else if (pca != null)
                pca.setOptions(prm);
            else if (fclusterer != null) {
                fclusterer.getClusterer().getClass().getMethod("setOptions", new Class[] { String[].class }).invoke(fclusterer.getClusterer(), new Object[] { prm });
            }
        } catch (Exception x) {
            Context.getCurrentLogger().warning("Error setting algorithm parameters, assuming defaults: " + x.getMessage());
        }
    try {
        // remove firstCompoundID attribute
        String[] options = new String[2];
        options[0] = "-R";
        options[1] = "1";
        Remove remove = new Remove();
        remove.setOptions(options);
        filters.add(remove);
    } catch (Exception x) {
        throw new AmbitException(x);
    }
    try {
        // remove missing values
        if (!hasCapability(Capability.MISSING_VALUES)) {
            ReplaceMissingValues missing = new ReplaceMissingValues();
            // can't make it work with RemoveWithValues...
            String[] options = new String[1];
            options[0] = "-M";
            missing.setOptions(options);
            filters.add(missing);
        }
    } catch (Exception x) {
        throw new AmbitException(x);
    }
    if (instances.classIndex() >= 0)
        try {
            // num/nom support
            if (instances.attribute(instances.classIndex()).isNominal()) {
                if (!hasCapability(Capability.NOMINAL_CLASS)) {
                    if (hasCapability(Capability.BINARY_CLASS)) {
                        // nominal 2 binary
                        NominalToBinary nom2bin = new NominalToBinary();
                        String[] options = new String[2];
                        options[0] = "-R";
                        options[1] = Integer.toString(instances.classIndex());
                        nom2bin.setOptions(options);
                        filters.add(nom2bin);
                    }
                }
            } else if (instances.attribute(instances.classIndex()).isNumeric()) {
                if (!hasCapability(Capability.NUMERIC_CLASS)) {
                    if (hasCapability(Capability.NOMINAL_CLASS)) {
                        // numeric to nominal, i.e. Discretize
                        Discretize num2nom = new Discretize();
                        String[] options = new String[2];
                        options[0] = "-R";
                        options[1] = Integer.toString(instances.classIndex());
                        num2nom.setOptions(options);
                        filters.add(num2nom);
                    }
                }
                // else all is well
            } else if (instances.attribute(instances.classIndex()).isString()) {
                if (hasCapability(Capability.NOMINAL_CLASS)) {
                    StringToNominal str2nom = new StringToNominal();
                    String[] options = new String[2];
                    options[0] = "-R";
                    options[1] = Integer.toString(instances.classIndex());
                    str2nom.setOptions(options);
                    filters.add(str2nom);
                }
            }
            if (!hasCapability(Capability.MISSING_CLASS_VALUES)) {
                RemoveWithValues missing = new RemoveWithValues();
                String[] options = new String[3];
                options[0] = "-M";
                options[1] = "-C";
                options[2] = Integer.toString(instances.classIndex());
                missing.setOptions(options);
                filters.add(missing);
            }
            if (fclassifier == null) {
                // clusterer, ignore the class attr
                try {
                    // remove the class attribute
                    String[] options = new String[2];
                    options[0] = "-R";
                    options[1] = Integer.toString(instances.classIndex());
                    Remove remove = new Remove();
                    remove.setOptions(options);
                    filters.add(remove);
                } catch (Exception x) {
                    throw new AmbitException(x);
                }
            }
        } catch (Exception x) {
            throw new AmbitException(x);
        }
    try {
        // all besides the class (if set!)
        filters.add(new Standardize());
    } catch (Exception x) {
        throw new AmbitException(x);
    }
    // now set the filters
    MultiFilter filter = new MultiFilter();
    filter.setFilters(filters.toArray(new Filter[filters.size()]));
    Instances newInstances = instances;
    if (fclassifier != null)
        fclassifier.setFilter(filter);
    else if (fclusterer != null)
        fclusterer.setFilter(filter);
    else {
        try {
            filter.setInputFormat(instances);
            newInstances = Filter.useFilter(instances, filter);
        } catch (Exception x) {
            throw new AmbitException(x);
        }
    }
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyMMddhhmmss");
    Date timestamp = new Date(System.currentTimeMillis());
    String name = String.format("%s.%s.%s", simpleDateFormat.format(new Date(System.currentTimeMillis())), UUID.randomUUID().toString(), weka.getClass().getName());
    ModelQueryResults m = new ModelQueryResults();
    m.setParameters(parameters);
    m.setId(null);
    m.setContentMediaType(AlgorithmFormat.WEKA.getMediaType());
    m.setName(name);
    m.setAlgorithm(alg_reporter.getURI(algorithm));
    AlgorithmURIReporter r = new AlgorithmURIReporter();
    LiteratureEntry entry = new LiteratureEntry(name, algorithm == null ? weka.getClass().getName() : r.getURI(applicationRootReference.toString(), algorithm));
    LiteratureEntry prediction = new LiteratureEntry(m.getName(), model_reporter.getURI(applicationRootReference.toString(), m));
    prediction.setType(_type.Model);
    Template predictors = null;
    Template dependent = null;
    PredictedVarsTemplate predicted = null;
    if (fclusterer != null) {
        try {
            fclusterer.buildClusterer(newInstances);
        } catch (Exception x) {
            throw new AmbitException(x);
        }
        predicted = new PredictedVarsTemplate(name + "#Predicted");
        Property property = new Property("Cluster", prediction);
        property.setNominal(true);
        predicted.add(property);
        dependent = new Template("Empty");
        predictors = new Template(name + "#Independent");
        for (int i = 0; i < newInstances.numAttributes(); i++) {
            property = createPropertyFromReference(new Reference(newInstances.attribute(i).name()), entry, referer);
            property.setOrder(i + 1);
            predictors.add(property);
        }
    } else if (fclassifier != null) {
        try {
            System.out.println(fclassifier.getClassifier().getCapabilities());
            fclassifier.getCapabilities().testWithFail(newInstances);
        } catch (Exception x) {
            throw new AmbitException(x);
        }
        try {
            // if (classifier instanceof LinearRegression) //don't do feature selection!
            // classifier.setOptions(new String[] {"-S","1"});
            StringBuilder evaluationString = new StringBuilder();
            EvaluationStats<String> stats = new EvaluationStats<String>(EVType.crossvalidation, null);
            Evaluation eval = new Evaluation(newInstances);
            if (newInstances.numInstances() > 20) {
                eval.crossValidateModel(fclassifier, newInstances, 10, new Random(1));
                evaluationString.append("Crossvalidation 10 folds\n");
            } else {
                eval.crossValidateModel(fclassifier, newInstances, 2, new Random(1));
                evaluationString.append("Crossvalidation 2 folds\n");
            }
            try {
                evaluationString.append(eval.toSummaryString());
                evaluationString.append("\n");
            } catch (Exception x) {
            }
            try {
                evaluationString.append(eval.toClassDetailsString());
                evaluationString.append("\n");
                evaluationString.append(eval.toMatrixString());
                evaluationString.append("\n");
            } catch (Exception x) {
            }
            try {
                evaluationString.append(eval.weightedAreaUnderROC());
            } catch (Exception x) {
            }
            try {
                stats.setMAE(eval.meanAbsoluteError());
            } catch (Exception x) {
            }
            try {
                stats.setRMSE(eval.rootMeanSquaredError());
            } catch (Exception x) {
            }
            try {
                stats.setPctCorrect(eval.pctCorrect());
                stats.setPctInCorrect(eval.pctIncorrect());
            } catch (Exception x) {
            }
            stats.setContent(evaluationString.toString());
            m.addEvaluation(stats);
            stats = new EvaluationStats<String>(EVType.evaluation_training, null);
            evaluationString = new StringBuilder();
            fclassifier.buildClassifier(newInstances);
            eval = new Evaluation(newInstances);
            eval.evaluateModel(fclassifier, newInstances);
            try {
                evaluationString.append("\nTraining dataset statistics\n");
                evaluationString.append(eval.toSummaryString());
                evaluationString.append("\n");
            } catch (Exception x) {
            }
            try {
                evaluationString.append(eval.toMatrixString());
                evaluationString.append("\n");
            } catch (Exception x) {
            }
            try {
                stats.setMAE(eval.meanAbsoluteError());
            } catch (Exception x) {
            }
            try {
                stats.setRMSE(eval.rootMeanSquaredError());
            } catch (Exception x) {
            }
            try {
                stats.setPctCorrect(eval.pctCorrect());
                stats.setPctInCorrect(eval.pctIncorrect());
            } catch (Exception x) {
            }
            stats.setContent(evaluationString.toString());
            m.addEvaluation(stats);
        } catch (WekaException x) {
            throw new AmbitException(x);
        } catch (Exception x) {
            throw new AmbitException(x);
        }
        dependent = new Template(name + "#Dependent");
        Property property = createPropertyFromReference(new Reference(newInstances.attribute(newInstances.classIndex()).name()), entry, referer);
        dependent.add(property);
        predicted = new PredictedVarsTemplate(name + "#Predicted");
        Property predictedProperty = new Property(property.getName(), prediction);
        predictedProperty.setLabel(property.getLabel());
        predictedProperty.setUnits(property.getUnits());
        predictedProperty.setClazz(property.getClazz());
        predictedProperty.setNominal(property.isNominal());
        predicted.add(predictedProperty);
        predictedProperty.setEnabled(true);
        if (supportsDistribution(fclassifier)) {
            Property confidenceProperty = new Property(String.format("%s Confidence", property.getName()), prediction);
            confidenceProperty.setLabel(Property.opentox_ConfidenceFeature);
            confidenceProperty.setUnits("");
            confidenceProperty.setClazz(Number.class);
            confidenceProperty.setEnabled(true);
            PropertyAnnotation<Property> a = new PropertyAnnotation<Property>();
            a.setType(OT.OTClass.ModelConfidenceFeature.name());
            a.setPredicate(OT.OTProperty.confidenceOf.name());
            a.setObject(predictedProperty);
            PropertyAnnotations aa = new PropertyAnnotations();
            aa.add(a);
            confidenceProperty.setAnnotations(aa);
            predicted.add(confidenceProperty);
        }
        predictors = new Template(name + "#Independent");
        for (int i = 0; i < newInstances.numAttributes(); i++) {
            if ("CompoundURI".equals(newInstances.attribute(i).name()))
                continue;
            if (newInstances.classIndex() == i)
                continue;
            property = createPropertyFromReference(new Reference(newInstances.attribute(i).name()), entry, referer);
            property.setOrder(i + 1);
            predictors.add(property);
        }
    } else if (pca != null) {
        try {
            pca.setVarianceCovered(1.0);
            pca.buildEvaluator(newInstances);
        } catch (Exception x) {
            throw new AmbitException(x);
        }
        Property property;
        dependent = new Template("Empty");
        predictors = new Template(name + "#Independent");
        for (int i = 0; i < newInstances.numAttributes(); i++) {
            if ("CompoundURI".equals(newInstances.attribute(i).name()))
                continue;
            if (newInstances.classIndex() == i)
                continue;
            property = createPropertyFromReference(new Reference(newInstances.attribute(i).name()), entry, referer);
            property.setOrder(i + 1);
            predictors.add(property);
        }
        predicted = new PredictedVarsTemplate(name + "#Predicted");
        for (int i = 0; i < newInstances.numAttributes(); i++) {
            if (newInstances.classIndex() == i)
                continue;
            property = createPropertyFromReference(new Reference(String.format("PCA_%d", i + 1)), entry, referer);
            property.setClazz(Number.class);
            property.setOrder(i + 1);
            predicted.add(property);
        }
    }
    m.setPredictors(predictors);
    m.setDependent(dependent);
    m.setPredicted(predicted);
    try {
        serializeModel(fclusterer == null ? fclassifier == null ? pca : fclassifier : fclusterer, newInstances, m);
    } catch (Exception x) {
        throw new AmbitException(x);
    }
    return m;
}
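The core Weka pattern in process() is a MultiFilter chain wrapped in a FilteredClassifier, cross-validated with Evaluation and then trained on the full set. A minimal, self-contained sketch of that pattern with the Ambit-specific templates, reporters and EvaluationStats omitted (the data.arff path and the choice of J48 are placeholders, not part of the original code):

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.meta.FilteredClassifier;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import weka.filters.MultiFilter;
import weka.filters.unsupervised.attribute.Remove;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;
import weka.filters.unsupervised.attribute.Standardize;

public class FilteredModelSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder dataset; the last attribute is assumed to be the class.
        Instances data = DataSource.read("data.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Filter chain analogous to the one assembled in process():
        // drop the identifier column, impute missing values, standardize.
        Remove removeId = new Remove();
        removeId.setAttributeIndices("1");
        MultiFilter chain = new MultiFilter();
        chain.setFilters(new Filter[] { removeId, new ReplaceMissingValues(), new Standardize() });

        // FilteredClassifier applies the chain before training and prediction,
        // so the same preprocessing is reused when the model is applied later.
        FilteredClassifier fc = new FilteredClassifier();
        fc.setFilter(chain);
        fc.setClassifier(new J48());

        // Cross-validate (10 folds if there is enough data, 2 otherwise),
        // mirroring the fold selection above, then train on the full set.
        Evaluation eval = new Evaluation(data);
        int folds = data.numInstances() > 20 ? 10 : 2;
        eval.crossValidateModel(fc, data, folds, new Random(1));
        System.out.println(eval.toSummaryString("Crossvalidation " + folds + " folds\n", false));

        fc.buildClassifier(data);
    }
}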