Use of net.sourceforge.ondex.InvalidPluginArgumentException in the knetbuilder project by Rothamsted: the Export class, start method.
/**
 * Exports all entity paths matching the configured path definition (ORDER_ARG)
 * to the export file, optionally zipped and with duplicate paths removed.
 *
 * @throws InvalidPluginArgumentException if a required plugin argument is invalid
 */
public void start() throws InvalidPluginArgumentException {
    noDuplicates = (Boolean) args.getUniqueValue(REMOVE_DUPLICATES_ARG);
    boolean useLinks = (Boolean) args.getUniqueValue(LINKS_ARG);
    boolean translateTaxID = (Boolean) args.getUniqueValue(TRANSLATE_TAXID_ARG);
    boolean zipFile = (Boolean) args.getUniqueValue(ZIP_FILE_ARG);
    Object min = args.getUniqueValue(MIN_REPORT_DEPTH_ARG);
    if (min != null) {
        // the argument may arrive as any numeric wrapper; parse its string form
        cutoff = Integer.parseInt(min.toString());
    }
    try {
        om = graph.getMetaData();
        String pathDefinition = (String) args.getUniqueValue(ORDER_ARG);
        metaDataPath = parsePathDefinition(pathDefinition);
        // parse the attribute field arguments
        Object[] attArgs = args.getObjectValueArray(ATTRIB_ARG);
        FieldArgumentParser parser = new FieldArgumentParser(useLinks, translateTaxID);
        for (Object arg : attArgs) {
            parser.parseArguments(arg.toString(), graph);
        }
        List<ONDEXEntityPath> paths = new ArrayList<ONDEXEntityPath>();
        if (metaDataPath.get(0).get(0) instanceof ConceptClass) {
            // path definition starts with a concept class: seed from concepts
            Set<ONDEXConcept> seeds = new HashSet<ONDEXConcept>();
            for (MetaData md : metaDataPath.get(0)) {
                seeds.addAll(graph.getConceptsOfConceptClass((ConceptClass) md));
            }
            for (ONDEXConcept concept : seeds) {
                ONDEXEntityPath route = new ONDEXEntityPath(concept);
                processEntity(concept, route, paths, 0, ProcessingType.CONCEPT, false);
            }
        } else {
            // otherwise the first path element must be a RelationType: seed from relations
            Set<ONDEXRelation> seeds = new HashSet<ONDEXRelation>();
            for (MetaData md : metaDataPath.get(0)) {
                seeds.addAll(graph.getRelationsOfRelationType((RelationType) md));
            }
            for (ONDEXRelation r : seeds) {
                ONDEXEntityPath path = new ONDEXEntityPath(r);
                // walk the relation in both directions; clone so the walks do not share state
                processEntity(r, path, paths, 0, ProcessingType.INCOMING_RELATION, false);
                processEntity(r, (ONDEXEntityPath) path.clone(), paths, 0, ProcessingType.OUTGOING_RELATION, true);
            }
        }
        // longest path determines the width of the printed output
        int maxLength = 0;
        for (ONDEXEntityPath path : paths) {
            if (path.getLength() > maxLength) {
                maxLength = path.getLength();
            }
        }
        if (noDuplicates) {
            // de-duplicate via a set; note that path ordering is not preserved
            paths = new ArrayList<ONDEXEntityPath>(new HashSet<ONDEXEntityPath>(paths));
        }
        File file = new File((String) args.getUniqueValue(FileArgumentDefinition.EXPORT_FILE));
        OndexPathPrinter printer = new OndexPathPrinter(parser.getAttributeModel(), file, maxLength, graph, zipFile);
        try {
            for (ONDEXEntityPath path : paths) {
                printer.printPath(path);
            }
        } finally {
            // FIX: previously the printer was only closed on success, leaking the
            // output file handle whenever printPath threw
            printer.close();
        }
    } catch (Exception e) {
        // NOTE(review): failures are only logged here; consider propagating a
        // plugin-level exception so callers can detect a failed export
        e.printStackTrace();
    }
}
Use of net.sourceforge.ondex.InvalidPluginArgumentException in the knetbuilder project by Rothamsted: the Mapping class, start method.
/**
 * Runs an all-against-all Decypher alignment per species (tax id) and creates
 * "has similar sequence" relations between reciprocal hits (paralog prediction).
 *
 * @throws InvalidPluginArgumentException if a required plugin argument is invalid
 * @throws IOException on I/O problems while preparing or running the alignment
 */
@Override
public void start() throws InvalidPluginArgumentException, IOException {
    eviType = graph.getMetaData().getEvidenceType(EVIDENCE_BLAST);
    if (eviType == null) {
        // FIX: report the id that was actually looked up (was RT_HAS_SIMILAR_SEQUENCE)
        ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new EvidenceTypeMissingEvent(EVIDENCE_BLAST, Mapping.getCurrentMethodName()));
    }
    eValue = graph.getMetaData().getAttributeName(ATT_E_VALUE);
    if (eValue == null) {
        // FIX: report the id that was actually looked up (was RT_HAS_SIMILAR_SEQUENCE)
        ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new AttributeNameMissingEvent(ATT_E_VALUE, Mapping.getCurrentMethodName()));
    }
    bitScore = graph.getMetaData().getAttributeName(ATT_BITSCORE);
    if (bitScore == null) {
        ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new AttributeNameMissingEvent(ATT_BITSCORE, Mapping.getCurrentMethodName()));
    }
    AttributeName coverage = graph.getMetaData().getAttributeName(MetaData.ATT_COVERAGE);
    if (coverage == null) {
        coverage = graph.getMetaData().getFactory().createAttributeName(ATT_COVERAGE, Double.class);
    }
    // relation type and evidence type used for the created mappings
    rtSet = graph.getMetaData().getRelationType(RT_HAS_SIMILAR_SEQUENCE);
    if (rtSet == null) {
        // NOTE(review): this repeated lookup returns null whenever rtSet is null, so
        // the missing-event always fires and the relation type is created with a null
        // parent - confirm this is the intended fallback
        RelationType rt = graph.getMetaData().getRelationType(RT_HAS_SIMILAR_SEQUENCE);
        if (rt == null) {
            ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new RelationTypeMissingEvent(RT_HAS_SIMILAR_SEQUENCE, Mapping.getCurrentMethodName()));
        }
        rtSet = graph.getMetaData().getFactory().createRelationType(RT_HAS_SIMILAR_SEQUENCE, rt);
    }
    seq_overlap = graph.getMetaData().getAttributeName(MetaData.ATT_OVERLAP);
    if (seq_overlap == null) {
        seq_overlap = graph.getMetaData().getFactory().createAttributeName(MetaData.ATT_OVERLAP, "Alignment overlap", Integer.class);
    }
    frame = graph.getMetaData().getAttributeName(MetaData.ATT_FRAME);
    if (frame == null) {
        frame = graph.getMetaData().getFactory().createAttributeName(MetaData.ATT_FRAME, "Translation fame", Integer.class);
    }
    query_length = graph.getMetaData().getAttributeName(MetaData.ATT_QUERY_LENGTH);
    if (query_length == null) {
        query_length = graph.getMetaData().getFactory().createAttributeName(MetaData.ATT_QUERY_LENGTH, "Length of query sequence", Integer.class);
    }
    target_length = graph.getMetaData().getAttributeName(MetaData.ATT_TARGET_LENGTH);
    if (target_length == null) {
        target_length = graph.getMetaData().getFactory().createAttributeName(MetaData.ATT_TARGET_LENGTH, "Length of target sequence", Integer.class);
    }
    HashSet<String> taxIds = new HashSet<String>();
    for (String taxId : (String[]) args.getObjectValueArray(TAX_ID_ARG)) {
        taxIds.add(taxId);
    }
    String sequenceType = (String) args.getUniqueValue(SEQ_TYPE_ARG);
    Integer speciesSeqSize = (Integer) args.getUniqueValue(SPECIES_SEQUENCE_SIZE_ARG);
    AttributeName att = graph.getMetaData().getAttributeName(sequenceType);
    AttributeName taxId = graph.getMetaData().getAttributeName(ATT_TAXID);
    // index: taxid -> set of concept ids that carry both a sequence and a taxid
    Map<Integer, Set<Integer>> index = new HashMap<Integer, Set<Integer>>();
    Set<ONDEXConcept> allSequences = graph.getConceptsOfAttributeName(att);
    Set<ONDEXConcept> allTaxId = graph.getConceptsOfAttributeName(taxId);
    // NOTE(review): assumes the set returned by the graph is mutable - confirm
    allSequences.retainAll(allTaxId);
    // index all tax ids and their assigned concepts
    System.out.println("Creating index");
    for (ONDEXConcept queryConcept : allSequences) {
        Attribute attribute = queryConcept.getAttribute(taxId);
        if (attribute != null) {
            String value = (String) attribute.getValue();
            // when tax ids are given as arguments, restrict to those species
            if (taxIds.size() == 0 || taxIds.contains(value)) {
                int taxIdVal = Integer.valueOf(value);
                Set<Integer> setCon = index.get(taxIdVal);
                if (setCon == null) {
                    setCon = new HashSet<Integer>();
                    index.put(taxIdVal, setCon);
                }
                setCon.add(queryConcept.getId());
            }
        }
    }
    System.out.println("Paralog prediction found " + index.size() + " species in the database");
    // drop species whose sequence databases are too small to consider
    Iterator<Integer> taxids = index.keySet().iterator();
    while (taxids.hasNext()) {
        int taxIdVals = taxids.next();
        Set<Integer> sequences = index.get(taxIdVals);
        if (sequences.size() < speciesSeqSize) {
            // FIX: remove through the iterator; calling index.remove(taxIdVals) here
            // threw ConcurrentModificationException on the next iteration
            taxids.remove();
        }
    }
    System.out.println("Paralog prediction found " + index.size() + " qualifing species in the database");
    Float evalue = ((Number) args.getUniqueValue(E_VALUE_ARG)).floatValue();
    Integer bitscore = (Integer) args.getUniqueValue(BITSCORE_ARG);
    Integer cutoff = (Integer) args.getUniqueValue(CUTOFF_ARG);
    Float overlap = ((Number) args.getUniqueValue(OVERLAP_ARG)).floatValue();
    String programDir = (String) args.getUniqueValue(PROGRAM_DIR_ARG);
    System.out.println("Running paralog prediction");
    DecypherAlignment dcAlign = null;
    String algo = null;
    if (sequenceType.equalsIgnoreCase(AA)) {
        algo = DecypherAlignment.ALGO_BLASTP;
    } else if (sequenceType.equalsIgnoreCase(NA)) {
        algo = DecypherAlignment.ALGO_BLASTN;
    } else {
        fireEventOccurred(new WrongParameterEvent("Decypher can not Align these types of sequences :" + " " + sequenceType + "->" + sequenceType, getCurrentMethodName()));
    }
    try {
        // bitscore is optional; default the Decypher threshold to 0 when absent
        int dcbit = 0;
        if (bitscore != null)
            dcbit = bitscore;
        dcAlign = new DecypherAlignment(net.sourceforge.ondex.config.Config.ondexDir, programDir, cutoff, overlap, evalue, dcbit, 60, true);
    } catch (Exception e) {
        e.printStackTrace();
    }
    int speciesNumber = 0;
    int totalSpecies = index.keySet().size();
    // run one all-against-all alignment per remaining species
    Iterator<Integer> keys = index.keySet().iterator();
    while (keys.hasNext()) {
        int taxIdVals = keys.next();
        speciesNumber++;
        Set<Integer> sequences = index.get(taxIdVals);
        fireEventOccurred(new GeneralOutputEvent("Finding paralogs for taxid: " + taxIdVals + " on " + sequences.size() + " sequences (" + speciesNumber + " of " + totalSpecies + ")", getCurrentMethodName()));
        Set<ONDEXConcept> seqToBLAST = BitSetFunctions.create(graph, ONDEXConcept.class, sequences);
        fireEventOccurred(new GeneralOutputEvent("Running " + SequenceAlignmentProgramArgumentDefinition.DECYPHER, getCurrentMethodName()));
        try {
            Collection<Match> resultList = dcAlign.query(graph, seqToBLAST, seqToBLAST, algo);
            // index the matches: queryID -> set of matches for that query
            Map<Integer, Set<Match>> indexOnQueryId = new HashMap<Integer, Set<Match>>();
            System.out.println("Indexing matches");
            Iterator<Match> matches = resultList.iterator();
            while (matches.hasNext()) {
                Match match = matches.next();
                Set<Match> qMatches = indexOnQueryId.get(match.getQueryId());
                if (qMatches == null) {
                    qMatches = new HashSet<Match>();
                    indexOnQueryId.put(match.getQueryId(), qMatches);
                }
                qMatches.add(match);
            }
            // allow the raw result list to be collected
            resultList = null;
            Iterator<Integer> querIt = indexOnQueryId.keySet().iterator();
            while (querIt.hasNext()) {
                int query = querIt.next();
                ONDEXConcept queryConcept = graph.getConcept(query);
                Iterator<Match> matchIt = indexOnQueryId.get(query).iterator();
                while (matchIt.hasNext()) {
                    Match hit = matchIt.next();
                    // skip self hits and hits below the optional bitscore threshold
                    if (query != hit.getTargetId() && (bitscore == null || bitscore <= hit.getScore())) {
                        // only keep reciprocal hits (target also hits the query)
                        Set<Match> recipricalHits = indexOnQueryId.get(hit.getTargetId());
                        boolean found = false;
                        if (recipricalHits != null) {
                            for (Match hitr : recipricalHits) {
                                if (hitr.getTargetId() == query)
                                    found = true;
                            }
                        }
                        if (found) {
                            ONDEXConcept toConcept = graph.getConcept(hit.getTargetId());
                            ONDEXRelation relation = graph.getRelation(queryConcept, toConcept, rtSet);
                            // only create the relation (and its attributes) once
                            if (relation == null) {
                                relation = graph.getFactory().createRelation(queryConcept, toConcept, rtSet, eviType);
                                relation.createAttribute(eValue, Double.valueOf(hit.getEValue()), false);
                                relation.createAttribute(bitScore, Double.valueOf(hit.getScore()), false);
                                relation.createAttribute(coverage, Double.valueOf(hit.geQueryCoverageSequence()), false);
                                relation.createAttribute(seq_overlap, Integer.valueOf(hit.getOverlapingLength()), false);
                                relation.createAttribute(frame, Integer.valueOf(hit.getQueryFrame()), false);
                                relation.createAttribute(query_length, Integer.valueOf(hit.getLengthOfQuerySequence()), false);
                                relation.createAttribute(target_length, Integer.valueOf(hit.getLengthOfTargetSequence()), false);
                            }
                        }
                    }
                }
            }
        } catch (MissingFileException e) {
            e.printStackTrace();
        } catch (AlgorithmNotSupportedException e) {
            e.printStackTrace();
        }
    }
}
Use of net.sourceforge.ondex.InvalidPluginArgumentException in the knetbuilder project by Rothamsted: the Mapping class, getMethod method.
/**
 * Returns the method which is defined by the <code>METHOD_ARG</code> PluginArgument.
 * The implementation class is resolved by reflection from the ".method" sub-package
 * next to this class and must expose exactly one public constructor.
 *
 * @return IMethod - an implementation of the IMethod interface, never null
 * @throws MethodNotFoundException - if the method can't be found or instantiated
 * @throws InvalidPluginArgumentException - if a required plugin argument is invalid
 */
public IMethod getMethod(ONDEXGraph graph) throws MethodNotFoundException, InvalidPluginArgumentException {
    String methodName = (String) args.getUniqueValue(ArgumentNames.METHOD_ARG);
    try {
        Class<?> c = Class.forName(this.getClass().getPackage().getName() + ".method." + methodName);
        if (c.getConstructors().length != 1) {
            throw new MethodNotFoundException();
        }
        return (IMethod) c.getConstructors()[0].newInstance(new Object[] {
                super.args.getUniqueValue(ArgumentNames.PROGRAM_DIR_ARG).toString(),
                super.args.getUniqueValue(ArgumentNames.PFAM_PATH_ARG).toString(),
                fastaFile.getAbsolutePath(),
                super.args.getUniqueValue(ArgumentNames.EVALUE_ARG).toString(),
                super.args.getUniqueValue(ArgumentNames.BIT_SCORE_ARG).toString(),
                (String) super.args.getUniqueValue(ArgumentNames.HMM_THRESHOLDS_ARG),
                graph, atSource, ccSource });
    } catch (MethodNotFoundException e) {
        // FIX: previously this was caught by the broad catch below and turned into a
        // null return; let the declared exception propagate instead
        throw e;
    } catch (Exception e) {
        // FIX: previously the exception was swallowed and null returned, causing a
        // NullPointerException at the call site; surface the failure to the caller
        e.printStackTrace();
        throw new MethodNotFoundException();
    }
}
Use of net.sourceforge.ondex.InvalidPluginArgumentException in the knetbuilder project by Rothamsted: the Mapping class, start method (sequence-to-Pfam mapping).
/**
 * Maps sequence concepts to Pfam protein-family concepts: builds a FASTA
 * database (unless the Decypher method is used), runs the configured
 * HMM/alignment method, and creates or updates "member is part of" relations
 * between sequences and their matching families.
 *
 * @throws InvalidPluginArgumentException if a required plugin argument is invalid
 */
@Override
public void start() throws InvalidPluginArgumentException {
    EvidenceType etIMPD = graph.getMetaData().getEvidenceType("IMPD");
    ConceptClass ccProteinFamily = graph.getMetaData().getConceptClass(MetaData.CC_ProteinFamily);
    DataSource dataSource = graph.getMetaData().getDataSource("Seq2Pfam");
    if (dataSource == null) {
        dataSource = graph.getMetaData().createDataSource("Seq2Pfam", "Sequence to Pfam Mapping", "Concepts created on the fly by sequence to pfam");
    }
    ccSource = graph.getMetaData().getConceptClass(args.getUniqueValue(ArgumentNames.CONCEPT_CLASS_ARG).toString());
    atSource = graph.getMetaData().getAttributeName(args.getUniqueValue(ArgumentNames.ATTRIBUTE_ARG).toString());
    rtMemberIsPartOf = graph.getMetaData().getRelationType(MetaData.RT_MEMBER_IS_PART_OF);
    atrEvalue = graph.getMetaData().getAttributeName(MetaData.ATT_EVALUE);
    atrBitscore = graph.getMetaData().getAttributeName(MetaData.ATT_BITSCORE);
    atrDomainEvalue = graph.getMetaData().getFactory().createAttributeName("BD_Evalue", "Bestdomain EValue", Double.class);
    atrDomainBitscore = graph.getMetaData().getFactory().createAttributeName("BD_Score", "Best domain Score", Double.class);
    atrTranslationFrame = graph.getMetaData().getFactory().createAttributeName("TranslationFrame", "TranslationFrame", Integer.class);
    fastaFile = new File(super.args.getUniqueValue("TmpDir").toString());
    if (!((String) args.getUniqueValue(ArgumentNames.METHOD_ARG)).equalsIgnoreCase("decypher")) {
        // create a FASTA-styled database for all non-Decypher methods
        try {
            fastaFile = File.createTempFile("fasta_" + new Date(System.currentTimeMillis()).toString().replaceAll(" ", "_").replaceAll(":", "_"), ".tmp", fastaFile);
            // NOTE(review): mkdirs() on an existing temp file returns false and does
            // nothing - confirm whether this call is needed at all
            fastaFile.mkdirs();
            Set<ONDEXConcept> allConcepts = graph.getConceptsOfConceptClass(ccSource);
            if (((String) args.getUniqueValue(ArgumentNames.METHOD_ARG)).equalsIgnoreCase("Hmmer") && atSource.getId().equalsIgnoreCase("NA")) {
                // Hmmer on nucleotide sequences: translate before writing FASTA
                try {
                    translateToFASTAFile(fastaFile, allConcepts, graph);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                BufferedWriter b = new BufferedWriter(new FileWriter(fastaFile));
                int writtenSequences = 0;
                for (ONDEXConcept c : allConcepts) {
                    if (this.addEntryToFastaDatabase(b, c, graph))
                        writtenSequences++;
                }
                b.close();
                ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new GeneralOutputEvent("Found " + writtenSequences + " sequences", Mapping.class.toString()));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // resolve the method class; fall back to Blast when the configured one is unknown
    IMethod method;
    try {
        method = this.getMethod(graph);
    } catch (MethodNotFoundException e1) {
        ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new GeneralOutputEvent("Unknown method (" + args.getUniqueValue(ArgumentNames.METHOD_ARG).toString() + "), check " + ArgumentNames.METHOD_ARG + " parameter. Using blast instead", Mapping.class.toString()));
        super.args.addOption(ArgumentNames.METHOD_ARG, "Blast");
        try {
            method = getMethod(graph);
        } catch (MethodNotFoundException e) {
            ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new GeneralOutputEvent("There are serious problems in locating the right mapping method", Mapping.class.toString()));
            e.printStackTrace();
            return;
        }
    }
    LuceneEnv lenv = LuceneRegistry.sid2luceneEnv.get(graph.getSID());
    try {
        // run the alignment/HMM search and collect the matches
        Collection<HMMMatch> results = method.execute();
        ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new GeneralOutputEvent("Found " + results.size() + " theoretical matches", Mapping.class.toString()));
        // create relations between each protein and its matching family
        for (HMMMatch result : results) {
            Set<ONDEXConcept> itResults = method.searchMatchingConceptsInLuceneEnvironment(lenv, result);
            if (itResults == null || itResults.size() == 0) {
                // unknown family: create a new concept for it on the fly
                ONDEXEventHandler.getEventHandlerForSID(graph.getSID()).fireEventOccurred(new GeneralOutputEvent("Pfam family " + result + " was not found: creating new one", Mapping.class.toString()));
                ONDEXConcept concept = graph.getFactory().createConcept(result.getHmmAccession(), dataSource, ccProteinFamily, etIMPD);
                BitSet sbs = new BitSet();
                sbs.set(concept.getId());
                itResults = BitSetFunctions.create(graph, ONDEXConcept.class, sbs);
            }
            for (ONDEXConcept pfamConcept : itResults) {
                // unwrap Lucene proxies to the real graph concept
                if (pfamConcept instanceof LuceneConcept) {
                    pfamConcept = ((LuceneConcept) pfamConcept).getParent();
                }
                ONDEXConcept from = graph.getConcept(result.getTargetId());
                ONDEXRelation relation = graph.getRelation(from, pfamConcept, rtMemberIsPartOf);
                if (relation == null) {
                    // first hit for this pair: create the relation with all scores
                    relation = graph.getFactory().createRelation(from, pfamConcept, rtMemberIsPartOf, method.getEvidenceType());
                    relation.createAttribute(atrEvalue, result.getEValue(), false);
                    if (result.getScore() != -1)
                        relation.createAttribute(atrBitscore, result.getScore(), false);
                    if (result.getBestDomainEvalue() != null)
                        relation.createAttribute(atrDomainEvalue, result.getBestDomainEvalue(), false);
                    if (result.getBestDomainScore() != null)
                        relation.createAttribute(atrDomainBitscore, result.getBestDomainScore(), false);
                    if (result.getQueryFrame() != null)
                        relation.createAttribute(atrTranslationFrame, result.getQueryFrame(), false);
                } else {
                    // relation already exists: keep the best (lowest) e-value hit
                    Attribute evalue = relation.getAttribute(atrEvalue);
                    Attribute bitscore = relation.getAttribute(atrBitscore);
                    Attribute domain_evalue = relation.getAttribute(atrDomainEvalue);
                    Attribute domain_bitscore = relation.getAttribute(atrDomainBitscore);
                    Attribute frame = relation.getAttribute(atrTranslationFrame);
                    if (evalue == null || result.getEValue() < ((Number) (evalue.getValue())).doubleValue()) {
                        if (evalue == null) {
                            // FIX: previously evalue.setValue(...) was called here even
                            // when evalue was null, throwing a NullPointerException
                            relation.createAttribute(atrEvalue, result.getEValue(), false);
                        } else {
                            evalue.setValue(result.getEValue());
                        }
                        if (bitscore == null) {
                            if (result.getScore() > -1) {
                                relation.createAttribute(atrBitscore, result.getScore(), false);
                            }
                        } else {
                            // NOTE(review): deleting on a valid score but setting on a
                            // sentinel (-1) score looks inverted - confirm intent
                            if (result.getScore() > -1) {
                                // remove to avoid confusion
                                relation.deleteAttribute(atrBitscore);
                            } else {
                                bitscore.setValue(result.getScore());
                            }
                        }
                        addAttribute(result.getBestDomainEvalue(), domain_evalue, relation, atrDomainEvalue);
                        addAttribute(result.getBestDomainScore(), domain_bitscore, relation, atrDomainBitscore);
                        addAttribute(result.getQueryFrame(), frame, relation, atrTranslationFrame);
                    }
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
Use of net.sourceforge.ondex.InvalidPluginArgumentException in the knetbuilder project by Rothamsted: the Export class, start method (JSON export).
// parameter to exclude all of selected feature
/* private static final String ALL = "ALL";
// export concepts that do not have relations, default true
private boolean writeIsolatedConcepts = true;
// mapping concepts to relation counts
private Map<Integer, Integer> conIdToRelationCounts = null;
// A list of the Attributes of Attribute values to exclude. String ALL =>
// Excludes all attributes.
private Set<String> excludeGDSSet = new HashSet<String>();
// A list of the ConceptClasses to be excluded in the export.
private Set<String> excludeCCSet = new HashSet<String>();
// A list of the RelationTypes to be excluded in the export.
private Set<String> excludeRt = new HashSet<String>();
*/
/**
* Other application dependent annotation data for this export, can be null.
*/
// protected Map<String, String> annotations = null;
/**
 * Builds a file with Ondex Graph data exported to JSON variables for concepts (nodes),
 * relations (edges) and other Item Info.
 *
 * @throws IOException if writing the JSON output fails
 * @throws InvalidPluginArgumentException if a required plugin argument is invalid
 */
@Override
public void start() throws IOException, InvalidPluginArgumentException {
    setOptionalArguments();
    fireEventOccurred(new GeneralOutputEvent("Ready to Export.", "[Export - start]"));
    // JSON container for the CytoscapeJS node/edge data.
    JSONObject cytoscapeJson = new JSONObject();
    // JSON container for the complete graph dump (metadata plus concept and
    // relation information).
    JSONObject completeDataJson = new JSONObject();
    // Collectors for every concept (node) and relation (edge).
    JSONArray nodeArray = new JSONArray();
    JSONArray edgeArray = new JSONArray();
    // Destination writer for the network JSON and the graph metadata.
    graphFileWriter = getOutputFileForGraphJson();
    try {
        // Pull all concepts and relations out of the ONDEXGraph, if present.
        if (graph != null) {
            concepts = graph.getConcepts();
            relations = graph.getRelations();
        }
        // All graph metadata in JSON form, stored under the ONDEXMETADATA key.
        completeDataJson.put(JSONAttributeNames.ONDEXMETADATA, getJsonMetadata());
        // Node data first, then edge data, for the CytoscapeJS graph.
        cytoscapeJson = getNodesJsonData(cytoscapeJson, nodeArray, relations);
        cytoscapeJson = getEdgesJsonData(cytoscapeJson, edgeArray);
        // Persist both the graph JSON and the metadata JSON.
        writeJSONToFile(cytoscapeJson, completeDataJson, graphFileWriter);
    } catch (Exception ex) {
        throw new IOException("Failed to write Attribute values", ex);
    }
    fireEventOccurred(new GeneralOutputEvent("Finished JSON Export.", "[Export - start]"));
}
Aggregations